// relay-palm.go
package controller

import (
	"fmt"

	"github.com/gin-gonic/gin"
)
// PaLMChatMessage is a single chat turn in the PaLM generateMessage API.
// relayPaLM fills Author with "0" for user messages and "1" for everything
// else — PaLM only requires author labels to be distinct per participant.
type PaLMChatMessage struct {
	Author  string `json:"author"`
	Content string `json:"content"`
}
// PaLMFilter describes a content-filtering verdict attached to a PaLM
// generateMessage response (reason code plus a human-readable message).
type PaLMFilter struct {
	Reason  string `json:"reason"`
	Message string `json:"message"`
}
  14. // https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#request-body
  15. type PaLMChatRequest struct {
  16. Prompt []Message `json:"prompt"`
  17. Temperature float64 `json:"temperature"`
  18. CandidateCount int `json:"candidateCount"`
  19. TopP float64 `json:"topP"`
  20. TopK int `json:"topK"`
  21. }
  22. // https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage#response-body
  23. type PaLMChatResponse struct {
  24. Candidates []Message `json:"candidates"`
  25. Messages []Message `json:"messages"`
  26. Filters []PaLMFilter `json:"filters"`
  27. }
  28. func relayPaLM(openAIRequest GeneralOpenAIRequest, c *gin.Context) *OpenAIErrorWithStatusCode {
  29. // https://developers.generativeai.google/api/rest/generativelanguage/models/generateMessage
  30. messages := make([]PaLMChatMessage, 0, len(openAIRequest.Messages))
  31. for _, message := range openAIRequest.Messages {
  32. var author string
  33. if message.Role == "user" {
  34. author = "0"
  35. } else {
  36. author = "1"
  37. }
  38. messages = append(messages, PaLMChatMessage{
  39. Author: author,
  40. Content: message.Content,
  41. })
  42. }
  43. request := PaLMChatRequest{
  44. Prompt: nil,
  45. Temperature: openAIRequest.Temperature,
  46. CandidateCount: openAIRequest.N,
  47. TopP: openAIRequest.TopP,
  48. TopK: openAIRequest.MaxTokens,
  49. }
  50. // TODO: forward request to PaLM & convert response
  51. fmt.Print(request)
  52. return nil
  53. }