// relay-gemini-native.go — handlers for relaying Gemini native API responses.
package gemini

import (
	"fmt"
	"io"
	"net/http"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/constant"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)
  16. func GeminiTextGenerationHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  17. defer service.CloseResponseBodyGracefully(resp)
  18. // 读取响应体
  19. responseBody, err := io.ReadAll(resp.Body)
  20. if err != nil {
  21. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  22. }
  23. if common.DebugEnabled {
  24. println(string(responseBody))
  25. }
  26. // 解析为 Gemini 原生响应格式
  27. var geminiResponse dto.GeminiChatResponse
  28. err = common.Unmarshal(responseBody, &geminiResponse)
  29. if err != nil {
  30. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  31. }
  32. if len(geminiResponse.Candidates) == 0 && geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
  33. common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
  34. }
  35. // 计算使用量(基于 UsageMetadata)
  36. usage := buildUsageFromGeminiMetadata(geminiResponse.UsageMetadata, info.GetEstimatePromptTokens())
  37. service.IOCopyBytesGracefully(c, resp, responseBody)
  38. return &usage, nil
  39. }
  40. func NativeGeminiEmbeddingHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.Usage, *types.NewAPIError) {
  41. defer service.CloseResponseBodyGracefully(resp)
  42. responseBody, err := io.ReadAll(resp.Body)
  43. if err != nil {
  44. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  45. }
  46. if common.DebugEnabled {
  47. println(string(responseBody))
  48. }
  49. usage := service.ResponseText2Usage(c, "", info.UpstreamModelName, info.GetEstimatePromptTokens())
  50. if info.IsGeminiBatchEmbedding {
  51. var geminiResponse dto.GeminiBatchEmbeddingResponse
  52. err = common.Unmarshal(responseBody, &geminiResponse)
  53. if err != nil {
  54. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  55. }
  56. } else {
  57. var geminiResponse dto.GeminiEmbeddingResponse
  58. err = common.Unmarshal(responseBody, &geminiResponse)
  59. if err != nil {
  60. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  61. }
  62. }
  63. service.IOCopyBytesGracefully(c, resp, responseBody)
  64. return usage, nil
  65. }
  66. func GeminiTextGenerationStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  67. helper.SetEventStreamHeaders(c)
  68. return geminiStreamHandler(c, info, resp, func(data string, geminiResponse *dto.GeminiChatResponse) bool {
  69. err := helper.StringData(c, data)
  70. if err != nil {
  71. logger.LogError(c, "failed to write stream data: "+err.Error())
  72. return false
  73. }
  74. info.SendResponseCount++
  75. return true
  76. })
  77. }