// relay-gemini-native.go — handlers for relaying Gemini native API responses.
package gemini

import (
	"fmt"
	"io"
	"net/http"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/constant"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)
  16. func GeminiTextGenerationHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  17. defer service.CloseResponseBodyGracefully(resp)
  18. // 读取响应体
  19. responseBody, err := io.ReadAll(resp.Body)
  20. if err != nil {
  21. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  22. }
  23. if common.DebugEnabled {
  24. println(string(responseBody))
  25. }
  26. // 解析为 Gemini 原生响应格式
  27. var geminiResponse dto.GeminiChatResponse
  28. err = common.Unmarshal(responseBody, &geminiResponse)
  29. if err != nil {
  30. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  31. }
  32. if len(geminiResponse.Candidates) == 0 && geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
  33. common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
  34. }
  35. // 计算使用量(基于 UsageMetadata)
  36. usage := dto.Usage{
  37. PromptTokens: geminiResponse.UsageMetadata.PromptTokenCount,
  38. CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount + geminiResponse.UsageMetadata.ThoughtsTokenCount,
  39. TotalTokens: geminiResponse.UsageMetadata.TotalTokenCount,
  40. }
  41. usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
  42. usage.PromptTokensDetails.CachedTokens = geminiResponse.UsageMetadata.CachedContentTokenCount
  43. for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
  44. if detail.Modality == "AUDIO" {
  45. usage.PromptTokensDetails.AudioTokens = detail.TokenCount
  46. } else if detail.Modality == "TEXT" {
  47. usage.PromptTokensDetails.TextTokens = detail.TokenCount
  48. }
  49. }
  50. service.IOCopyBytesGracefully(c, resp, responseBody)
  51. return &usage, nil
  52. }
  53. func NativeGeminiEmbeddingHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.Usage, *types.NewAPIError) {
  54. defer service.CloseResponseBodyGracefully(resp)
  55. responseBody, err := io.ReadAll(resp.Body)
  56. if err != nil {
  57. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  58. }
  59. if common.DebugEnabled {
  60. println(string(responseBody))
  61. }
  62. usage := service.ResponseText2Usage(c, "", info.UpstreamModelName, info.GetEstimatePromptTokens())
  63. if info.IsGeminiBatchEmbedding {
  64. var geminiResponse dto.GeminiBatchEmbeddingResponse
  65. err = common.Unmarshal(responseBody, &geminiResponse)
  66. if err != nil {
  67. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  68. }
  69. } else {
  70. var geminiResponse dto.GeminiEmbeddingResponse
  71. err = common.Unmarshal(responseBody, &geminiResponse)
  72. if err != nil {
  73. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  74. }
  75. }
  76. service.IOCopyBytesGracefully(c, resp, responseBody)
  77. return usage, nil
  78. }
  79. func GeminiTextGenerationStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  80. helper.SetEventStreamHeaders(c)
  81. return geminiStreamHandler(c, info, resp, func(data string, geminiResponse *dto.GeminiChatResponse) bool {
  82. err := helper.StringData(c, data)
  83. if err != nil {
  84. logger.LogError(c, "failed to write stream data: "+err.Error())
  85. return false
  86. }
  87. info.SendResponseCount++
  88. return true
  89. })
  90. }