// relay-responses.go
  1. package relay
  2. import (
  3. "bytes"
  4. "encoding/json"
  5. "errors"
  6. "fmt"
  7. "io"
  8. "net/http"
  9. "one-api/common"
  10. "one-api/dto"
  11. relaycommon "one-api/relay/common"
  12. "one-api/relay/helper"
  13. "one-api/service"
  14. "one-api/setting"
  15. "one-api/setting/model_setting"
  16. "strings"
  17. "github.com/gin-gonic/gin"
  18. )
  19. func getAndValidateResponsesRequest(c *gin.Context, relayInfo *relaycommon.RelayInfo) (*dto.OpenAIResponsesRequest, error) {
  20. request := &dto.OpenAIResponsesRequest{}
  21. err := common.UnmarshalBodyReusable(c, request)
  22. if err != nil {
  23. return nil, err
  24. }
  25. if request.Model == "" {
  26. return nil, errors.New("model is required")
  27. }
  28. if len(request.Input) == 0 {
  29. return nil, errors.New("input is required")
  30. }
  31. relayInfo.IsStream = request.Stream
  32. return request, nil
  33. }
  34. func checkInputSensitive(textRequest *dto.OpenAIResponsesRequest, info *relaycommon.RelayInfo) ([]string, error) {
  35. sensitiveWords, err := service.CheckSensitiveInput(textRequest.Input)
  36. return sensitiveWords, err
  37. }
  38. func getInputTokens(req *dto.OpenAIResponsesRequest, info *relaycommon.RelayInfo) (int, error) {
  39. inputTokens, err := service.CountTokenInput(req.Input, req.Model)
  40. info.PromptTokens = inputTokens
  41. return inputTokens, err
  42. }
// ResponsesHelper drives a full /responses relay round trip:
// parse and validate the request, optionally run the sensitive-word check,
// apply model mapping, count prompt tokens, pre-consume quota, forward the
// request through the channel adaptor, and finally settle quota from the
// reported usage. A non-nil return is the error to send to the client.
func ResponsesHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
	relayInfo := relaycommon.GenRelayInfo(c)
	req, err := getAndValidateResponsesRequest(c, relayInfo)
	if err != nil {
		common.LogError(c, fmt.Sprintf("getAndValidateResponsesRequest error: %s", err.Error()))
		return service.OpenAIErrorWrapperLocal(err, "invalid_responses_request", http.StatusBadRequest)
	}
	if setting.ShouldCheckPromptSensitive() {
		sensitiveWords, err := checkInputSensitive(req, relayInfo)
		if err != nil {
			// A non-nil err here is how the scan reports detected words,
			// so the words are logged alongside the rejection.
			common.LogWarn(c, fmt.Sprintf("user sensitive words detected: %s", strings.Join(sensitiveWords, ", ")))
			return service.OpenAIErrorWrapperLocal(err, "check_request_sensitive_error", http.StatusBadRequest)
		}
	}
	// Rewrite the requested model to the upstream name chosen by mapping.
	err = helper.ModelMappedHelper(c, relayInfo)
	if err != nil {
		return service.OpenAIErrorWrapperLocal(err, "model_mapped_error", http.StatusBadRequest)
	}
	req.Model = relayInfo.UpstreamModelName
	// Reuse a previously computed prompt-token count from the gin context
	// if present; otherwise count now and cache it for later middleware.
	if value, exists := c.Get("prompt_tokens"); exists {
		promptTokens := value.(int)
		relayInfo.SetPromptTokens(promptTokens)
	} else {
		promptTokens, err := getInputTokens(req, relayInfo)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "count_input_tokens_error", http.StatusBadRequest)
		}
		c.Set("prompt_tokens", promptTokens)
	}
	priceData, err := helper.ModelPriceHelper(c, relayInfo, relayInfo.PromptTokens, int(req.MaxOutputTokens))
	if err != nil {
		return service.OpenAIErrorWrapperLocal(err, "model_price_error", http.StatusInternalServerError)
	}
	// pre consume quota
	// NOTE: `:=` assigns (not shadows) the named result openaiErr here,
	// because parameters/results share the function body's scope.
	preConsumedQuota, userQuota, openaiErr := preConsumeQuota(c, priceData.ShouldPreConsumedQuota, relayInfo)
	if openaiErr != nil {
		return openaiErr
	}
	// Refund the pre-consumed quota if any later step fails; the defer
	// observes the final value of the named result openaiErr.
	defer func() {
		if openaiErr != nil {
			returnPreConsumedQuota(c, relayInfo, userQuota, preConsumedQuota)
		}
	}()
	adaptor := GetAdaptor(relayInfo.ApiType)
	if adaptor == nil {
		return service.OpenAIErrorWrapperLocal(fmt.Errorf("invalid api type: %d", relayInfo.ApiType), "invalid_api_type", http.StatusBadRequest)
	}
	adaptor.Init(relayInfo)
	var requestBody io.Reader
	if model_setting.GetGlobalSettings().PassThroughRequestEnabled {
		// Pass-through mode: forward the client's raw body unmodified.
		body, err := common.GetRequestBody(c)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "get_request_body_error", http.StatusInternalServerError)
		}
		requestBody = bytes.NewBuffer(body)
	} else {
		// Convert the request into the upstream channel's expected shape.
		convertedRequest, err := adaptor.ConvertOpenAIResponsesRequest(c, relayInfo, *req)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "convert_request_error", http.StatusBadRequest)
		}
		jsonData, err := json.Marshal(convertedRequest)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "marshal_request_error", http.StatusInternalServerError)
		}
		// apply param override
		// Channel-configured overrides are merged on top of the converted
		// request via a map round trip (top-level keys are replaced).
		if len(relayInfo.ParamOverride) > 0 {
			reqMap := make(map[string]interface{})
			err = json.Unmarshal(jsonData, &reqMap)
			if err != nil {
				return service.OpenAIErrorWrapperLocal(err, "param_override_unmarshal_failed", http.StatusInternalServerError)
			}
			for key, value := range relayInfo.ParamOverride {
				reqMap[key] = value
			}
			jsonData, err = json.Marshal(reqMap)
			if err != nil {
				return service.OpenAIErrorWrapperLocal(err, "param_override_marshal_failed", http.StatusInternalServerError)
			}
		}
		if common.DebugEnabled {
			println("requestBody: ", string(jsonData))
		}
		requestBody = bytes.NewBuffer(jsonData)
	}
	var httpResp *http.Response
	resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
	}
	statusCodeMappingStr := c.GetString("status_code_mapping")
	// resp may be nil for adaptors that stream/handle the response
	// themselves; only inspect the status code when one is returned.
	if resp != nil {
		httpResp = resp.(*http.Response)
		if httpResp.StatusCode != http.StatusOK {
			openaiErr = service.RelayErrorHandler(httpResp, false)
			// reset status code 重置状态码
			service.ResetStatusCode(openaiErr, statusCodeMappingStr)
			return openaiErr
		}
	}
	usage, openaiErr := adaptor.DoResponse(c, httpResp, relayInfo)
	if openaiErr != nil {
		// reset status code 重置状态码
		service.ResetStatusCode(openaiErr, statusCodeMappingStr)
		return openaiErr
	}
	// Audio-capable models are billed through the audio-specific path;
	// everything else settles through the generic post-consume path.
	if strings.HasPrefix(relayInfo.OriginModelName, "gpt-4o-audio") {
		service.PostAudioConsumeQuota(c, relayInfo, usage.(*dto.Usage), preConsumedQuota, userQuota, priceData, "")
	} else {
		postConsumeQuota(c, relayInfo, usage.(*dto.Usage), preConsumedQuota, userQuota, priceData, "")
	}
	return nil
}