// relay_embedding.go
  1. package relay
  2. import (
  3. "bytes"
  4. "encoding/json"
  5. "fmt"
  6. "github.com/gin-gonic/gin"
  7. "net/http"
  8. "one-api/common"
  9. "one-api/dto"
  10. relaycommon "one-api/relay/common"
  11. relayconstant "one-api/relay/constant"
  12. "one-api/service"
  13. "one-api/setting"
  14. )
  15. func getEmbeddingPromptToken(embeddingRequest dto.EmbeddingRequest) int {
  16. token, _ := service.CountTokenInput(embeddingRequest.Input, embeddingRequest.Model)
  17. return token
  18. }
// validateEmbeddingRequest checks that the request carries a non-nil input
// and attempts to fill in a default model name for moderation/embedding
// relay modes when the client omitted one. Returns a non-nil error only
// when the input is missing.
//
// NOTE(review): embeddingRequest is passed BY VALUE, so the Model
// assignments below mutate a local copy that is discarded on return — the
// defaulting logic never reaches the caller. Either this should take
// *dto.EmbeddingRequest, or callers must apply the same defaults
// themselves before relying on Model being set.
func validateEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, embeddingRequest dto.EmbeddingRequest) error {
	if embeddingRequest.Input == nil {
		return fmt.Errorf("input is empty")
	}
	// Moderation requests default to OpenAI's latest moderation model.
	if info.RelayMode == relayconstant.RelayModeModerations && embeddingRequest.Model == "" {
		embeddingRequest.Model = "omni-moderation-latest"
	}
	// Embedding requests fall back to the model named in the URL path.
	if info.RelayMode == relayconstant.RelayModeEmbeddings && embeddingRequest.Model == "" {
		embeddingRequest.Model = c.Param("model")
	}
	return nil
}
  31. func EmbeddingHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
  32. relayInfo := relaycommon.GenRelayInfo(c)
  33. var embeddingRequest *dto.EmbeddingRequest
  34. err := common.UnmarshalBodyReusable(c, &embeddingRequest)
  35. if err != nil {
  36. common.LogError(c, fmt.Sprintf("getAndValidateTextRequest failed: %s", err.Error()))
  37. return service.OpenAIErrorWrapperLocal(err, "invalid_text_request", http.StatusBadRequest)
  38. }
  39. err = validateEmbeddingRequest(c, relayInfo, *embeddingRequest)
  40. if err != nil {
  41. return service.OpenAIErrorWrapperLocal(err, "invalid_embedding_request", http.StatusBadRequest)
  42. }
  43. // map model name
  44. modelMapping := c.GetString("model_mapping")
  45. //isModelMapped := false
  46. if modelMapping != "" && modelMapping != "{}" {
  47. modelMap := make(map[string]string)
  48. err := json.Unmarshal([]byte(modelMapping), &modelMap)
  49. if err != nil {
  50. return service.OpenAIErrorWrapperLocal(err, "unmarshal_model_mapping_failed", http.StatusInternalServerError)
  51. }
  52. if modelMap[embeddingRequest.Model] != "" {
  53. embeddingRequest.Model = modelMap[embeddingRequest.Model]
  54. // set upstream model name
  55. //isModelMapped = true
  56. }
  57. }
  58. relayInfo.UpstreamModelName = embeddingRequest.Model
  59. modelPrice, success := common.GetModelPrice(embeddingRequest.Model, false)
  60. groupRatio := setting.GetGroupRatio(relayInfo.Group)
  61. var preConsumedQuota int
  62. var ratio float64
  63. var modelRatio float64
  64. promptToken := getEmbeddingPromptToken(*embeddingRequest)
  65. if !success {
  66. preConsumedTokens := promptToken
  67. modelRatio = common.GetModelRatio(embeddingRequest.Model)
  68. ratio = modelRatio * groupRatio
  69. preConsumedQuota = int(float64(preConsumedTokens) * ratio)
  70. } else {
  71. preConsumedQuota = int(modelPrice * common.QuotaPerUnit * groupRatio)
  72. }
  73. relayInfo.PromptTokens = promptToken
  74. // pre-consume quota 预消耗配额
  75. preConsumedQuota, userQuota, openaiErr := preConsumeQuota(c, preConsumedQuota, relayInfo)
  76. if openaiErr != nil {
  77. return openaiErr
  78. }
  79. defer func() {
  80. if openaiErr != nil {
  81. returnPreConsumedQuota(c, relayInfo, userQuota, preConsumedQuota)
  82. }
  83. }()
  84. adaptor := GetAdaptor(relayInfo.ApiType)
  85. if adaptor == nil {
  86. return service.OpenAIErrorWrapperLocal(fmt.Errorf("invalid api type: %d", relayInfo.ApiType), "invalid_api_type", http.StatusBadRequest)
  87. }
  88. adaptor.Init(relayInfo)
  89. convertedRequest, err := adaptor.ConvertEmbeddingRequest(c, relayInfo, *embeddingRequest)
  90. if err != nil {
  91. return service.OpenAIErrorWrapperLocal(err, "convert_request_failed", http.StatusInternalServerError)
  92. }
  93. jsonData, err := json.Marshal(convertedRequest)
  94. if err != nil {
  95. return service.OpenAIErrorWrapperLocal(err, "json_marshal_failed", http.StatusInternalServerError)
  96. }
  97. requestBody := bytes.NewBuffer(jsonData)
  98. statusCodeMappingStr := c.GetString("status_code_mapping")
  99. resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
  100. if err != nil {
  101. return service.OpenAIErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
  102. }
  103. var httpResp *http.Response
  104. if resp != nil {
  105. httpResp = resp.(*http.Response)
  106. if httpResp.StatusCode != http.StatusOK {
  107. openaiErr = service.RelayErrorHandler(httpResp)
  108. // reset status code 重置状态码
  109. service.ResetStatusCode(openaiErr, statusCodeMappingStr)
  110. return openaiErr
  111. }
  112. }
  113. usage, openaiErr := adaptor.DoResponse(c, httpResp, relayInfo)
  114. if openaiErr != nil {
  115. // reset status code 重置状态码
  116. service.ResetStatusCode(openaiErr, statusCodeMappingStr)
  117. return openaiErr
  118. }
  119. postConsumeQuota(c, relayInfo, embeddingRequest.Model, usage.(*dto.Usage), ratio, preConsumedQuota, userQuota, modelRatio, groupRatio, modelPrice, success, "")
  120. return nil
  121. }