adaptor.go 7.9 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261
  1. package vertex
  2. import (
  3. "encoding/json"
  4. "errors"
  5. "fmt"
  6. "io"
  7. "net/http"
  8. "one-api/dto"
  9. "one-api/relay/channel"
  10. "one-api/relay/channel/claude"
  11. "one-api/relay/channel/gemini"
  12. "one-api/relay/channel/openai"
  13. relaycommon "one-api/relay/common"
  14. "one-api/relay/constant"
  15. "one-api/setting/model_setting"
  16. "strings"
  17. "github.com/gin-gonic/gin"
  18. )
// Request modes select which upstream protocol the adaptor speaks on
// Vertex AI: Anthropic Claude, Google Gemini, or the OpenAI-compatible
// Llama endpoint. Set by Init from the upstream model name.
const (
	RequestModeClaude = 1
	RequestModeGemini = 2
	RequestModeLlama  = 3
)
// claudeModelMap translates public Anthropic model names (with a trailing
// "-YYYYMMDD" date) into the "@"-versioned identifiers Vertex AI expects.
// Models missing from this map are passed through unchanged.
var claudeModelMap = map[string]string{
	"claude-3-sonnet-20240229":   "claude-3-sonnet@20240229",
	"claude-3-opus-20240229":     "claude-3-opus@20240229",
	"claude-3-haiku-20240307":    "claude-3-haiku@20240307",
	"claude-3-5-sonnet-20240620": "claude-3-5-sonnet@20240620",
	"claude-3-5-sonnet-20241022": "claude-3-5-sonnet-v2@20241022",
	"claude-3-7-sonnet-20250219": "claude-3-7-sonnet@20250219",
	"claude-sonnet-4-20250514":   "claude-sonnet-4@20250514",
	"claude-opus-4-20250514":     "claude-opus-4@20250514",
}
// anthropicVersion is the API version string Vertex AI requires inside
// Claude request payloads (see copyRequest).
const anthropicVersion = "vertex-2023-10-16"
// Adaptor relays requests to Google Vertex AI, dispatching to the Claude,
// Gemini, or Llama (OpenAI-compatible) upstream protocol.
type Adaptor struct {
	RequestMode        int         // one of RequestModeClaude/Gemini/Llama; set by Init
	AccountCredentials Credentials // decoded from the channel API key in GetRequestURL
}
  39. func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) {
  40. if v, ok := claudeModelMap[info.UpstreamModelName]; ok {
  41. c.Set("request_model", v)
  42. } else {
  43. c.Set("request_model", request.Model)
  44. }
  45. vertexClaudeReq := copyRequest(request, anthropicVersion)
  46. return vertexClaudeReq, nil
  47. }
  48. func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
  49. //TODO implement me
  50. return nil, errors.New("not implemented")
  51. }
  52. func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
  53. //TODO implement me
  54. return nil, errors.New("not implemented")
  55. }
  56. func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
  57. if strings.HasPrefix(info.UpstreamModelName, "claude") {
  58. a.RequestMode = RequestModeClaude
  59. } else if strings.HasPrefix(info.UpstreamModelName, "gemini") {
  60. a.RequestMode = RequestModeGemini
  61. } else if strings.Contains(info.UpstreamModelName, "llama") {
  62. a.RequestMode = RequestModeLlama
  63. }
  64. }
  65. func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
  66. adc := &Credentials{}
  67. if err := json.Unmarshal([]byte(info.ApiKey), adc); err != nil {
  68. return "", fmt.Errorf("failed to decode credentials file: %w", err)
  69. }
  70. region := GetModelRegion(info.ApiVersion, info.OriginModelName)
  71. a.AccountCredentials = *adc
  72. suffix := ""
  73. if a.RequestMode == RequestModeGemini {
  74. if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
  75. // 新增逻辑:处理 -thinking-<budget> 格式
  76. if strings.Contains(info.UpstreamModelName, "-thinking-") {
  77. parts := strings.Split(info.UpstreamModelName, "-thinking-")
  78. info.UpstreamModelName = parts[0]
  79. } else if strings.HasSuffix(info.UpstreamModelName, "-thinking") { // 旧的适配
  80. info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-thinking")
  81. } else if strings.HasSuffix(info.UpstreamModelName, "-nothinking") {
  82. info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-nothinking")
  83. }
  84. }
  85. if info.IsStream {
  86. suffix = "streamGenerateContent?alt=sse"
  87. } else {
  88. suffix = "generateContent"
  89. }
  90. if region == "global" {
  91. return fmt.Sprintf(
  92. "https://aiplatform.googleapis.com/v1/projects/%s/locations/global/publishers/google/models/%s:%s",
  93. adc.ProjectID,
  94. info.UpstreamModelName,
  95. suffix,
  96. ), nil
  97. } else {
  98. return fmt.Sprintf(
  99. "https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/google/models/%s:%s",
  100. region,
  101. adc.ProjectID,
  102. region,
  103. info.UpstreamModelName,
  104. suffix,
  105. ), nil
  106. }
  107. } else if a.RequestMode == RequestModeClaude {
  108. if info.IsStream {
  109. suffix = "streamRawPredict?alt=sse"
  110. } else {
  111. suffix = "rawPredict"
  112. }
  113. model := info.UpstreamModelName
  114. if v, ok := claudeModelMap[info.UpstreamModelName]; ok {
  115. model = v
  116. }
  117. if region == "global" {
  118. return fmt.Sprintf(
  119. "https://aiplatform.googleapis.com/v1/projects/%s/locations/global/publishers/anthropic/models/%s:%s",
  120. adc.ProjectID,
  121. model,
  122. suffix,
  123. ), nil
  124. } else {
  125. return fmt.Sprintf(
  126. "https://%s-aiplatform.googleapis.com/v1/projects/%s/locations/%s/publishers/anthropic/models/%s:%s",
  127. region,
  128. adc.ProjectID,
  129. region,
  130. model,
  131. suffix,
  132. ), nil
  133. }
  134. } else if a.RequestMode == RequestModeLlama {
  135. return fmt.Sprintf(
  136. "https://%s-aiplatform.googleapis.com/v1beta1/projects/%s/locations/%s/endpoints/openapi/chat/completions",
  137. region,
  138. adc.ProjectID,
  139. region,
  140. ), nil
  141. }
  142. return "", errors.New("unsupported request mode")
  143. }
  144. func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *relaycommon.RelayInfo) error {
  145. channel.SetupApiRequestHeader(info, c, req)
  146. accessToken, err := getAccessToken(a, info)
  147. if err != nil {
  148. return err
  149. }
  150. req.Set("Authorization", "Bearer "+accessToken)
  151. return nil
  152. }
  153. func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
  154. if request == nil {
  155. return nil, errors.New("request is nil")
  156. }
  157. if a.RequestMode == RequestModeClaude {
  158. claudeReq, err := claude.RequestOpenAI2ClaudeMessage(*request)
  159. if err != nil {
  160. return nil, err
  161. }
  162. vertexClaudeReq := copyRequest(claudeReq, anthropicVersion)
  163. c.Set("request_model", claudeReq.Model)
  164. info.UpstreamModelName = claudeReq.Model
  165. return vertexClaudeReq, nil
  166. } else if a.RequestMode == RequestModeGemini {
  167. geminiRequest, err := gemini.CovertGemini2OpenAI(*request, info)
  168. if err != nil {
  169. return nil, err
  170. }
  171. c.Set("request_model", request.Model)
  172. return geminiRequest, nil
  173. } else if a.RequestMode == RequestModeLlama {
  174. return request, nil
  175. }
  176. return nil, errors.New("unsupported request mode")
  177. }
// ConvertRerankRequest is not supported for Vertex AI channels.
// NOTE(review): unlike the other unimplemented conversions this returns
// (nil, nil) rather than a "not implemented" error — confirm callers
// tolerate a nil payload before aligning it with the other stubs.
func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
	return nil, nil
}
  181. func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.EmbeddingRequest) (any, error) {
  182. //TODO implement me
  183. return nil, errors.New("not implemented")
  184. }
  185. func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
  186. // TODO implement me
  187. return nil, errors.New("not implemented")
  188. }
// DoRequest sends the converted request body upstream via the shared
// channel HTTP helper, passing the adaptor for URL/header setup.
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) {
	return channel.DoApiRequest(a, c, info, requestBody)
}
  192. func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *dto.OpenAIErrorWithStatusCode) {
  193. if info.IsStream {
  194. switch a.RequestMode {
  195. case RequestModeClaude:
  196. err, usage = claude.ClaudeStreamHandler(c, resp, info, claude.RequestModeMessage)
  197. case RequestModeGemini:
  198. if info.RelayMode == constant.RelayModeGemini {
  199. usage, err = gemini.GeminiTextGenerationStreamHandler(c, resp, info)
  200. } else {
  201. err, usage = gemini.GeminiChatStreamHandler(c, resp, info)
  202. }
  203. case RequestModeLlama:
  204. err, usage = openai.OaiStreamHandler(c, resp, info)
  205. }
  206. } else {
  207. switch a.RequestMode {
  208. case RequestModeClaude:
  209. err, usage = claude.ClaudeHandler(c, resp, claude.RequestModeMessage, info)
  210. case RequestModeGemini:
  211. if info.RelayMode == constant.RelayModeGemini {
  212. usage, err = gemini.GeminiTextGenerationHandler(c, resp, info)
  213. } else {
  214. err, usage = gemini.GeminiChatHandler(c, resp, info)
  215. }
  216. case RequestModeLlama:
  217. err, usage = openai.OpenaiHandler(c, resp, info)
  218. }
  219. }
  220. return
  221. }
  222. func (a *Adaptor) GetModelList() []string {
  223. var modelList []string
  224. for i, s := range ModelList {
  225. modelList = append(modelList, s)
  226. ModelList[i] = s
  227. }
  228. for i, s := range claude.ModelList {
  229. modelList = append(modelList, s)
  230. claude.ModelList[i] = s
  231. }
  232. for i, s := range gemini.ModelList {
  233. modelList = append(modelList, s)
  234. gemini.ModelList[i] = s
  235. }
  236. return modelList
  237. }
// GetChannelName returns the package-level channel display name.
func (a *Adaptor) GetChannelName() string {
	return ChannelName
}