convert.go 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800
  1. package service
  2. import (
  3. "encoding/json"
  4. "fmt"
  5. "one-api/common"
  6. "one-api/constant"
  7. "one-api/dto"
  8. "one-api/relay/channel/openrouter"
  9. relaycommon "one-api/relay/common"
  10. "strings"
  11. )
// ClaudeToOpenAIRequest converts an Anthropic (Claude) Messages API request into
// an OpenAI chat-completions request so it can be relayed to an OpenAI-style
// upstream. It maps sampling parameters, stop sequences, tool definitions,
// the system prompt, and the message list (including images, tool_use and
// tool_result blocks). Returns an error only when reasoning options cannot be
// marshalled or a message body cannot be parsed.
func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
	// Base request: sampling parameters carry over one-to-one.
	openAIRequest := dto.GeneralOpenAIRequest{
		Model:       claudeRequest.Model,
		MaxTokens:   claudeRequest.MaxTokens,
		Temperature: claudeRequest.Temperature,
		TopP:        claudeRequest.TopP,
		Stream:      claudeRequest.Stream,
	}
	isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter
	// Claude "extended thinking" has no direct OpenAI equivalent:
	// OpenRouter accepts a structured reasoning option; for other channels the
	// convention is a "-thinking" suffix on the model name.
	if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
		if isOpenRouter {
			reasoning := openrouter.RequestReasoning{
				MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
			}
			reasoningJSON, err := json.Marshal(reasoning)
			if err != nil {
				return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
			}
			openAIRequest.Reasoning = reasoningJSON
		} else {
			// Append "-thinking" only when the caller originally asked for a
			// thinking model and the suffix is not already present.
			thinkingSuffix := "-thinking"
			if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
				!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
				openAIRequest.Model = openAIRequest.Model + thinkingSuffix
			}
		}
	}
	// Convert stop sequences: a single sequence is sent as a plain string,
	// multiple sequences as a list (both accepted by the OpenAI API).
	if len(claudeRequest.StopSequences) == 1 {
		openAIRequest.Stop = claudeRequest.StopSequences[0]
	} else if len(claudeRequest.StopSequences) > 1 {
		openAIRequest.Stop = claudeRequest.StopSequences
	}
	// Convert tool definitions. The conversion error is deliberately ignored:
	// on failure `tools` is empty and the request is sent without tools.
	tools, _ := common.Any2Type[[]dto.Tool](claudeRequest.Tools)
	openAITools := make([]dto.ToolCallRequest, 0)
	for _, claudeTool := range tools {
		openAITool := dto.ToolCallRequest{
			Type: "function",
			Function: dto.FunctionRequest{
				Name:        claudeTool.Name,
				Description: claudeTool.Description,
				Parameters:  claudeTool.InputSchema,
			},
		}
		openAITools = append(openAITools, openAITool)
	}
	openAIRequest.Tools = openAITools
	// Convert messages
	openAIMessages := make([]dto.Message, 0)
	// Add system message if present. Claude carries the system prompt as a
	// dedicated field; OpenAI expects a leading role:"system" message.
	if claudeRequest.System != nil {
		if claudeRequest.IsStringSystem() && claudeRequest.GetStringSystem() != "" {
			openAIMessage := dto.Message{
				Role: "system",
			}
			openAIMessage.SetStringContent(claudeRequest.GetStringSystem())
			openAIMessages = append(openAIMessages, openAIMessage)
		} else {
			// Structured system prompt (list of blocks).
			systems := claudeRequest.ParseSystem()
			if len(systems) > 0 {
				openAIMessage := dto.Message{
					Role: "system",
				}
				// OpenRouter Claude models understand Anthropic cache_control
				// markers, so keep the blocks; otherwise flatten to one string.
				isOpenRouterClaude := isOpenRouter && strings.HasPrefix(info.UpstreamModelName, "anthropic/claude")
				if isOpenRouterClaude {
					systemMediaMessages := make([]dto.MediaContent, 0, len(systems))
					for _, system := range systems {
						message := dto.MediaContent{
							Type:         "text",
							Text:         system.GetText(),
							CacheControl: system.CacheControl,
						}
						systemMediaMessages = append(systemMediaMessages, message)
					}
					openAIMessage.SetMediaContent(systemMediaMessages)
				} else {
					systemStr := ""
					for _, system := range systems {
						if system.Text != nil {
							systemStr += *system.Text
						}
					}
					openAIMessage.SetStringContent(systemStr)
				}
				openAIMessages = append(openAIMessages, openAIMessage)
			}
		}
	}
	for _, claudeMessage := range claudeRequest.Messages {
		openAIMessage := dto.Message{
			Role: claudeMessage.Role,
		}
		if claudeMessage.IsStringContent() {
			// Plain-text message: copy straight through.
			openAIMessage.SetStringContent(claudeMessage.GetStringContent())
		} else {
			// Structured message: walk the content blocks.
			content, err := claudeMessage.ParseContent()
			if err != nil {
				return nil, err
			}
			contents := content
			var toolCalls []dto.ToolCallRequest
			mediaMessages := make([]dto.MediaContent, 0, len(contents))
			for _, mediaMsg := range contents {
				switch mediaMsg.Type {
				case "text":
					message := dto.MediaContent{
						Type:         "text",
						Text:         mediaMsg.GetText(),
						CacheControl: mediaMsg.CacheControl,
					}
					mediaMessages = append(mediaMessages, message)
				case "image":
					// Re-encode the Claude base64 image as an OpenAI data URL.
					imageData := fmt.Sprintf("data:%s;base64,%s", mediaMsg.Source.MediaType, mediaMsg.Source.Data)
					mediaMessage := dto.MediaContent{
						Type:     "image_url",
						ImageUrl: &dto.MessageImageUrl{Url: imageData},
					}
					mediaMessages = append(mediaMessages, mediaMessage)
				case "tool_use":
					// Assistant tool invocation -> OpenAI tool call.
					toolCall := dto.ToolCallRequest{
						ID:   mediaMsg.Id,
						Type: "function",
						Function: dto.FunctionRequest{
							Name:      mediaMsg.Name,
							Arguments: toJSONString(mediaMsg.Input),
						},
					}
					toolCalls = append(toolCalls, toolCall)
				case "tool_result":
					// Tool results become separate role:"tool" messages,
					// appended immediately (they precede the current message).
					// NOTE(review): &mediaMsg.Name takes the address of the loop
					// variable — under pre-1.22 loop semantics later iterations
					// could clobber it; confirm the module's Go version.
					oaiToolMessage := dto.Message{
						Role:       "tool",
						Name:       &mediaMsg.Name,
						ToolCallId: mediaMsg.ToolUseId,
					}
					if mediaMsg.IsStringContent() {
						oaiToolMessage.SetStringContent(mediaMsg.GetStringContent())
					} else {
						// Non-string result: serialize the media blocks to JSON.
						mediaContents := mediaMsg.ParseMediaContent()
						encodeJson, _ := common.Marshal(mediaContents)
						oaiToolMessage.SetStringContent(string(encodeJson))
					}
					openAIMessages = append(openAIMessages, oaiToolMessage)
				}
			}
			if len(toolCalls) > 0 {
				openAIMessage.SetToolCalls(toolCalls)
			}
			// NOTE(review): when a message carries both text and tool_use blocks
			// the text blocks are dropped — confirm this is intentional.
			if len(mediaMessages) > 0 && len(toolCalls) == 0 {
				openAIMessage.SetMediaContent(mediaMessages)
			}
		}
		// Skip messages that ended up with neither content nor tool calls
		// (e.g. a message that contained only tool_result blocks).
		if len(openAIMessage.ParseContent()) > 0 || len(openAIMessage.ToolCalls) > 0 {
			openAIMessages = append(openAIMessages, openAIMessage)
		}
	}
	openAIRequest.Messages = openAIMessages
	return &openAIRequest, nil
}
  176. func generateStopBlock(index int) *dto.ClaudeResponse {
  177. return &dto.ClaudeResponse{
  178. Type: "content_block_stop",
  179. Index: common.GetPointer[int](index),
  180. }
  181. }
  182. func StreamResponseOpenAI2Claude(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) []*dto.ClaudeResponse {
  183. var claudeResponses []*dto.ClaudeResponse
  184. if info.SendResponseCount == 1 {
  185. msg := &dto.ClaudeMediaMessage{
  186. Id: openAIResponse.Id,
  187. Model: openAIResponse.Model,
  188. Type: "message",
  189. Role: "assistant",
  190. Usage: &dto.ClaudeUsage{
  191. InputTokens: info.PromptTokens,
  192. OutputTokens: 0,
  193. },
  194. }
  195. msg.SetContent(make([]any, 0))
  196. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  197. Type: "message_start",
  198. Message: msg,
  199. })
  200. claudeResponses = append(claudeResponses)
  201. //claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  202. // Type: "ping",
  203. //})
  204. if openAIResponse.IsToolCall() {
  205. resp := &dto.ClaudeResponse{
  206. Type: "content_block_start",
  207. ContentBlock: &dto.ClaudeMediaMessage{
  208. Id: openAIResponse.GetFirstToolCall().ID,
  209. Type: "tool_use",
  210. Name: openAIResponse.GetFirstToolCall().Function.Name,
  211. },
  212. }
  213. resp.SetIndex(0)
  214. claudeResponses = append(claudeResponses, resp)
  215. } else {
  216. }
  217. // 判断首个响应是否存在内容(非标准的 OpenAI 响应)
  218. if len(openAIResponse.Choices) > 0 && len(openAIResponse.Choices[0].Delta.GetContentString()) > 0 {
  219. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  220. Index: &info.ClaudeConvertInfo.Index,
  221. Type: "content_block_start",
  222. ContentBlock: &dto.ClaudeMediaMessage{
  223. Type: "text",
  224. Text: common.GetPointer[string](""),
  225. },
  226. })
  227. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  228. Type: "content_block_delta",
  229. Delta: &dto.ClaudeMediaMessage{
  230. Type: "text",
  231. Text: common.GetPointer[string](openAIResponse.Choices[0].Delta.GetContentString()),
  232. },
  233. })
  234. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  235. }
  236. return claudeResponses
  237. }
  238. if len(openAIResponse.Choices) == 0 {
  239. // no choices
  240. // 可能为非标准的 OpenAI 响应,判断是否已经完成
  241. if info.Done {
  242. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  243. oaiUsage := info.ClaudeConvertInfo.Usage
  244. if oaiUsage != nil {
  245. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  246. Type: "message_delta",
  247. Usage: &dto.ClaudeUsage{
  248. InputTokens: oaiUsage.PromptTokens,
  249. OutputTokens: oaiUsage.CompletionTokens,
  250. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  251. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  252. },
  253. Delta: &dto.ClaudeMediaMessage{
  254. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  255. },
  256. })
  257. }
  258. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  259. Type: "message_stop",
  260. })
  261. }
  262. return claudeResponses
  263. } else {
  264. chosenChoice := openAIResponse.Choices[0]
  265. if chosenChoice.FinishReason != nil && *chosenChoice.FinishReason != "" {
  266. // should be done
  267. info.FinishReason = *chosenChoice.FinishReason
  268. return claudeResponses
  269. }
  270. if info.Done {
  271. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  272. oaiUsage := info.ClaudeConvertInfo.Usage
  273. if oaiUsage != nil {
  274. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  275. Type: "message_delta",
  276. Usage: &dto.ClaudeUsage{
  277. InputTokens: oaiUsage.PromptTokens,
  278. OutputTokens: oaiUsage.CompletionTokens,
  279. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  280. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  281. },
  282. Delta: &dto.ClaudeMediaMessage{
  283. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  284. },
  285. })
  286. }
  287. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  288. Type: "message_stop",
  289. })
  290. } else {
  291. var claudeResponse dto.ClaudeResponse
  292. var isEmpty bool
  293. claudeResponse.Type = "content_block_delta"
  294. if len(chosenChoice.Delta.ToolCalls) > 0 {
  295. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeTools {
  296. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  297. info.ClaudeConvertInfo.Index++
  298. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  299. Index: &info.ClaudeConvertInfo.Index,
  300. Type: "content_block_start",
  301. ContentBlock: &dto.ClaudeMediaMessage{
  302. Id: openAIResponse.GetFirstToolCall().ID,
  303. Type: "tool_use",
  304. Name: openAIResponse.GetFirstToolCall().Function.Name,
  305. Input: map[string]interface{}{},
  306. },
  307. })
  308. }
  309. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
  310. // tools delta
  311. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  312. Type: "input_json_delta",
  313. PartialJson: &chosenChoice.Delta.ToolCalls[0].Function.Arguments,
  314. }
  315. } else {
  316. reasoning := chosenChoice.Delta.GetReasoningContent()
  317. textContent := chosenChoice.Delta.GetContentString()
  318. if reasoning != "" || textContent != "" {
  319. if reasoning != "" {
  320. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeThinking {
  321. //info.ClaudeConvertInfo.Index++
  322. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  323. Index: &info.ClaudeConvertInfo.Index,
  324. Type: "content_block_start",
  325. ContentBlock: &dto.ClaudeMediaMessage{
  326. Type: "thinking",
  327. Thinking: "",
  328. },
  329. })
  330. }
  331. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
  332. // text delta
  333. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  334. Type: "thinking_delta",
  335. Thinking: reasoning,
  336. }
  337. } else {
  338. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeText {
  339. if info.LastMessagesType == relaycommon.LastMessageTypeThinking || info.LastMessagesType == relaycommon.LastMessageTypeTools {
  340. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  341. info.ClaudeConvertInfo.Index++
  342. }
  343. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  344. Index: &info.ClaudeConvertInfo.Index,
  345. Type: "content_block_start",
  346. ContentBlock: &dto.ClaudeMediaMessage{
  347. Type: "text",
  348. Text: common.GetPointer[string](""),
  349. },
  350. })
  351. }
  352. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  353. // text delta
  354. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  355. Type: "text_delta",
  356. Text: common.GetPointer[string](textContent),
  357. }
  358. }
  359. } else {
  360. isEmpty = true
  361. }
  362. }
  363. claudeResponse.Index = &info.ClaudeConvertInfo.Index
  364. if !isEmpty {
  365. claudeResponses = append(claudeResponses, &claudeResponse)
  366. }
  367. }
  368. }
  369. return claudeResponses
  370. }
  371. func ResponseOpenAI2Claude(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.ClaudeResponse {
  372. var stopReason string
  373. contents := make([]dto.ClaudeMediaMessage, 0)
  374. claudeResponse := &dto.ClaudeResponse{
  375. Id: openAIResponse.Id,
  376. Type: "message",
  377. Role: "assistant",
  378. Model: openAIResponse.Model,
  379. }
  380. for _, choice := range openAIResponse.Choices {
  381. stopReason = stopReasonOpenAI2Claude(choice.FinishReason)
  382. claudeContent := dto.ClaudeMediaMessage{}
  383. if choice.FinishReason == "tool_calls" {
  384. claudeContent.Type = "tool_use"
  385. claudeContent.Id = choice.Message.ToolCallId
  386. claudeContent.Name = choice.Message.ParseToolCalls()[0].Function.Name
  387. var mapParams map[string]interface{}
  388. if err := json.Unmarshal([]byte(choice.Message.ParseToolCalls()[0].Function.Arguments), &mapParams); err == nil {
  389. claudeContent.Input = mapParams
  390. } else {
  391. claudeContent.Input = choice.Message.ParseToolCalls()[0].Function.Arguments
  392. }
  393. } else {
  394. claudeContent.Type = "text"
  395. claudeContent.SetText(choice.Message.StringContent())
  396. }
  397. contents = append(contents, claudeContent)
  398. }
  399. claudeResponse.Content = contents
  400. claudeResponse.StopReason = stopReason
  401. claudeResponse.Usage = &dto.ClaudeUsage{
  402. InputTokens: openAIResponse.PromptTokens,
  403. OutputTokens: openAIResponse.CompletionTokens,
  404. }
  405. return claudeResponse
  406. }
  407. func stopReasonOpenAI2Claude(reason string) string {
  408. switch reason {
  409. case "stop":
  410. return "end_turn"
  411. case "stop_sequence":
  412. return "stop_sequence"
  413. case "max_tokens":
  414. return "max_tokens"
  415. case "tool_calls":
  416. return "tool_use"
  417. default:
  418. return reason
  419. }
  420. }
  421. func toJSONString(v interface{}) string {
  422. b, err := json.Marshal(v)
  423. if err != nil {
  424. return "{}"
  425. }
  426. return string(b)
  427. }
  428. func GeminiToOpenAIRequest(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
  429. openaiRequest := &dto.GeneralOpenAIRequest{
  430. Model: info.UpstreamModelName,
  431. Stream: info.IsStream,
  432. }
  433. // 转换 messages
  434. var messages []dto.Message
  435. for _, content := range geminiRequest.Contents {
  436. message := dto.Message{
  437. Role: convertGeminiRoleToOpenAI(content.Role),
  438. }
  439. // 处理 parts
  440. var mediaContents []dto.MediaContent
  441. var toolCalls []dto.ToolCallRequest
  442. for _, part := range content.Parts {
  443. if part.Text != "" {
  444. mediaContent := dto.MediaContent{
  445. Type: "text",
  446. Text: part.Text,
  447. }
  448. mediaContents = append(mediaContents, mediaContent)
  449. } else if part.InlineData != nil {
  450. mediaContent := dto.MediaContent{
  451. Type: "image_url",
  452. ImageUrl: &dto.MessageImageUrl{
  453. Url: fmt.Sprintf("data:%s;base64,%s", part.InlineData.MimeType, part.InlineData.Data),
  454. Detail: "auto",
  455. MimeType: part.InlineData.MimeType,
  456. },
  457. }
  458. mediaContents = append(mediaContents, mediaContent)
  459. } else if part.FileData != nil {
  460. mediaContent := dto.MediaContent{
  461. Type: "image_url",
  462. ImageUrl: &dto.MessageImageUrl{
  463. Url: part.FileData.FileUri,
  464. Detail: "auto",
  465. MimeType: part.FileData.MimeType,
  466. },
  467. }
  468. mediaContents = append(mediaContents, mediaContent)
  469. } else if part.FunctionCall != nil {
  470. // 处理 Gemini 的工具调用
  471. toolCall := dto.ToolCallRequest{
  472. ID: fmt.Sprintf("call_%d", len(toolCalls)+1), // 生成唯一ID
  473. Type: "function",
  474. Function: dto.FunctionRequest{
  475. Name: part.FunctionCall.FunctionName,
  476. Arguments: toJSONString(part.FunctionCall.Arguments),
  477. },
  478. }
  479. toolCalls = append(toolCalls, toolCall)
  480. } else if part.FunctionResponse != nil {
  481. // 处理 Gemini 的工具响应,创建单独的 tool 消息
  482. toolMessage := dto.Message{
  483. Role: "tool",
  484. ToolCallId: fmt.Sprintf("call_%d", len(toolCalls)), // 使用对应的调用ID
  485. }
  486. toolMessage.SetStringContent(toJSONString(part.FunctionResponse.Response))
  487. messages = append(messages, toolMessage)
  488. }
  489. }
  490. // 设置消息内容
  491. if len(toolCalls) > 0 {
  492. // 如果有工具调用,设置工具调用
  493. message.SetToolCalls(toolCalls)
  494. } else if len(mediaContents) == 1 && mediaContents[0].Type == "text" {
  495. // 如果只有一个文本内容,直接设置字符串
  496. message.Content = mediaContents[0].Text
  497. } else if len(mediaContents) > 0 {
  498. // 如果有多个内容或包含媒体,设置为数组
  499. message.SetMediaContent(mediaContents)
  500. }
  501. // 只有当消息有内容或工具调用时才添加
  502. if len(message.ParseContent()) > 0 || len(message.ToolCalls) > 0 {
  503. messages = append(messages, message)
  504. }
  505. }
  506. openaiRequest.Messages = messages
  507. if geminiRequest.GenerationConfig.Temperature != nil {
  508. openaiRequest.Temperature = geminiRequest.GenerationConfig.Temperature
  509. }
  510. if geminiRequest.GenerationConfig.TopP > 0 {
  511. openaiRequest.TopP = geminiRequest.GenerationConfig.TopP
  512. }
  513. if geminiRequest.GenerationConfig.TopK > 0 {
  514. openaiRequest.TopK = int(geminiRequest.GenerationConfig.TopK)
  515. }
  516. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  517. openaiRequest.MaxTokens = geminiRequest.GenerationConfig.MaxOutputTokens
  518. }
  519. // gemini stop sequences 最多 5 个,openai stop 最多 4 个
  520. if len(geminiRequest.GenerationConfig.StopSequences) > 0 {
  521. openaiRequest.Stop = geminiRequest.GenerationConfig.StopSequences[:4]
  522. }
  523. if geminiRequest.GenerationConfig.CandidateCount > 0 {
  524. openaiRequest.N = geminiRequest.GenerationConfig.CandidateCount
  525. }
  526. // 转换工具调用
  527. if len(geminiRequest.Tools) > 0 {
  528. var tools []dto.ToolCallRequest
  529. for _, tool := range geminiRequest.Tools {
  530. if tool.FunctionDeclarations != nil {
  531. // 将 Gemini 的 FunctionDeclarations 转换为 OpenAI 的 ToolCallRequest
  532. functionDeclarations, ok := tool.FunctionDeclarations.([]dto.FunctionRequest)
  533. if ok {
  534. for _, function := range functionDeclarations {
  535. openAITool := dto.ToolCallRequest{
  536. Type: "function",
  537. Function: dto.FunctionRequest{
  538. Name: function.Name,
  539. Description: function.Description,
  540. Parameters: function.Parameters,
  541. },
  542. }
  543. tools = append(tools, openAITool)
  544. }
  545. }
  546. }
  547. }
  548. if len(tools) > 0 {
  549. openaiRequest.Tools = tools
  550. }
  551. }
  552. // gemini system instructions
  553. if geminiRequest.SystemInstructions != nil {
  554. // 将系统指令作为第一条消息插入
  555. systemMessage := dto.Message{
  556. Role: "system",
  557. Content: extractTextFromGeminiParts(geminiRequest.SystemInstructions.Parts),
  558. }
  559. openaiRequest.Messages = append([]dto.Message{systemMessage}, openaiRequest.Messages...)
  560. }
  561. return openaiRequest, nil
  562. }
  563. func convertGeminiRoleToOpenAI(geminiRole string) string {
  564. switch geminiRole {
  565. case "user":
  566. return "user"
  567. case "model":
  568. return "assistant"
  569. case "function":
  570. return "function"
  571. default:
  572. return "user"
  573. }
  574. }
  575. func extractTextFromGeminiParts(parts []dto.GeminiPart) string {
  576. var texts []string
  577. for _, part := range parts {
  578. if part.Text != "" {
  579. texts = append(texts, part.Text)
  580. }
  581. }
  582. return strings.Join(texts, "\n")
  583. }
  584. // ResponseOpenAI2Gemini 将 OpenAI 响应转换为 Gemini 格式
  585. func ResponseOpenAI2Gemini(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  586. geminiResponse := &dto.GeminiChatResponse{
  587. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  588. PromptFeedback: dto.GeminiChatPromptFeedback{
  589. SafetyRatings: []dto.GeminiChatSafetyRating{},
  590. },
  591. UsageMetadata: dto.GeminiUsageMetadata{
  592. PromptTokenCount: openAIResponse.PromptTokens,
  593. CandidatesTokenCount: openAIResponse.CompletionTokens,
  594. TotalTokenCount: openAIResponse.PromptTokens + openAIResponse.CompletionTokens,
  595. },
  596. }
  597. for _, choice := range openAIResponse.Choices {
  598. candidate := dto.GeminiChatCandidate{
  599. Index: int64(choice.Index),
  600. SafetyRatings: []dto.GeminiChatSafetyRating{},
  601. }
  602. // 设置结束原因
  603. var finishReason string
  604. switch choice.FinishReason {
  605. case "stop":
  606. finishReason = "STOP"
  607. case "length":
  608. finishReason = "MAX_TOKENS"
  609. case "content_filter":
  610. finishReason = "SAFETY"
  611. case "tool_calls":
  612. finishReason = "STOP"
  613. default:
  614. finishReason = "STOP"
  615. }
  616. candidate.FinishReason = &finishReason
  617. // 转换消息内容
  618. content := dto.GeminiChatContent{
  619. Role: "model",
  620. Parts: make([]dto.GeminiPart, 0),
  621. }
  622. // 处理工具调用
  623. toolCalls := choice.Message.ParseToolCalls()
  624. if len(toolCalls) > 0 {
  625. for _, toolCall := range toolCalls {
  626. // 解析参数
  627. var args map[string]interface{}
  628. if toolCall.Function.Arguments != "" {
  629. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  630. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  631. }
  632. } else {
  633. args = make(map[string]interface{})
  634. }
  635. part := dto.GeminiPart{
  636. FunctionCall: &dto.FunctionCall{
  637. FunctionName: toolCall.Function.Name,
  638. Arguments: args,
  639. },
  640. }
  641. content.Parts = append(content.Parts, part)
  642. }
  643. } else {
  644. // 处理文本内容
  645. textContent := choice.Message.StringContent()
  646. if textContent != "" {
  647. part := dto.GeminiPart{
  648. Text: textContent,
  649. }
  650. content.Parts = append(content.Parts, part)
  651. }
  652. }
  653. candidate.Content = content
  654. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  655. }
  656. return geminiResponse
  657. }
  658. // StreamResponseOpenAI2Gemini 将 OpenAI 流式响应转换为 Gemini 格式
  659. func StreamResponseOpenAI2Gemini(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  660. // 检查是否有实际内容或结束标志
  661. hasContent := false
  662. hasFinishReason := false
  663. for _, choice := range openAIResponse.Choices {
  664. if len(choice.Delta.GetContentString()) > 0 || (choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0) {
  665. hasContent = true
  666. }
  667. if choice.FinishReason != nil {
  668. hasFinishReason = true
  669. }
  670. }
  671. // 如果没有实际内容且没有结束标志,跳过。主要针对 openai 流响应开头的空数据
  672. if !hasContent && !hasFinishReason {
  673. return nil
  674. }
  675. geminiResponse := &dto.GeminiChatResponse{
  676. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  677. PromptFeedback: dto.GeminiChatPromptFeedback{
  678. SafetyRatings: []dto.GeminiChatSafetyRating{},
  679. },
  680. UsageMetadata: dto.GeminiUsageMetadata{
  681. PromptTokenCount: info.PromptTokens,
  682. CandidatesTokenCount: 0, // 流式响应中可能没有完整的 usage 信息
  683. TotalTokenCount: info.PromptTokens,
  684. },
  685. }
  686. for _, choice := range openAIResponse.Choices {
  687. candidate := dto.GeminiChatCandidate{
  688. Index: int64(choice.Index),
  689. SafetyRatings: []dto.GeminiChatSafetyRating{},
  690. }
  691. // 设置结束原因
  692. if choice.FinishReason != nil {
  693. var finishReason string
  694. switch *choice.FinishReason {
  695. case "stop":
  696. finishReason = "STOP"
  697. case "length":
  698. finishReason = "MAX_TOKENS"
  699. case "content_filter":
  700. finishReason = "SAFETY"
  701. case "tool_calls":
  702. finishReason = "STOP"
  703. default:
  704. finishReason = "STOP"
  705. }
  706. candidate.FinishReason = &finishReason
  707. }
  708. // 转换消息内容
  709. content := dto.GeminiChatContent{
  710. Role: "model",
  711. Parts: make([]dto.GeminiPart, 0),
  712. }
  713. // 处理工具调用
  714. if choice.Delta.ToolCalls != nil {
  715. for _, toolCall := range choice.Delta.ToolCalls {
  716. // 解析参数
  717. var args map[string]interface{}
  718. if toolCall.Function.Arguments != "" {
  719. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  720. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  721. }
  722. } else {
  723. args = make(map[string]interface{})
  724. }
  725. part := dto.GeminiPart{
  726. FunctionCall: &dto.FunctionCall{
  727. FunctionName: toolCall.Function.Name,
  728. Arguments: args,
  729. },
  730. }
  731. content.Parts = append(content.Parts, part)
  732. }
  733. } else {
  734. // 处理文本内容
  735. textContent := choice.Delta.GetContentString()
  736. if textContent != "" {
  737. part := dto.GeminiPart{
  738. Text: textContent,
  739. }
  740. content.Parts = append(content.Parts, part)
  741. }
  742. }
  743. candidate.Content = content
  744. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  745. }
  746. return geminiResponse
  747. }