// convert.go — converts Claude and Gemini chat requests/responses to and from
// the OpenAI chat-completions format used internally by the relay.
  1. package service
  2. import (
  3. "encoding/json"
  4. "fmt"
  5. "one-api/common"
  6. "one-api/constant"
  7. "one-api/dto"
  8. "one-api/relay/channel/openrouter"
  9. relaycommon "one-api/relay/common"
  10. "strings"
  11. )
// ClaudeToOpenAIRequest converts a Claude Messages API request into an
// OpenAI chat-completions request: scalar sampling parameters, thinking
// configuration, stop sequences, tool definitions, the system prompt and
// the conversation messages are all mapped over.
func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
	// Carry the scalar parameters over unchanged.
	openAIRequest := dto.GeneralOpenAIRequest{
		Model:       claudeRequest.Model,
		MaxTokens:   claudeRequest.MaxTokens,
		Temperature: claudeRequest.Temperature,
		TopP:        claudeRequest.TopP,
		Stream:      claudeRequest.Stream,
	}
	isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter
	if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
		if isOpenRouter {
			// OpenRouter takes an explicit reasoning config with a token budget.
			reasoning := openrouter.RequestReasoning{
				MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
			}
			reasoningJSON, err := json.Marshal(reasoning)
			if err != nil {
				return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
			}
			openAIRequest.Reasoning = reasoningJSON
		} else {
			// Other channels signal thinking through a "-thinking" model-name
			// suffix; append it only if the caller's original model name had it
			// and the outgoing model name does not already carry it.
			thinkingSuffix := "-thinking"
			if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
				!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
				openAIRequest.Model = openAIRequest.Model + thinkingSuffix
			}
		}
	}
	// Convert stop sequences: a single entry becomes a plain string, more
	// than one stays a list (both forms are accepted by OpenAI's "stop").
	if len(claudeRequest.StopSequences) == 1 {
		openAIRequest.Stop = claudeRequest.StopSequences[0]
	} else if len(claudeRequest.StopSequences) > 1 {
		openAIRequest.Stop = claudeRequest.StopSequences
	}
	// Convert tools.
	// NOTE(review): the conversion error is deliberately discarded — malformed
	// tool definitions silently yield an empty tool list; confirm this
	// best-effort behavior is intended.
	tools, _ := common.Any2Type[[]dto.Tool](claudeRequest.Tools)
	openAITools := make([]dto.ToolCallRequest, 0)
	for _, claudeTool := range tools {
		openAITool := dto.ToolCallRequest{
			Type: "function",
			Function: dto.FunctionRequest{
				Name:        claudeTool.Name,
				Description: claudeTool.Description,
				Parameters:  claudeTool.InputSchema,
			},
		}
		openAITools = append(openAITools, openAITool)
	}
	openAIRequest.Tools = openAITools
	// Convert messages.
	openAIMessages := make([]dto.Message, 0)
	// Add the system message first, if present.
	if claudeRequest.System != nil {
		if claudeRequest.IsStringSystem() && claudeRequest.GetStringSystem() != "" {
			openAIMessage := dto.Message{
				Role: "system",
			}
			openAIMessage.SetStringContent(claudeRequest.GetStringSystem())
			openAIMessages = append(openAIMessages, openAIMessage)
		} else {
			// Structured (block-form) system prompt.
			systems := claudeRequest.ParseSystem()
			if len(systems) > 0 {
				openAIMessage := dto.Message{
					Role: "system",
				}
				isOpenRouterClaude := isOpenRouter && strings.HasPrefix(info.UpstreamModelName, "anthropic/claude")
				if isOpenRouterClaude {
					// Claude via OpenRouter keeps the block structure so that
					// per-block cache_control survives the round trip.
					systemMediaMessages := make([]dto.MediaContent, 0, len(systems))
					for _, system := range systems {
						message := dto.MediaContent{
							Type:         "text",
							Text:         system.GetText(),
							CacheControl: system.CacheControl,
						}
						systemMediaMessages = append(systemMediaMessages, message)
					}
					openAIMessage.SetMediaContent(systemMediaMessages)
				} else {
					// Everything else gets the text blocks concatenated into
					// one plain system string.
					systemStr := ""
					for _, system := range systems {
						if system.Text != nil {
							systemStr += *system.Text
						}
					}
					openAIMessage.SetStringContent(systemStr)
				}
				openAIMessages = append(openAIMessages, openAIMessage)
			}
		}
	}
	for _, claudeMessage := range claudeRequest.Messages {
		openAIMessage := dto.Message{
			Role: claudeMessage.Role,
		}
		if claudeMessage.IsStringContent() {
			openAIMessage.SetStringContent(claudeMessage.GetStringContent())
		} else {
			content, err := claudeMessage.ParseContent()
			if err != nil {
				return nil, err
			}
			contents := content
			var toolCalls []dto.ToolCallRequest
			mediaMessages := make([]dto.MediaContent, 0, len(contents))
			for _, mediaMsg := range contents {
				switch mediaMsg.Type {
				case "text":
					message := dto.MediaContent{
						Type:         "text",
						Text:         mediaMsg.GetText(),
						CacheControl: mediaMsg.CacheControl,
					}
					mediaMessages = append(mediaMessages, message)
				case "image":
					// Claude carries images as base64 source blocks; OpenAI
					// expects a data: URL in an image_url part.
					imageData := fmt.Sprintf("data:%s;base64,%s", mediaMsg.Source.MediaType, mediaMsg.Source.Data)
					mediaMessage := dto.MediaContent{
						Type:     "image_url",
						ImageUrl: &dto.MessageImageUrl{Url: imageData},
					}
					mediaMessages = append(mediaMessages, mediaMessage)
				case "tool_use":
					toolCall := dto.ToolCallRequest{
						ID:   mediaMsg.Id,
						Type: "function",
						Function: dto.FunctionRequest{
							Name:      mediaMsg.Name,
							Arguments: toJSONString(mediaMsg.Input),
						},
					}
					toolCalls = append(toolCalls, toolCall)
				case "tool_result":
					// A tool result becomes its own role:"tool" message,
					// appended immediately (it does not join mediaMessages).
					oaiToolMessage := dto.Message{
						Role:       "tool",
						Name:       &mediaMsg.Name,
						ToolCallId: mediaMsg.ToolUseId,
					}
					if mediaMsg.IsStringContent() {
						oaiToolMessage.SetStringContent(mediaMsg.GetStringContent())
					} else {
						// Non-string tool results are serialized to JSON text.
						mediaContents := mediaMsg.ParseMediaContent()
						encodeJson, _ := common.Marshal(mediaContents)
						oaiToolMessage.SetStringContent(string(encodeJson))
					}
					openAIMessages = append(openAIMessages, oaiToolMessage)
				}
			}
			if len(toolCalls) > 0 {
				openAIMessage.SetToolCalls(toolCalls)
			}
			// NOTE(review): when a message mixes text and tool_use blocks, the
			// text part is dropped (media content only set when there are no
			// tool calls) — confirm this is intended.
			if len(mediaMessages) > 0 && len(toolCalls) == 0 {
				openAIMessage.SetMediaContent(mediaMessages)
			}
		}
		// Skip messages that ended up with neither content nor tool calls.
		if len(openAIMessage.ParseContent()) > 0 || len(openAIMessage.ToolCalls) > 0 {
			openAIMessages = append(openAIMessages, openAIMessage)
		}
	}
	openAIRequest.Messages = openAIMessages
	return &openAIRequest, nil
}
  176. func generateStopBlock(index int) *dto.ClaudeResponse {
  177. return &dto.ClaudeResponse{
  178. Type: "content_block_stop",
  179. Index: common.GetPointer[int](index),
  180. }
  181. }
  182. func StreamResponseOpenAI2Claude(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) []*dto.ClaudeResponse {
  183. var claudeResponses []*dto.ClaudeResponse
  184. if info.SendResponseCount == 1 {
  185. msg := &dto.ClaudeMediaMessage{
  186. Id: openAIResponse.Id,
  187. Model: openAIResponse.Model,
  188. Type: "message",
  189. Role: "assistant",
  190. Usage: &dto.ClaudeUsage{
  191. InputTokens: info.PromptTokens,
  192. OutputTokens: 0,
  193. },
  194. }
  195. msg.SetContent(make([]any, 0))
  196. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  197. Type: "message_start",
  198. Message: msg,
  199. })
  200. claudeResponses = append(claudeResponses)
  201. //claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  202. // Type: "ping",
  203. //})
  204. if openAIResponse.IsToolCall() {
  205. resp := &dto.ClaudeResponse{
  206. Type: "content_block_start",
  207. ContentBlock: &dto.ClaudeMediaMessage{
  208. Id: openAIResponse.GetFirstToolCall().ID,
  209. Type: "tool_use",
  210. Name: openAIResponse.GetFirstToolCall().Function.Name,
  211. },
  212. }
  213. resp.SetIndex(0)
  214. claudeResponses = append(claudeResponses, resp)
  215. } else {
  216. }
  217. // 判断首个响应是否存在内容(非标准的 OpenAI 响应)
  218. if len(openAIResponse.Choices) > 0 && len(openAIResponse.Choices[0].Delta.GetContentString()) > 0 {
  219. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  220. Index: &info.ClaudeConvertInfo.Index,
  221. Type: "content_block_start",
  222. ContentBlock: &dto.ClaudeMediaMessage{
  223. Type: "text",
  224. Text: common.GetPointer[string](""),
  225. },
  226. })
  227. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  228. Type: "content_block_delta",
  229. Delta: &dto.ClaudeMediaMessage{
  230. Type: "text",
  231. Text: common.GetPointer[string](openAIResponse.Choices[0].Delta.GetContentString()),
  232. },
  233. })
  234. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  235. }
  236. return claudeResponses
  237. }
  238. if len(openAIResponse.Choices) == 0 {
  239. // no choices
  240. // 可能为非标准的 OpenAI 响应,判断是否已经完成
  241. if info.Done {
  242. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  243. oaiUsage := info.ClaudeConvertInfo.Usage
  244. if oaiUsage != nil {
  245. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  246. Type: "message_delta",
  247. Usage: &dto.ClaudeUsage{
  248. InputTokens: oaiUsage.PromptTokens,
  249. OutputTokens: oaiUsage.CompletionTokens,
  250. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  251. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  252. },
  253. Delta: &dto.ClaudeMediaMessage{
  254. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  255. },
  256. })
  257. }
  258. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  259. Type: "message_stop",
  260. })
  261. }
  262. return claudeResponses
  263. } else {
  264. chosenChoice := openAIResponse.Choices[0]
  265. if chosenChoice.FinishReason != nil && *chosenChoice.FinishReason != "" {
  266. // should be done
  267. info.FinishReason = *chosenChoice.FinishReason
  268. if !info.Done {
  269. return claudeResponses
  270. }
  271. }
  272. if info.Done {
  273. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  274. oaiUsage := info.ClaudeConvertInfo.Usage
  275. if oaiUsage != nil {
  276. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  277. Type: "message_delta",
  278. Usage: &dto.ClaudeUsage{
  279. InputTokens: oaiUsage.PromptTokens,
  280. OutputTokens: oaiUsage.CompletionTokens,
  281. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  282. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  283. },
  284. Delta: &dto.ClaudeMediaMessage{
  285. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  286. },
  287. })
  288. }
  289. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  290. Type: "message_stop",
  291. })
  292. } else {
  293. var claudeResponse dto.ClaudeResponse
  294. var isEmpty bool
  295. claudeResponse.Type = "content_block_delta"
  296. if len(chosenChoice.Delta.ToolCalls) > 0 {
  297. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeTools {
  298. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  299. info.ClaudeConvertInfo.Index++
  300. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  301. Index: &info.ClaudeConvertInfo.Index,
  302. Type: "content_block_start",
  303. ContentBlock: &dto.ClaudeMediaMessage{
  304. Id: openAIResponse.GetFirstToolCall().ID,
  305. Type: "tool_use",
  306. Name: openAIResponse.GetFirstToolCall().Function.Name,
  307. Input: map[string]interface{}{},
  308. },
  309. })
  310. }
  311. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
  312. // tools delta
  313. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  314. Type: "input_json_delta",
  315. PartialJson: &chosenChoice.Delta.ToolCalls[0].Function.Arguments,
  316. }
  317. } else {
  318. reasoning := chosenChoice.Delta.GetReasoningContent()
  319. textContent := chosenChoice.Delta.GetContentString()
  320. if reasoning != "" || textContent != "" {
  321. if reasoning != "" {
  322. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeThinking {
  323. //info.ClaudeConvertInfo.Index++
  324. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  325. Index: &info.ClaudeConvertInfo.Index,
  326. Type: "content_block_start",
  327. ContentBlock: &dto.ClaudeMediaMessage{
  328. Type: "thinking",
  329. Thinking: "",
  330. },
  331. })
  332. }
  333. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
  334. // text delta
  335. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  336. Type: "thinking_delta",
  337. Thinking: reasoning,
  338. }
  339. } else {
  340. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeText {
  341. if info.LastMessagesType == relaycommon.LastMessageTypeThinking || info.LastMessagesType == relaycommon.LastMessageTypeTools {
  342. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  343. info.ClaudeConvertInfo.Index++
  344. }
  345. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  346. Index: &info.ClaudeConvertInfo.Index,
  347. Type: "content_block_start",
  348. ContentBlock: &dto.ClaudeMediaMessage{
  349. Type: "text",
  350. Text: common.GetPointer[string](""),
  351. },
  352. })
  353. }
  354. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  355. // text delta
  356. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  357. Type: "text_delta",
  358. Text: common.GetPointer[string](textContent),
  359. }
  360. }
  361. } else {
  362. isEmpty = true
  363. }
  364. }
  365. claudeResponse.Index = &info.ClaudeConvertInfo.Index
  366. if !isEmpty {
  367. claudeResponses = append(claudeResponses, &claudeResponse)
  368. }
  369. }
  370. }
  371. return claudeResponses
  372. }
  373. func ResponseOpenAI2Claude(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.ClaudeResponse {
  374. var stopReason string
  375. contents := make([]dto.ClaudeMediaMessage, 0)
  376. claudeResponse := &dto.ClaudeResponse{
  377. Id: openAIResponse.Id,
  378. Type: "message",
  379. Role: "assistant",
  380. Model: openAIResponse.Model,
  381. }
  382. for _, choice := range openAIResponse.Choices {
  383. stopReason = stopReasonOpenAI2Claude(choice.FinishReason)
  384. claudeContent := dto.ClaudeMediaMessage{}
  385. if choice.FinishReason == "tool_calls" {
  386. claudeContent.Type = "tool_use"
  387. claudeContent.Id = choice.Message.ToolCallId
  388. claudeContent.Name = choice.Message.ParseToolCalls()[0].Function.Name
  389. var mapParams map[string]interface{}
  390. if err := json.Unmarshal([]byte(choice.Message.ParseToolCalls()[0].Function.Arguments), &mapParams); err == nil {
  391. claudeContent.Input = mapParams
  392. } else {
  393. claudeContent.Input = choice.Message.ParseToolCalls()[0].Function.Arguments
  394. }
  395. } else {
  396. claudeContent.Type = "text"
  397. claudeContent.SetText(choice.Message.StringContent())
  398. }
  399. contents = append(contents, claudeContent)
  400. }
  401. claudeResponse.Content = contents
  402. claudeResponse.StopReason = stopReason
  403. claudeResponse.Usage = &dto.ClaudeUsage{
  404. InputTokens: openAIResponse.PromptTokens,
  405. OutputTokens: openAIResponse.CompletionTokens,
  406. }
  407. return claudeResponse
  408. }
  409. func stopReasonOpenAI2Claude(reason string) string {
  410. switch reason {
  411. case "stop":
  412. return "end_turn"
  413. case "stop_sequence":
  414. return "stop_sequence"
  415. case "length":
  416. fallthrough
  417. case "max_tokens":
  418. return "max_tokens"
  419. case "tool_calls":
  420. return "tool_use"
  421. default:
  422. return reason
  423. }
  424. }
  425. func toJSONString(v interface{}) string {
  426. b, err := json.Marshal(v)
  427. if err != nil {
  428. return "{}"
  429. }
  430. return string(b)
  431. }
  432. func GeminiToOpenAIRequest(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
  433. openaiRequest := &dto.GeneralOpenAIRequest{
  434. Model: info.UpstreamModelName,
  435. Stream: info.IsStream,
  436. }
  437. // 转换 messages
  438. var messages []dto.Message
  439. for _, content := range geminiRequest.Contents {
  440. message := dto.Message{
  441. Role: convertGeminiRoleToOpenAI(content.Role),
  442. }
  443. // 处理 parts
  444. var mediaContents []dto.MediaContent
  445. var toolCalls []dto.ToolCallRequest
  446. for _, part := range content.Parts {
  447. if part.Text != "" {
  448. mediaContent := dto.MediaContent{
  449. Type: "text",
  450. Text: part.Text,
  451. }
  452. mediaContents = append(mediaContents, mediaContent)
  453. } else if part.InlineData != nil {
  454. mediaContent := dto.MediaContent{
  455. Type: "image_url",
  456. ImageUrl: &dto.MessageImageUrl{
  457. Url: fmt.Sprintf("data:%s;base64,%s", part.InlineData.MimeType, part.InlineData.Data),
  458. Detail: "auto",
  459. MimeType: part.InlineData.MimeType,
  460. },
  461. }
  462. mediaContents = append(mediaContents, mediaContent)
  463. } else if part.FileData != nil {
  464. mediaContent := dto.MediaContent{
  465. Type: "image_url",
  466. ImageUrl: &dto.MessageImageUrl{
  467. Url: part.FileData.FileUri,
  468. Detail: "auto",
  469. MimeType: part.FileData.MimeType,
  470. },
  471. }
  472. mediaContents = append(mediaContents, mediaContent)
  473. } else if part.FunctionCall != nil {
  474. // 处理 Gemini 的工具调用
  475. toolCall := dto.ToolCallRequest{
  476. ID: fmt.Sprintf("call_%d", len(toolCalls)+1), // 生成唯一ID
  477. Type: "function",
  478. Function: dto.FunctionRequest{
  479. Name: part.FunctionCall.FunctionName,
  480. Arguments: toJSONString(part.FunctionCall.Arguments),
  481. },
  482. }
  483. toolCalls = append(toolCalls, toolCall)
  484. } else if part.FunctionResponse != nil {
  485. // 处理 Gemini 的工具响应,创建单独的 tool 消息
  486. toolMessage := dto.Message{
  487. Role: "tool",
  488. ToolCallId: fmt.Sprintf("call_%d", len(toolCalls)), // 使用对应的调用ID
  489. }
  490. toolMessage.SetStringContent(toJSONString(part.FunctionResponse.Response))
  491. messages = append(messages, toolMessage)
  492. }
  493. }
  494. // 设置消息内容
  495. if len(toolCalls) > 0 {
  496. // 如果有工具调用,设置工具调用
  497. message.SetToolCalls(toolCalls)
  498. } else if len(mediaContents) == 1 && mediaContents[0].Type == "text" {
  499. // 如果只有一个文本内容,直接设置字符串
  500. message.Content = mediaContents[0].Text
  501. } else if len(mediaContents) > 0 {
  502. // 如果有多个内容或包含媒体,设置为数组
  503. message.SetMediaContent(mediaContents)
  504. }
  505. // 只有当消息有内容或工具调用时才添加
  506. if len(message.ParseContent()) > 0 || len(message.ToolCalls) > 0 {
  507. messages = append(messages, message)
  508. }
  509. }
  510. openaiRequest.Messages = messages
  511. if geminiRequest.GenerationConfig.Temperature != nil {
  512. openaiRequest.Temperature = geminiRequest.GenerationConfig.Temperature
  513. }
  514. if geminiRequest.GenerationConfig.TopP > 0 {
  515. openaiRequest.TopP = geminiRequest.GenerationConfig.TopP
  516. }
  517. if geminiRequest.GenerationConfig.TopK > 0 {
  518. openaiRequest.TopK = int(geminiRequest.GenerationConfig.TopK)
  519. }
  520. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  521. openaiRequest.MaxTokens = geminiRequest.GenerationConfig.MaxOutputTokens
  522. }
  523. // gemini stop sequences 最多 5 个,openai stop 最多 4 个
  524. if len(geminiRequest.GenerationConfig.StopSequences) > 0 {
  525. openaiRequest.Stop = geminiRequest.GenerationConfig.StopSequences[:4]
  526. }
  527. if geminiRequest.GenerationConfig.CandidateCount > 0 {
  528. openaiRequest.N = geminiRequest.GenerationConfig.CandidateCount
  529. }
  530. // 转换工具调用
  531. if len(geminiRequest.Tools) > 0 {
  532. var tools []dto.ToolCallRequest
  533. for _, tool := range geminiRequest.Tools {
  534. if tool.FunctionDeclarations != nil {
  535. // 将 Gemini 的 FunctionDeclarations 转换为 OpenAI 的 ToolCallRequest
  536. functionDeclarations, ok := tool.FunctionDeclarations.([]dto.FunctionRequest)
  537. if ok {
  538. for _, function := range functionDeclarations {
  539. openAITool := dto.ToolCallRequest{
  540. Type: "function",
  541. Function: dto.FunctionRequest{
  542. Name: function.Name,
  543. Description: function.Description,
  544. Parameters: function.Parameters,
  545. },
  546. }
  547. tools = append(tools, openAITool)
  548. }
  549. }
  550. }
  551. }
  552. if len(tools) > 0 {
  553. openaiRequest.Tools = tools
  554. }
  555. }
  556. // gemini system instructions
  557. if geminiRequest.SystemInstructions != nil {
  558. // 将系统指令作为第一条消息插入
  559. systemMessage := dto.Message{
  560. Role: "system",
  561. Content: extractTextFromGeminiParts(geminiRequest.SystemInstructions.Parts),
  562. }
  563. openaiRequest.Messages = append([]dto.Message{systemMessage}, openaiRequest.Messages...)
  564. }
  565. return openaiRequest, nil
  566. }
  567. func convertGeminiRoleToOpenAI(geminiRole string) string {
  568. switch geminiRole {
  569. case "user":
  570. return "user"
  571. case "model":
  572. return "assistant"
  573. case "function":
  574. return "function"
  575. default:
  576. return "user"
  577. }
  578. }
  579. func extractTextFromGeminiParts(parts []dto.GeminiPart) string {
  580. var texts []string
  581. for _, part := range parts {
  582. if part.Text != "" {
  583. texts = append(texts, part.Text)
  584. }
  585. }
  586. return strings.Join(texts, "\n")
  587. }
  588. // ResponseOpenAI2Gemini 将 OpenAI 响应转换为 Gemini 格式
  589. func ResponseOpenAI2Gemini(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  590. geminiResponse := &dto.GeminiChatResponse{
  591. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  592. PromptFeedback: dto.GeminiChatPromptFeedback{
  593. SafetyRatings: []dto.GeminiChatSafetyRating{},
  594. },
  595. UsageMetadata: dto.GeminiUsageMetadata{
  596. PromptTokenCount: openAIResponse.PromptTokens,
  597. CandidatesTokenCount: openAIResponse.CompletionTokens,
  598. TotalTokenCount: openAIResponse.PromptTokens + openAIResponse.CompletionTokens,
  599. },
  600. }
  601. for _, choice := range openAIResponse.Choices {
  602. candidate := dto.GeminiChatCandidate{
  603. Index: int64(choice.Index),
  604. SafetyRatings: []dto.GeminiChatSafetyRating{},
  605. }
  606. // 设置结束原因
  607. var finishReason string
  608. switch choice.FinishReason {
  609. case "stop":
  610. finishReason = "STOP"
  611. case "length":
  612. finishReason = "MAX_TOKENS"
  613. case "content_filter":
  614. finishReason = "SAFETY"
  615. case "tool_calls":
  616. finishReason = "STOP"
  617. default:
  618. finishReason = "STOP"
  619. }
  620. candidate.FinishReason = &finishReason
  621. // 转换消息内容
  622. content := dto.GeminiChatContent{
  623. Role: "model",
  624. Parts: make([]dto.GeminiPart, 0),
  625. }
  626. // 处理工具调用
  627. toolCalls := choice.Message.ParseToolCalls()
  628. if len(toolCalls) > 0 {
  629. for _, toolCall := range toolCalls {
  630. // 解析参数
  631. var args map[string]interface{}
  632. if toolCall.Function.Arguments != "" {
  633. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  634. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  635. }
  636. } else {
  637. args = make(map[string]interface{})
  638. }
  639. part := dto.GeminiPart{
  640. FunctionCall: &dto.FunctionCall{
  641. FunctionName: toolCall.Function.Name,
  642. Arguments: args,
  643. },
  644. }
  645. content.Parts = append(content.Parts, part)
  646. }
  647. } else {
  648. // 处理文本内容
  649. textContent := choice.Message.StringContent()
  650. if textContent != "" {
  651. part := dto.GeminiPart{
  652. Text: textContent,
  653. }
  654. content.Parts = append(content.Parts, part)
  655. }
  656. }
  657. candidate.Content = content
  658. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  659. }
  660. return geminiResponse
  661. }
  662. // StreamResponseOpenAI2Gemini 将 OpenAI 流式响应转换为 Gemini 格式
  663. func StreamResponseOpenAI2Gemini(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  664. // 检查是否有实际内容或结束标志
  665. hasContent := false
  666. hasFinishReason := false
  667. for _, choice := range openAIResponse.Choices {
  668. if len(choice.Delta.GetContentString()) > 0 || (choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0) {
  669. hasContent = true
  670. }
  671. if choice.FinishReason != nil {
  672. hasFinishReason = true
  673. }
  674. }
  675. // 如果没有实际内容且没有结束标志,跳过。主要针对 openai 流响应开头的空数据
  676. if !hasContent && !hasFinishReason {
  677. return nil
  678. }
  679. geminiResponse := &dto.GeminiChatResponse{
  680. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  681. PromptFeedback: dto.GeminiChatPromptFeedback{
  682. SafetyRatings: []dto.GeminiChatSafetyRating{},
  683. },
  684. UsageMetadata: dto.GeminiUsageMetadata{
  685. PromptTokenCount: info.PromptTokens,
  686. CandidatesTokenCount: 0, // 流式响应中可能没有完整的 usage 信息
  687. TotalTokenCount: info.PromptTokens,
  688. },
  689. }
  690. for _, choice := range openAIResponse.Choices {
  691. candidate := dto.GeminiChatCandidate{
  692. Index: int64(choice.Index),
  693. SafetyRatings: []dto.GeminiChatSafetyRating{},
  694. }
  695. // 设置结束原因
  696. if choice.FinishReason != nil {
  697. var finishReason string
  698. switch *choice.FinishReason {
  699. case "stop":
  700. finishReason = "STOP"
  701. case "length":
  702. finishReason = "MAX_TOKENS"
  703. case "content_filter":
  704. finishReason = "SAFETY"
  705. case "tool_calls":
  706. finishReason = "STOP"
  707. default:
  708. finishReason = "STOP"
  709. }
  710. candidate.FinishReason = &finishReason
  711. }
  712. // 转换消息内容
  713. content := dto.GeminiChatContent{
  714. Role: "model",
  715. Parts: make([]dto.GeminiPart, 0),
  716. }
  717. // 处理工具调用
  718. if choice.Delta.ToolCalls != nil {
  719. for _, toolCall := range choice.Delta.ToolCalls {
  720. // 解析参数
  721. var args map[string]interface{}
  722. if toolCall.Function.Arguments != "" {
  723. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  724. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  725. }
  726. } else {
  727. args = make(map[string]interface{})
  728. }
  729. part := dto.GeminiPart{
  730. FunctionCall: &dto.FunctionCall{
  731. FunctionName: toolCall.Function.Name,
  732. Arguments: args,
  733. },
  734. }
  735. content.Parts = append(content.Parts, part)
  736. }
  737. } else {
  738. // 处理文本内容
  739. textContent := choice.Delta.GetContentString()
  740. if textContent != "" {
  741. part := dto.GeminiPart{
  742. Text: textContent,
  743. }
  744. content.Parts = append(content.Parts, part)
  745. }
  746. }
  747. candidate.Content = content
  748. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  749. }
  750. return geminiResponse
  751. }