convert.go 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808
  1. package service
  2. import (
  3. "encoding/json"
  4. "fmt"
  5. "one-api/common"
  6. "one-api/constant"
  7. "one-api/dto"
  8. "one-api/relay/channel/openrouter"
  9. relaycommon "one-api/relay/common"
  10. "strings"
  11. )
// ClaudeToOpenAIRequest converts a Claude (Anthropic Messages API) request
// into an OpenAI chat-completions request: sampling parameters, thinking /
// reasoning configuration, stop sequences, tool definitions, the system
// prompt and the conversation messages are all mapped over. Claude
// tool_result blocks are emitted as separate "tool" role messages.
func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
	openAIRequest := dto.GeneralOpenAIRequest{
		Model:       claudeRequest.Model,
		MaxTokens:   claudeRequest.MaxTokens,
		Temperature: claudeRequest.Temperature,
		TopP:        claudeRequest.TopP,
		Stream:      claudeRequest.Stream,
	}
	isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter
	if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
		if isOpenRouter {
			// OpenRouter has a native reasoning field; forward the thinking
			// budget to it as serialized JSON.
			reasoning := openrouter.RequestReasoning{
				MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
			}
			reasoningJSON, err := json.Marshal(reasoning)
			if err != nil {
				return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
			}
			openAIRequest.Reasoning = reasoningJSON
		} else {
			// Other channels express thinking support via a "-thinking" model
			// name suffix: add it only when the original model name carried it
			// and the target model does not already end with it.
			thinkingSuffix := "-thinking"
			if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
				!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
				openAIRequest.Model = openAIRequest.Model + thinkingSuffix
			}
		}
	}
	// Convert stop sequences: OpenAI's "stop" accepts either a single string
	// or a list of strings.
	if len(claudeRequest.StopSequences) == 1 {
		openAIRequest.Stop = claudeRequest.StopSequences[0]
	} else if len(claudeRequest.StopSequences) > 1 {
		openAIRequest.Stop = claudeRequest.StopSequences
	}
	// Convert tools.
	// NOTE(review): the conversion error is silently discarded, so malformed
	// tool definitions yield an empty tool list — confirm this is intentional.
	tools, _ := common.Any2Type[[]dto.Tool](claudeRequest.Tools)
	openAITools := make([]dto.ToolCallRequest, 0)
	for _, claudeTool := range tools {
		openAITool := dto.ToolCallRequest{
			Type: "function",
			Function: dto.FunctionRequest{
				Name:        claudeTool.Name,
				Description: claudeTool.Description,
				Parameters:  claudeTool.InputSchema,
			},
		}
		openAITools = append(openAITools, openAITool)
	}
	openAIRequest.Tools = openAITools
	// Convert messages.
	openAIMessages := make([]dto.Message, 0)
	// Add system message if present.
	if claudeRequest.System != nil {
		if claudeRequest.IsStringSystem() && claudeRequest.GetStringSystem() != "" {
			// Plain-string system prompt maps directly.
			openAIMessage := dto.Message{
				Role: "system",
			}
			openAIMessage.SetStringContent(claudeRequest.GetStringSystem())
			openAIMessages = append(openAIMessages, openAIMessage)
		} else {
			// Structured (block-list) system prompt.
			systems := claudeRequest.ParseSystem()
			if len(systems) > 0 {
				openAIMessage := dto.Message{
					Role: "system",
				}
				// Claude models behind OpenRouter understand structured system
				// blocks (including cache_control markers); all other targets
				// get the text fragments concatenated into one string.
				isOpenRouterClaude := isOpenRouter && strings.HasPrefix(info.UpstreamModelName, "anthropic/claude")
				if isOpenRouterClaude {
					systemMediaMessages := make([]dto.MediaContent, 0, len(systems))
					for _, system := range systems {
						message := dto.MediaContent{
							Type:         "text",
							Text:         system.GetText(),
							CacheControl: system.CacheControl,
						}
						systemMediaMessages = append(systemMediaMessages, message)
					}
					openAIMessage.SetMediaContent(systemMediaMessages)
				} else {
					systemStr := ""
					for _, system := range systems {
						if system.Text != nil {
							systemStr += *system.Text
						}
					}
					openAIMessage.SetStringContent(systemStr)
				}
				openAIMessages = append(openAIMessages, openAIMessage)
			}
		}
	}
	for _, claudeMessage := range claudeRequest.Messages {
		openAIMessage := dto.Message{
			Role: claudeMessage.Role,
		}
		if claudeMessage.IsStringContent() {
			// Simple string content maps directly.
			openAIMessage.SetStringContent(claudeMessage.GetStringContent())
		} else {
			// Block-list content: split into media parts, tool calls and
			// standalone tool-result messages.
			content, err := claudeMessage.ParseContent()
			if err != nil {
				return nil, err
			}
			contents := content
			var toolCalls []dto.ToolCallRequest
			mediaMessages := make([]dto.MediaContent, 0, len(contents))
			for _, mediaMsg := range contents {
				switch mediaMsg.Type {
				case "text":
					message := dto.MediaContent{
						Type:         "text",
						Text:         mediaMsg.GetText(),
						CacheControl: mediaMsg.CacheControl,
					}
					mediaMessages = append(mediaMessages, message)
				case "image":
					// Claude carries images as base64 source blocks; OpenAI
					// expects an image_url, so re-encode as a data URL.
					imageData := fmt.Sprintf("data:%s;base64,%s", mediaMsg.Source.MediaType, mediaMsg.Source.Data)
					mediaMessage := dto.MediaContent{
						Type:     "image_url",
						ImageUrl: &dto.MessageImageUrl{Url: imageData},
					}
					mediaMessages = append(mediaMessages, mediaMessage)
				case "tool_use":
					// Assistant-side tool invocation; the input object is
					// serialized to the JSON argument string OpenAI expects.
					toolCall := dto.ToolCallRequest{
						ID:   mediaMsg.Id,
						Type: "function",
						Function: dto.FunctionRequest{
							Name:      mediaMsg.Name,
							Arguments: toJSONString(mediaMsg.Input),
						},
					}
					toolCalls = append(toolCalls, toolCall)
				case "tool_result":
					// Tool results become separate "tool" role messages,
					// appended immediately (before the enclosing message).
					oaiToolMessage := dto.Message{
						Role:       "tool",
						Name:       &mediaMsg.Name,
						ToolCallId: mediaMsg.ToolUseId,
					}
					if mediaMsg.IsStringContent() {
						oaiToolMessage.SetStringContent(mediaMsg.GetStringContent())
					} else {
						// Non-string tool results are re-serialized to JSON text.
						mediaContents := mediaMsg.ParseMediaContent()
						encodeJson, _ := common.Marshal(mediaContents)
						oaiToolMessage.SetStringContent(string(encodeJson))
					}
					openAIMessages = append(openAIMessages, oaiToolMessage)
				}
			}
			if len(toolCalls) > 0 {
				openAIMessage.SetToolCalls(toolCalls)
			}
			// NOTE(review): when a message mixes tool calls with media/text
			// content, the media content is dropped in favor of the tool
			// calls — confirm this precedence is intended.
			if len(mediaMessages) > 0 && len(toolCalls) == 0 {
				openAIMessage.SetMediaContent(mediaMessages)
			}
		}
		// Skip messages that ended up empty (e.g. ones that held only
		// tool_result blocks, which were already emitted above).
		if len(openAIMessage.ParseContent()) > 0 || len(openAIMessage.ToolCalls) > 0 {
			openAIMessages = append(openAIMessages, openAIMessage)
		}
	}
	openAIRequest.Messages = openAIMessages
	return &openAIRequest, nil
}
  176. func generateStopBlock(index int) *dto.ClaudeResponse {
  177. return &dto.ClaudeResponse{
  178. Type: "content_block_stop",
  179. Index: common.GetPointer[int](index),
  180. }
  181. }
  182. func StreamResponseOpenAI2Claude(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) []*dto.ClaudeResponse {
  183. var claudeResponses []*dto.ClaudeResponse
  184. if info.SendResponseCount == 1 {
  185. msg := &dto.ClaudeMediaMessage{
  186. Id: openAIResponse.Id,
  187. Model: openAIResponse.Model,
  188. Type: "message",
  189. Role: "assistant",
  190. Usage: &dto.ClaudeUsage{
  191. InputTokens: info.PromptTokens,
  192. OutputTokens: 0,
  193. },
  194. }
  195. msg.SetContent(make([]any, 0))
  196. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  197. Type: "message_start",
  198. Message: msg,
  199. })
  200. claudeResponses = append(claudeResponses)
  201. //claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  202. // Type: "ping",
  203. //})
  204. if openAIResponse.IsToolCall() {
  205. resp := &dto.ClaudeResponse{
  206. Type: "content_block_start",
  207. ContentBlock: &dto.ClaudeMediaMessage{
  208. Id: openAIResponse.GetFirstToolCall().ID,
  209. Type: "tool_use",
  210. Name: openAIResponse.GetFirstToolCall().Function.Name,
  211. },
  212. }
  213. resp.SetIndex(0)
  214. claudeResponses = append(claudeResponses, resp)
  215. } else {
  216. }
  217. // 判断首个响应是否存在内容(非标准的 OpenAI 响应)
  218. if len(openAIResponse.Choices) > 0 && len(openAIResponse.Choices[0].Delta.GetContentString()) > 0 {
  219. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  220. Index: &info.ClaudeConvertInfo.Index,
  221. Type: "content_block_start",
  222. ContentBlock: &dto.ClaudeMediaMessage{
  223. Type: "text",
  224. Text: common.GetPointer[string](""),
  225. },
  226. })
  227. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  228. Type: "content_block_delta",
  229. Delta: &dto.ClaudeMediaMessage{
  230. Type: "text",
  231. Text: common.GetPointer[string](openAIResponse.Choices[0].Delta.GetContentString()),
  232. },
  233. })
  234. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  235. }
  236. return claudeResponses
  237. }
  238. if len(openAIResponse.Choices) == 0 {
  239. // no choices
  240. // 可能为非标准的 OpenAI 响应,判断是否已经完成
  241. if info.Done {
  242. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  243. oaiUsage := info.ClaudeConvertInfo.Usage
  244. if oaiUsage != nil {
  245. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  246. Type: "message_delta",
  247. Usage: &dto.ClaudeUsage{
  248. InputTokens: oaiUsage.PromptTokens,
  249. OutputTokens: oaiUsage.CompletionTokens,
  250. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  251. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  252. },
  253. Delta: &dto.ClaudeMediaMessage{
  254. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  255. },
  256. })
  257. }
  258. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  259. Type: "message_stop",
  260. })
  261. }
  262. return claudeResponses
  263. } else {
  264. chosenChoice := openAIResponse.Choices[0]
  265. if chosenChoice.FinishReason != nil && *chosenChoice.FinishReason != "" {
  266. // should be done
  267. info.FinishReason = *chosenChoice.FinishReason
  268. if !info.Done {
  269. return claudeResponses
  270. }
  271. }
  272. if info.Done {
  273. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  274. oaiUsage := info.ClaudeConvertInfo.Usage
  275. if oaiUsage != nil {
  276. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  277. Type: "message_delta",
  278. Usage: &dto.ClaudeUsage{
  279. InputTokens: oaiUsage.PromptTokens,
  280. OutputTokens: oaiUsage.CompletionTokens,
  281. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  282. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  283. },
  284. Delta: &dto.ClaudeMediaMessage{
  285. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  286. },
  287. })
  288. }
  289. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  290. Type: "message_stop",
  291. })
  292. } else {
  293. var claudeResponse dto.ClaudeResponse
  294. var isEmpty bool
  295. claudeResponse.Type = "content_block_delta"
  296. if len(chosenChoice.Delta.ToolCalls) > 0 {
  297. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeTools {
  298. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  299. info.ClaudeConvertInfo.Index++
  300. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  301. Index: &info.ClaudeConvertInfo.Index,
  302. Type: "content_block_start",
  303. ContentBlock: &dto.ClaudeMediaMessage{
  304. Id: openAIResponse.GetFirstToolCall().ID,
  305. Type: "tool_use",
  306. Name: openAIResponse.GetFirstToolCall().Function.Name,
  307. Input: map[string]interface{}{},
  308. },
  309. })
  310. }
  311. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
  312. // tools delta
  313. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  314. Type: "input_json_delta",
  315. PartialJson: &chosenChoice.Delta.ToolCalls[0].Function.Arguments,
  316. }
  317. } else {
  318. reasoning := chosenChoice.Delta.GetReasoningContent()
  319. textContent := chosenChoice.Delta.GetContentString()
  320. if reasoning != "" || textContent != "" {
  321. if reasoning != "" {
  322. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeThinking {
  323. //info.ClaudeConvertInfo.Index++
  324. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  325. Index: &info.ClaudeConvertInfo.Index,
  326. Type: "content_block_start",
  327. ContentBlock: &dto.ClaudeMediaMessage{
  328. Type: "thinking",
  329. Thinking: "",
  330. },
  331. })
  332. }
  333. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
  334. // text delta
  335. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  336. Type: "thinking_delta",
  337. Thinking: reasoning,
  338. }
  339. } else {
  340. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeText {
  341. if info.LastMessagesType == relaycommon.LastMessageTypeThinking || info.LastMessagesType == relaycommon.LastMessageTypeTools {
  342. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  343. info.ClaudeConvertInfo.Index++
  344. }
  345. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  346. Index: &info.ClaudeConvertInfo.Index,
  347. Type: "content_block_start",
  348. ContentBlock: &dto.ClaudeMediaMessage{
  349. Type: "text",
  350. Text: common.GetPointer[string](""),
  351. },
  352. })
  353. }
  354. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  355. // text delta
  356. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  357. Type: "text_delta",
  358. Text: common.GetPointer[string](textContent),
  359. }
  360. }
  361. } else {
  362. isEmpty = true
  363. }
  364. }
  365. claudeResponse.Index = &info.ClaudeConvertInfo.Index
  366. if !isEmpty {
  367. claudeResponses = append(claudeResponses, &claudeResponse)
  368. }
  369. }
  370. }
  371. return claudeResponses
  372. }
  373. func ResponseOpenAI2Claude(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.ClaudeResponse {
  374. var stopReason string
  375. contents := make([]dto.ClaudeMediaMessage, 0)
  376. claudeResponse := &dto.ClaudeResponse{
  377. Id: openAIResponse.Id,
  378. Type: "message",
  379. Role: "assistant",
  380. Model: openAIResponse.Model,
  381. }
  382. for _, choice := range openAIResponse.Choices {
  383. stopReason = stopReasonOpenAI2Claude(choice.FinishReason)
  384. if choice.FinishReason == "tool_calls" {
  385. for _, toolUse := range choice.Message.ParseToolCalls() {
  386. claudeContent := dto.ClaudeMediaMessage{}
  387. claudeContent.Type = "tool_use"
  388. claudeContent.Id = toolUse.ID
  389. claudeContent.Name = toolUse.Function.Name
  390. var mapParams map[string]interface{}
  391. if err := common.Unmarshal([]byte(toolUse.Function.Arguments), &mapParams); err == nil {
  392. claudeContent.Input = mapParams
  393. } else {
  394. claudeContent.Input = toolUse.Function.Arguments
  395. }
  396. contents = append(contents, claudeContent)
  397. }
  398. } else {
  399. claudeContent := dto.ClaudeMediaMessage{}
  400. claudeContent.Type = "text"
  401. claudeContent.SetText(choice.Message.StringContent())
  402. contents = append(contents, claudeContent)
  403. }
  404. }
  405. claudeResponse.Content = contents
  406. claudeResponse.StopReason = stopReason
  407. claudeResponse.Usage = &dto.ClaudeUsage{
  408. InputTokens: openAIResponse.PromptTokens,
  409. OutputTokens: openAIResponse.CompletionTokens,
  410. }
  411. return claudeResponse
  412. }
  413. func stopReasonOpenAI2Claude(reason string) string {
  414. switch reason {
  415. case "stop":
  416. return "end_turn"
  417. case "stop_sequence":
  418. return "stop_sequence"
  419. case "length":
  420. fallthrough
  421. case "max_tokens":
  422. return "max_tokens"
  423. case "tool_calls":
  424. return "tool_use"
  425. default:
  426. return reason
  427. }
  428. }
  429. func toJSONString(v interface{}) string {
  430. b, err := json.Marshal(v)
  431. if err != nil {
  432. return "{}"
  433. }
  434. return string(b)
  435. }
  436. func GeminiToOpenAIRequest(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
  437. openaiRequest := &dto.GeneralOpenAIRequest{
  438. Model: info.UpstreamModelName,
  439. Stream: info.IsStream,
  440. }
  441. // 转换 messages
  442. var messages []dto.Message
  443. for _, content := range geminiRequest.Contents {
  444. message := dto.Message{
  445. Role: convertGeminiRoleToOpenAI(content.Role),
  446. }
  447. // 处理 parts
  448. var mediaContents []dto.MediaContent
  449. var toolCalls []dto.ToolCallRequest
  450. for _, part := range content.Parts {
  451. if part.Text != "" {
  452. mediaContent := dto.MediaContent{
  453. Type: "text",
  454. Text: part.Text,
  455. }
  456. mediaContents = append(mediaContents, mediaContent)
  457. } else if part.InlineData != nil {
  458. mediaContent := dto.MediaContent{
  459. Type: "image_url",
  460. ImageUrl: &dto.MessageImageUrl{
  461. Url: fmt.Sprintf("data:%s;base64,%s", part.InlineData.MimeType, part.InlineData.Data),
  462. Detail: "auto",
  463. MimeType: part.InlineData.MimeType,
  464. },
  465. }
  466. mediaContents = append(mediaContents, mediaContent)
  467. } else if part.FileData != nil {
  468. mediaContent := dto.MediaContent{
  469. Type: "image_url",
  470. ImageUrl: &dto.MessageImageUrl{
  471. Url: part.FileData.FileUri,
  472. Detail: "auto",
  473. MimeType: part.FileData.MimeType,
  474. },
  475. }
  476. mediaContents = append(mediaContents, mediaContent)
  477. } else if part.FunctionCall != nil {
  478. // 处理 Gemini 的工具调用
  479. toolCall := dto.ToolCallRequest{
  480. ID: fmt.Sprintf("call_%d", len(toolCalls)+1), // 生成唯一ID
  481. Type: "function",
  482. Function: dto.FunctionRequest{
  483. Name: part.FunctionCall.FunctionName,
  484. Arguments: toJSONString(part.FunctionCall.Arguments),
  485. },
  486. }
  487. toolCalls = append(toolCalls, toolCall)
  488. } else if part.FunctionResponse != nil {
  489. // 处理 Gemini 的工具响应,创建单独的 tool 消息
  490. toolMessage := dto.Message{
  491. Role: "tool",
  492. ToolCallId: fmt.Sprintf("call_%d", len(toolCalls)), // 使用对应的调用ID
  493. }
  494. toolMessage.SetStringContent(toJSONString(part.FunctionResponse.Response))
  495. messages = append(messages, toolMessage)
  496. }
  497. }
  498. // 设置消息内容
  499. if len(toolCalls) > 0 {
  500. // 如果有工具调用,设置工具调用
  501. message.SetToolCalls(toolCalls)
  502. } else if len(mediaContents) == 1 && mediaContents[0].Type == "text" {
  503. // 如果只有一个文本内容,直接设置字符串
  504. message.Content = mediaContents[0].Text
  505. } else if len(mediaContents) > 0 {
  506. // 如果有多个内容或包含媒体,设置为数组
  507. message.SetMediaContent(mediaContents)
  508. }
  509. // 只有当消息有内容或工具调用时才添加
  510. if len(message.ParseContent()) > 0 || len(message.ToolCalls) > 0 {
  511. messages = append(messages, message)
  512. }
  513. }
  514. openaiRequest.Messages = messages
  515. if geminiRequest.GenerationConfig.Temperature != nil {
  516. openaiRequest.Temperature = geminiRequest.GenerationConfig.Temperature
  517. }
  518. if geminiRequest.GenerationConfig.TopP > 0 {
  519. openaiRequest.TopP = geminiRequest.GenerationConfig.TopP
  520. }
  521. if geminiRequest.GenerationConfig.TopK > 0 {
  522. openaiRequest.TopK = int(geminiRequest.GenerationConfig.TopK)
  523. }
  524. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  525. openaiRequest.MaxTokens = geminiRequest.GenerationConfig.MaxOutputTokens
  526. }
  527. // gemini stop sequences 最多 5 个,openai stop 最多 4 个
  528. if len(geminiRequest.GenerationConfig.StopSequences) > 0 {
  529. openaiRequest.Stop = geminiRequest.GenerationConfig.StopSequences[:4]
  530. }
  531. if geminiRequest.GenerationConfig.CandidateCount > 0 {
  532. openaiRequest.N = geminiRequest.GenerationConfig.CandidateCount
  533. }
  534. // 转换工具调用
  535. if len(geminiRequest.Tools) > 0 {
  536. var tools []dto.ToolCallRequest
  537. for _, tool := range geminiRequest.Tools {
  538. if tool.FunctionDeclarations != nil {
  539. // 将 Gemini 的 FunctionDeclarations 转换为 OpenAI 的 ToolCallRequest
  540. functionDeclarations, ok := tool.FunctionDeclarations.([]dto.FunctionRequest)
  541. if ok {
  542. for _, function := range functionDeclarations {
  543. openAITool := dto.ToolCallRequest{
  544. Type: "function",
  545. Function: dto.FunctionRequest{
  546. Name: function.Name,
  547. Description: function.Description,
  548. Parameters: function.Parameters,
  549. },
  550. }
  551. tools = append(tools, openAITool)
  552. }
  553. }
  554. }
  555. }
  556. if len(tools) > 0 {
  557. openaiRequest.Tools = tools
  558. }
  559. }
  560. // gemini system instructions
  561. if geminiRequest.SystemInstructions != nil {
  562. // 将系统指令作为第一条消息插入
  563. systemMessage := dto.Message{
  564. Role: "system",
  565. Content: extractTextFromGeminiParts(geminiRequest.SystemInstructions.Parts),
  566. }
  567. openaiRequest.Messages = append([]dto.Message{systemMessage}, openaiRequest.Messages...)
  568. }
  569. return openaiRequest, nil
  570. }
  571. func convertGeminiRoleToOpenAI(geminiRole string) string {
  572. switch geminiRole {
  573. case "user":
  574. return "user"
  575. case "model":
  576. return "assistant"
  577. case "function":
  578. return "function"
  579. default:
  580. return "user"
  581. }
  582. }
  583. func extractTextFromGeminiParts(parts []dto.GeminiPart) string {
  584. var texts []string
  585. for _, part := range parts {
  586. if part.Text != "" {
  587. texts = append(texts, part.Text)
  588. }
  589. }
  590. return strings.Join(texts, "\n")
  591. }
  592. // ResponseOpenAI2Gemini 将 OpenAI 响应转换为 Gemini 格式
  593. func ResponseOpenAI2Gemini(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  594. geminiResponse := &dto.GeminiChatResponse{
  595. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  596. PromptFeedback: dto.GeminiChatPromptFeedback{
  597. SafetyRatings: []dto.GeminiChatSafetyRating{},
  598. },
  599. UsageMetadata: dto.GeminiUsageMetadata{
  600. PromptTokenCount: openAIResponse.PromptTokens,
  601. CandidatesTokenCount: openAIResponse.CompletionTokens,
  602. TotalTokenCount: openAIResponse.PromptTokens + openAIResponse.CompletionTokens,
  603. },
  604. }
  605. for _, choice := range openAIResponse.Choices {
  606. candidate := dto.GeminiChatCandidate{
  607. Index: int64(choice.Index),
  608. SafetyRatings: []dto.GeminiChatSafetyRating{},
  609. }
  610. // 设置结束原因
  611. var finishReason string
  612. switch choice.FinishReason {
  613. case "stop":
  614. finishReason = "STOP"
  615. case "length":
  616. finishReason = "MAX_TOKENS"
  617. case "content_filter":
  618. finishReason = "SAFETY"
  619. case "tool_calls":
  620. finishReason = "STOP"
  621. default:
  622. finishReason = "STOP"
  623. }
  624. candidate.FinishReason = &finishReason
  625. // 转换消息内容
  626. content := dto.GeminiChatContent{
  627. Role: "model",
  628. Parts: make([]dto.GeminiPart, 0),
  629. }
  630. // 处理工具调用
  631. toolCalls := choice.Message.ParseToolCalls()
  632. if len(toolCalls) > 0 {
  633. for _, toolCall := range toolCalls {
  634. // 解析参数
  635. var args map[string]interface{}
  636. if toolCall.Function.Arguments != "" {
  637. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  638. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  639. }
  640. } else {
  641. args = make(map[string]interface{})
  642. }
  643. part := dto.GeminiPart{
  644. FunctionCall: &dto.FunctionCall{
  645. FunctionName: toolCall.Function.Name,
  646. Arguments: args,
  647. },
  648. }
  649. content.Parts = append(content.Parts, part)
  650. }
  651. } else {
  652. // 处理文本内容
  653. textContent := choice.Message.StringContent()
  654. if textContent != "" {
  655. part := dto.GeminiPart{
  656. Text: textContent,
  657. }
  658. content.Parts = append(content.Parts, part)
  659. }
  660. }
  661. candidate.Content = content
  662. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  663. }
  664. return geminiResponse
  665. }
  666. // StreamResponseOpenAI2Gemini 将 OpenAI 流式响应转换为 Gemini 格式
  667. func StreamResponseOpenAI2Gemini(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  668. // 检查是否有实际内容或结束标志
  669. hasContent := false
  670. hasFinishReason := false
  671. for _, choice := range openAIResponse.Choices {
  672. if len(choice.Delta.GetContentString()) > 0 || (choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0) {
  673. hasContent = true
  674. }
  675. if choice.FinishReason != nil {
  676. hasFinishReason = true
  677. }
  678. }
  679. // 如果没有实际内容且没有结束标志,跳过。主要针对 openai 流响应开头的空数据
  680. if !hasContent && !hasFinishReason {
  681. return nil
  682. }
  683. geminiResponse := &dto.GeminiChatResponse{
  684. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  685. PromptFeedback: dto.GeminiChatPromptFeedback{
  686. SafetyRatings: []dto.GeminiChatSafetyRating{},
  687. },
  688. UsageMetadata: dto.GeminiUsageMetadata{
  689. PromptTokenCount: info.PromptTokens,
  690. CandidatesTokenCount: 0, // 流式响应中可能没有完整的 usage 信息
  691. TotalTokenCount: info.PromptTokens,
  692. },
  693. }
  694. for _, choice := range openAIResponse.Choices {
  695. candidate := dto.GeminiChatCandidate{
  696. Index: int64(choice.Index),
  697. SafetyRatings: []dto.GeminiChatSafetyRating{},
  698. }
  699. // 设置结束原因
  700. if choice.FinishReason != nil {
  701. var finishReason string
  702. switch *choice.FinishReason {
  703. case "stop":
  704. finishReason = "STOP"
  705. case "length":
  706. finishReason = "MAX_TOKENS"
  707. case "content_filter":
  708. finishReason = "SAFETY"
  709. case "tool_calls":
  710. finishReason = "STOP"
  711. default:
  712. finishReason = "STOP"
  713. }
  714. candidate.FinishReason = &finishReason
  715. }
  716. // 转换消息内容
  717. content := dto.GeminiChatContent{
  718. Role: "model",
  719. Parts: make([]dto.GeminiPart, 0),
  720. }
  721. // 处理工具调用
  722. if choice.Delta.ToolCalls != nil {
  723. for _, toolCall := range choice.Delta.ToolCalls {
  724. // 解析参数
  725. var args map[string]interface{}
  726. if toolCall.Function.Arguments != "" {
  727. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  728. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  729. }
  730. } else {
  731. args = make(map[string]interface{})
  732. }
  733. part := dto.GeminiPart{
  734. FunctionCall: &dto.FunctionCall{
  735. FunctionName: toolCall.Function.Name,
  736. Arguments: args,
  737. },
  738. }
  739. content.Parts = append(content.Parts, part)
  740. }
  741. } else {
  742. // 处理文本内容
  743. textContent := choice.Delta.GetContentString()
  744. if textContent != "" {
  745. part := dto.GeminiPart{
  746. Text: textContent,
  747. }
  748. content.Parts = append(content.Parts, part)
  749. }
  750. }
  751. candidate.Content = content
  752. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  753. }
  754. return geminiResponse
  755. }