convert.go 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814
  1. package service
  2. import (
  3. "encoding/json"
  4. "fmt"
  5. "one-api/common"
  6. "one-api/constant"
  7. "one-api/dto"
  8. "one-api/relay/channel/openrouter"
  9. relaycommon "one-api/relay/common"
  10. "strings"
  11. )
// ClaudeToOpenAIRequest converts a Claude (Anthropic Messages API) request
// into an OpenAI chat-completions request. Sampling parameters are copied
// directly; extended-thinking configuration, stop sequences, tool
// definitions, the out-of-band system prompt and the message list are each
// translated to their OpenAI equivalents.
func ClaudeToOpenAIRequest(claudeRequest dto.ClaudeRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
	openAIRequest := dto.GeneralOpenAIRequest{
		Model:       claudeRequest.Model,
		MaxTokens:   claudeRequest.MaxTokens,
		Temperature: claudeRequest.Temperature,
		TopP:        claudeRequest.TopP,
		Stream:      claudeRequest.Stream,
	}
	isOpenRouter := info.ChannelType == constant.ChannelTypeOpenRouter
	// Extended thinking: OpenRouter accepts a structured "reasoning" payload;
	// other channels are signalled via the "-thinking" model-name suffix.
	if claudeRequest.Thinking != nil && claudeRequest.Thinking.Type == "enabled" {
		if isOpenRouter {
			reasoning := openrouter.RequestReasoning{
				MaxTokens: claudeRequest.Thinking.GetBudgetTokens(),
			}
			reasoningJSON, err := json.Marshal(reasoning)
			if err != nil {
				return nil, fmt.Errorf("failed to marshal reasoning: %w", err)
			}
			openAIRequest.Reasoning = reasoningJSON
		} else {
			// Carry the "-thinking" suffix over from the originally requested
			// model name, taking care not to append it twice.
			thinkingSuffix := "-thinking"
			if strings.HasSuffix(info.OriginModelName, thinkingSuffix) &&
				!strings.HasSuffix(openAIRequest.Model, thinkingSuffix) {
				openAIRequest.Model = openAIRequest.Model + thinkingSuffix
			}
		}
	}
	// Convert stop sequences: a single entry is sent as a bare string,
	// multiple entries as an array (OpenAI accepts both forms).
	if len(claudeRequest.StopSequences) == 1 {
		openAIRequest.Stop = claudeRequest.StopSequences[0]
	} else if len(claudeRequest.StopSequences) > 1 {
		openAIRequest.Stop = claudeRequest.StopSequences
	}
	// Convert tools: Claude tool definitions map 1:1 onto OpenAI "function"
	// tools. The conversion error is deliberately ignored — on failure the
	// tool list is simply empty.
	tools, _ := common.Any2Type[[]dto.Tool](claudeRequest.Tools)
	openAITools := make([]dto.ToolCallRequest, 0)
	for _, claudeTool := range tools {
		openAITool := dto.ToolCallRequest{
			Type: "function",
			Function: dto.FunctionRequest{
				Name:        claudeTool.Name,
				Description: claudeTool.Description,
				Parameters:  claudeTool.InputSchema,
			},
		}
		openAITools = append(openAITools, openAITool)
	}
	openAIRequest.Tools = openAITools
	// Convert messages.
	openAIMessages := make([]dto.Message, 0)
	// Claude carries the system prompt outside the message list; OpenAI
	// expects it as a leading message with role "system".
	if claudeRequest.System != nil {
		if claudeRequest.IsStringSystem() && claudeRequest.GetStringSystem() != "" {
			openAIMessage := dto.Message{
				Role: "system",
			}
			openAIMessage.SetStringContent(claudeRequest.GetStringSystem())
			openAIMessages = append(openAIMessages, openAIMessage)
		} else {
			systems := claudeRequest.ParseSystem()
			if len(systems) > 0 {
				openAIMessage := dto.Message{
					Role: "system",
				}
				// OpenRouter's Claude models keep the structured system blocks
				// (preserving cache_control markers); for everything else the
				// blocks are flattened to plain concatenated text.
				isOpenRouterClaude := isOpenRouter && strings.HasPrefix(info.UpstreamModelName, "anthropic/claude")
				if isOpenRouterClaude {
					systemMediaMessages := make([]dto.MediaContent, 0, len(systems))
					for _, system := range systems {
						message := dto.MediaContent{
							Type:         "text",
							Text:         system.GetText(),
							CacheControl: system.CacheControl,
						}
						systemMediaMessages = append(systemMediaMessages, message)
					}
					openAIMessage.SetMediaContent(systemMediaMessages)
				} else {
					systemStr := ""
					for _, system := range systems {
						if system.Text != nil {
							systemStr += *system.Text
						}
					}
					openAIMessage.SetStringContent(systemStr)
				}
				openAIMessages = append(openAIMessages, openAIMessage)
			}
		}
	}
	for _, claudeMessage := range claudeRequest.Messages {
		openAIMessage := dto.Message{
			Role: claudeMessage.Role,
		}
		if claudeMessage.IsStringContent() {
			openAIMessage.SetStringContent(claudeMessage.GetStringContent())
		} else {
			content, err := claudeMessage.ParseContent()
			if err != nil {
				return nil, err
			}
			contents := content
			var toolCalls []dto.ToolCallRequest
			mediaMessages := make([]dto.MediaContent, 0, len(contents))
			for _, mediaMsg := range contents {
				switch mediaMsg.Type {
				case "text":
					message := dto.MediaContent{
						Type:         "text",
						Text:         mediaMsg.GetText(),
						CacheControl: mediaMsg.CacheControl,
					}
					mediaMessages = append(mediaMessages, message)
				case "image":
					// Claude ships images as raw base64 plus a media type;
					// OpenAI expects an image_url block with a data URL.
					imageData := fmt.Sprintf("data:%s;base64,%s", mediaMsg.Source.MediaType, mediaMsg.Source.Data)
					mediaMessage := dto.MediaContent{
						Type:     "image_url",
						ImageUrl: &dto.MessageImageUrl{Url: imageData},
					}
					mediaMessages = append(mediaMessages, mediaMessage)
				case "tool_use":
					toolCall := dto.ToolCallRequest{
						ID:   mediaMsg.Id,
						Type: "function",
						Function: dto.FunctionRequest{
							Name:      mediaMsg.Name,
							Arguments: toJSONString(mediaMsg.Input),
						},
					}
					toolCalls = append(toolCalls, toolCall)
				case "tool_result":
					// Tool results become standalone messages with role
					// "tool", appended immediately (i.e. before the message
					// currently being built).
					toolName := mediaMsg.Name
					if toolName == "" {
						// Recover the tool name from the matching tool_use
						// block elsewhere in the request.
						toolName = claudeRequest.SearchToolNameByToolCallId(mediaMsg.ToolUseId)
					}
					oaiToolMessage := dto.Message{
						Role:       "tool",
						Name:       &toolName,
						ToolCallId: mediaMsg.ToolUseId,
					}
					if mediaMsg.IsStringContent() {
						oaiToolMessage.SetStringContent(mediaMsg.GetStringContent())
					} else {
						// Structured tool output is serialized to JSON text.
						mediaContents := mediaMsg.ParseMediaContent()
						encodeJson, _ := common.Marshal(mediaContents)
						oaiToolMessage.SetStringContent(string(encodeJson))
					}
					openAIMessages = append(openAIMessages, oaiToolMessage)
				}
			}
			if len(toolCalls) > 0 {
				openAIMessage.SetToolCalls(toolCalls)
			}
			// NOTE(review): when a message mixes text blocks and tool_use
			// blocks, the text content is dropped in favour of the tool
			// calls — confirm this is the intended behavior.
			if len(mediaMessages) > 0 && len(toolCalls) == 0 {
				openAIMessage.SetMediaContent(mediaMessages)
			}
		}
		// Skip messages that ended up with neither content nor tool calls.
		if len(openAIMessage.ParseContent()) > 0 || len(openAIMessage.ToolCalls) > 0 {
			openAIMessages = append(openAIMessages, openAIMessage)
		}
	}
	openAIRequest.Messages = openAIMessages
	return &openAIRequest, nil
}
  180. func generateStopBlock(index int) *dto.ClaudeResponse {
  181. return &dto.ClaudeResponse{
  182. Type: "content_block_stop",
  183. Index: common.GetPointer[int](index),
  184. }
  185. }
  186. func StreamResponseOpenAI2Claude(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) []*dto.ClaudeResponse {
  187. var claudeResponses []*dto.ClaudeResponse
  188. if info.SendResponseCount == 1 {
  189. msg := &dto.ClaudeMediaMessage{
  190. Id: openAIResponse.Id,
  191. Model: openAIResponse.Model,
  192. Type: "message",
  193. Role: "assistant",
  194. Usage: &dto.ClaudeUsage{
  195. InputTokens: info.PromptTokens,
  196. OutputTokens: 0,
  197. },
  198. }
  199. msg.SetContent(make([]any, 0))
  200. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  201. Type: "message_start",
  202. Message: msg,
  203. })
  204. claudeResponses = append(claudeResponses)
  205. //claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  206. // Type: "ping",
  207. //})
  208. if openAIResponse.IsToolCall() {
  209. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
  210. resp := &dto.ClaudeResponse{
  211. Type: "content_block_start",
  212. ContentBlock: &dto.ClaudeMediaMessage{
  213. Id: openAIResponse.GetFirstToolCall().ID,
  214. Type: "tool_use",
  215. Name: openAIResponse.GetFirstToolCall().Function.Name,
  216. Input: map[string]interface{}{},
  217. },
  218. }
  219. resp.SetIndex(0)
  220. claudeResponses = append(claudeResponses, resp)
  221. } else {
  222. }
  223. // 判断首个响应是否存在内容(非标准的 OpenAI 响应)
  224. if len(openAIResponse.Choices) > 0 && len(openAIResponse.Choices[0].Delta.GetContentString()) > 0 {
  225. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  226. Index: &info.ClaudeConvertInfo.Index,
  227. Type: "content_block_start",
  228. ContentBlock: &dto.ClaudeMediaMessage{
  229. Type: "text",
  230. Text: common.GetPointer[string](""),
  231. },
  232. })
  233. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  234. Type: "content_block_delta",
  235. Delta: &dto.ClaudeMediaMessage{
  236. Type: "text",
  237. Text: common.GetPointer[string](openAIResponse.Choices[0].Delta.GetContentString()),
  238. },
  239. })
  240. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  241. }
  242. return claudeResponses
  243. }
  244. if len(openAIResponse.Choices) == 0 {
  245. // no choices
  246. // 可能为非标准的 OpenAI 响应,判断是否已经完成
  247. if info.Done {
  248. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  249. oaiUsage := info.ClaudeConvertInfo.Usage
  250. if oaiUsage != nil {
  251. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  252. Type: "message_delta",
  253. Usage: &dto.ClaudeUsage{
  254. InputTokens: oaiUsage.PromptTokens,
  255. OutputTokens: oaiUsage.CompletionTokens,
  256. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  257. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  258. },
  259. Delta: &dto.ClaudeMediaMessage{
  260. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  261. },
  262. })
  263. }
  264. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  265. Type: "message_stop",
  266. })
  267. }
  268. return claudeResponses
  269. } else {
  270. chosenChoice := openAIResponse.Choices[0]
  271. if chosenChoice.FinishReason != nil && *chosenChoice.FinishReason != "" {
  272. // should be done
  273. info.FinishReason = *chosenChoice.FinishReason
  274. if !info.Done {
  275. return claudeResponses
  276. }
  277. }
  278. if info.Done {
  279. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  280. oaiUsage := info.ClaudeConvertInfo.Usage
  281. if oaiUsage != nil {
  282. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  283. Type: "message_delta",
  284. Usage: &dto.ClaudeUsage{
  285. InputTokens: oaiUsage.PromptTokens,
  286. OutputTokens: oaiUsage.CompletionTokens,
  287. CacheCreationInputTokens: oaiUsage.PromptTokensDetails.CachedCreationTokens,
  288. CacheReadInputTokens: oaiUsage.PromptTokensDetails.CachedTokens,
  289. },
  290. Delta: &dto.ClaudeMediaMessage{
  291. StopReason: common.GetPointer[string](stopReasonOpenAI2Claude(info.FinishReason)),
  292. },
  293. })
  294. }
  295. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  296. Type: "message_stop",
  297. })
  298. } else {
  299. var claudeResponse dto.ClaudeResponse
  300. var isEmpty bool
  301. claudeResponse.Type = "content_block_delta"
  302. if len(chosenChoice.Delta.ToolCalls) > 0 {
  303. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeTools {
  304. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  305. info.ClaudeConvertInfo.Index++
  306. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  307. Index: &info.ClaudeConvertInfo.Index,
  308. Type: "content_block_start",
  309. ContentBlock: &dto.ClaudeMediaMessage{
  310. Id: openAIResponse.GetFirstToolCall().ID,
  311. Type: "tool_use",
  312. Name: openAIResponse.GetFirstToolCall().Function.Name,
  313. Input: map[string]interface{}{},
  314. },
  315. })
  316. }
  317. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeTools
  318. // tools delta
  319. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  320. Type: "input_json_delta",
  321. PartialJson: &chosenChoice.Delta.ToolCalls[0].Function.Arguments,
  322. }
  323. } else {
  324. reasoning := chosenChoice.Delta.GetReasoningContent()
  325. textContent := chosenChoice.Delta.GetContentString()
  326. if reasoning != "" || textContent != "" {
  327. if reasoning != "" {
  328. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeThinking {
  329. //info.ClaudeConvertInfo.Index++
  330. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  331. Index: &info.ClaudeConvertInfo.Index,
  332. Type: "content_block_start",
  333. ContentBlock: &dto.ClaudeMediaMessage{
  334. Type: "thinking",
  335. Thinking: "",
  336. },
  337. })
  338. }
  339. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeThinking
  340. // text delta
  341. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  342. Type: "thinking_delta",
  343. Thinking: reasoning,
  344. }
  345. } else {
  346. if info.ClaudeConvertInfo.LastMessagesType != relaycommon.LastMessageTypeText {
  347. if info.LastMessagesType == relaycommon.LastMessageTypeThinking || info.LastMessagesType == relaycommon.LastMessageTypeTools {
  348. claudeResponses = append(claudeResponses, generateStopBlock(info.ClaudeConvertInfo.Index))
  349. info.ClaudeConvertInfo.Index++
  350. }
  351. claudeResponses = append(claudeResponses, &dto.ClaudeResponse{
  352. Index: &info.ClaudeConvertInfo.Index,
  353. Type: "content_block_start",
  354. ContentBlock: &dto.ClaudeMediaMessage{
  355. Type: "text",
  356. Text: common.GetPointer[string](""),
  357. },
  358. })
  359. }
  360. info.ClaudeConvertInfo.LastMessagesType = relaycommon.LastMessageTypeText
  361. // text delta
  362. claudeResponse.Delta = &dto.ClaudeMediaMessage{
  363. Type: "text_delta",
  364. Text: common.GetPointer[string](textContent),
  365. }
  366. }
  367. } else {
  368. isEmpty = true
  369. }
  370. }
  371. claudeResponse.Index = &info.ClaudeConvertInfo.Index
  372. if !isEmpty {
  373. claudeResponses = append(claudeResponses, &claudeResponse)
  374. }
  375. }
  376. }
  377. return claudeResponses
  378. }
  379. func ResponseOpenAI2Claude(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.ClaudeResponse {
  380. var stopReason string
  381. contents := make([]dto.ClaudeMediaMessage, 0)
  382. claudeResponse := &dto.ClaudeResponse{
  383. Id: openAIResponse.Id,
  384. Type: "message",
  385. Role: "assistant",
  386. Model: openAIResponse.Model,
  387. }
  388. for _, choice := range openAIResponse.Choices {
  389. stopReason = stopReasonOpenAI2Claude(choice.FinishReason)
  390. if choice.FinishReason == "tool_calls" {
  391. for _, toolUse := range choice.Message.ParseToolCalls() {
  392. claudeContent := dto.ClaudeMediaMessage{}
  393. claudeContent.Type = "tool_use"
  394. claudeContent.Id = toolUse.ID
  395. claudeContent.Name = toolUse.Function.Name
  396. var mapParams map[string]interface{}
  397. if err := common.Unmarshal([]byte(toolUse.Function.Arguments), &mapParams); err == nil {
  398. claudeContent.Input = mapParams
  399. } else {
  400. claudeContent.Input = toolUse.Function.Arguments
  401. }
  402. contents = append(contents, claudeContent)
  403. }
  404. } else {
  405. claudeContent := dto.ClaudeMediaMessage{}
  406. claudeContent.Type = "text"
  407. claudeContent.SetText(choice.Message.StringContent())
  408. contents = append(contents, claudeContent)
  409. }
  410. }
  411. claudeResponse.Content = contents
  412. claudeResponse.StopReason = stopReason
  413. claudeResponse.Usage = &dto.ClaudeUsage{
  414. InputTokens: openAIResponse.PromptTokens,
  415. OutputTokens: openAIResponse.CompletionTokens,
  416. }
  417. return claudeResponse
  418. }
  419. func stopReasonOpenAI2Claude(reason string) string {
  420. switch reason {
  421. case "stop":
  422. return "end_turn"
  423. case "stop_sequence":
  424. return "stop_sequence"
  425. case "length":
  426. fallthrough
  427. case "max_tokens":
  428. return "max_tokens"
  429. case "tool_calls":
  430. return "tool_use"
  431. default:
  432. return reason
  433. }
  434. }
  435. func toJSONString(v interface{}) string {
  436. b, err := json.Marshal(v)
  437. if err != nil {
  438. return "{}"
  439. }
  440. return string(b)
  441. }
  442. func GeminiToOpenAIRequest(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo) (*dto.GeneralOpenAIRequest, error) {
  443. openaiRequest := &dto.GeneralOpenAIRequest{
  444. Model: info.UpstreamModelName,
  445. Stream: info.IsStream,
  446. }
  447. // 转换 messages
  448. var messages []dto.Message
  449. for _, content := range geminiRequest.Contents {
  450. message := dto.Message{
  451. Role: convertGeminiRoleToOpenAI(content.Role),
  452. }
  453. // 处理 parts
  454. var mediaContents []dto.MediaContent
  455. var toolCalls []dto.ToolCallRequest
  456. for _, part := range content.Parts {
  457. if part.Text != "" {
  458. mediaContent := dto.MediaContent{
  459. Type: "text",
  460. Text: part.Text,
  461. }
  462. mediaContents = append(mediaContents, mediaContent)
  463. } else if part.InlineData != nil {
  464. mediaContent := dto.MediaContent{
  465. Type: "image_url",
  466. ImageUrl: &dto.MessageImageUrl{
  467. Url: fmt.Sprintf("data:%s;base64,%s", part.InlineData.MimeType, part.InlineData.Data),
  468. Detail: "auto",
  469. MimeType: part.InlineData.MimeType,
  470. },
  471. }
  472. mediaContents = append(mediaContents, mediaContent)
  473. } else if part.FileData != nil {
  474. mediaContent := dto.MediaContent{
  475. Type: "image_url",
  476. ImageUrl: &dto.MessageImageUrl{
  477. Url: part.FileData.FileUri,
  478. Detail: "auto",
  479. MimeType: part.FileData.MimeType,
  480. },
  481. }
  482. mediaContents = append(mediaContents, mediaContent)
  483. } else if part.FunctionCall != nil {
  484. // 处理 Gemini 的工具调用
  485. toolCall := dto.ToolCallRequest{
  486. ID: fmt.Sprintf("call_%d", len(toolCalls)+1), // 生成唯一ID
  487. Type: "function",
  488. Function: dto.FunctionRequest{
  489. Name: part.FunctionCall.FunctionName,
  490. Arguments: toJSONString(part.FunctionCall.Arguments),
  491. },
  492. }
  493. toolCalls = append(toolCalls, toolCall)
  494. } else if part.FunctionResponse != nil {
  495. // 处理 Gemini 的工具响应,创建单独的 tool 消息
  496. toolMessage := dto.Message{
  497. Role: "tool",
  498. ToolCallId: fmt.Sprintf("call_%d", len(toolCalls)), // 使用对应的调用ID
  499. }
  500. toolMessage.SetStringContent(toJSONString(part.FunctionResponse.Response))
  501. messages = append(messages, toolMessage)
  502. }
  503. }
  504. // 设置消息内容
  505. if len(toolCalls) > 0 {
  506. // 如果有工具调用,设置工具调用
  507. message.SetToolCalls(toolCalls)
  508. } else if len(mediaContents) == 1 && mediaContents[0].Type == "text" {
  509. // 如果只有一个文本内容,直接设置字符串
  510. message.Content = mediaContents[0].Text
  511. } else if len(mediaContents) > 0 {
  512. // 如果有多个内容或包含媒体,设置为数组
  513. message.SetMediaContent(mediaContents)
  514. }
  515. // 只有当消息有内容或工具调用时才添加
  516. if len(message.ParseContent()) > 0 || len(message.ToolCalls) > 0 {
  517. messages = append(messages, message)
  518. }
  519. }
  520. openaiRequest.Messages = messages
  521. if geminiRequest.GenerationConfig.Temperature != nil {
  522. openaiRequest.Temperature = geminiRequest.GenerationConfig.Temperature
  523. }
  524. if geminiRequest.GenerationConfig.TopP > 0 {
  525. openaiRequest.TopP = geminiRequest.GenerationConfig.TopP
  526. }
  527. if geminiRequest.GenerationConfig.TopK > 0 {
  528. openaiRequest.TopK = int(geminiRequest.GenerationConfig.TopK)
  529. }
  530. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  531. openaiRequest.MaxTokens = geminiRequest.GenerationConfig.MaxOutputTokens
  532. }
  533. // gemini stop sequences 最多 5 个,openai stop 最多 4 个
  534. if len(geminiRequest.GenerationConfig.StopSequences) > 0 {
  535. openaiRequest.Stop = geminiRequest.GenerationConfig.StopSequences[:4]
  536. }
  537. if geminiRequest.GenerationConfig.CandidateCount > 0 {
  538. openaiRequest.N = geminiRequest.GenerationConfig.CandidateCount
  539. }
  540. // 转换工具调用
  541. if len(geminiRequest.Tools) > 0 {
  542. var tools []dto.ToolCallRequest
  543. for _, tool := range geminiRequest.Tools {
  544. if tool.FunctionDeclarations != nil {
  545. // 将 Gemini 的 FunctionDeclarations 转换为 OpenAI 的 ToolCallRequest
  546. functionDeclarations, ok := tool.FunctionDeclarations.([]dto.FunctionRequest)
  547. if ok {
  548. for _, function := range functionDeclarations {
  549. openAITool := dto.ToolCallRequest{
  550. Type: "function",
  551. Function: dto.FunctionRequest{
  552. Name: function.Name,
  553. Description: function.Description,
  554. Parameters: function.Parameters,
  555. },
  556. }
  557. tools = append(tools, openAITool)
  558. }
  559. }
  560. }
  561. }
  562. if len(tools) > 0 {
  563. openaiRequest.Tools = tools
  564. }
  565. }
  566. // gemini system instructions
  567. if geminiRequest.SystemInstructions != nil {
  568. // 将系统指令作为第一条消息插入
  569. systemMessage := dto.Message{
  570. Role: "system",
  571. Content: extractTextFromGeminiParts(geminiRequest.SystemInstructions.Parts),
  572. }
  573. openaiRequest.Messages = append([]dto.Message{systemMessage}, openaiRequest.Messages...)
  574. }
  575. return openaiRequest, nil
  576. }
  577. func convertGeminiRoleToOpenAI(geminiRole string) string {
  578. switch geminiRole {
  579. case "user":
  580. return "user"
  581. case "model":
  582. return "assistant"
  583. case "function":
  584. return "function"
  585. default:
  586. return "user"
  587. }
  588. }
  589. func extractTextFromGeminiParts(parts []dto.GeminiPart) string {
  590. var texts []string
  591. for _, part := range parts {
  592. if part.Text != "" {
  593. texts = append(texts, part.Text)
  594. }
  595. }
  596. return strings.Join(texts, "\n")
  597. }
// ResponseOpenAI2Gemini converts a non-streaming OpenAI chat completion into
// a Gemini generateContent response: choices become candidates, finish
// reasons are mapped to Gemini's vocabulary, tool calls become functionCall
// parts, and token usage is copied into usageMetadata.
func ResponseOpenAI2Gemini(openAIResponse *dto.OpenAITextResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
	geminiResponse := &dto.GeminiChatResponse{
		Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
		PromptFeedback: dto.GeminiChatPromptFeedback{
			SafetyRatings: []dto.GeminiChatSafetyRating{},
		},
		UsageMetadata: dto.GeminiUsageMetadata{
			PromptTokenCount:     openAIResponse.PromptTokens,
			CandidatesTokenCount: openAIResponse.CompletionTokens,
			TotalTokenCount:      openAIResponse.PromptTokens + openAIResponse.CompletionTokens,
		},
	}
	for _, choice := range openAIResponse.Choices {
		candidate := dto.GeminiChatCandidate{
			Index:         int64(choice.Index),
			SafetyRatings: []dto.GeminiChatSafetyRating{},
		}
		// Map the finish reason; anything unrecognised (and "tool_calls",
		// which Gemini has no dedicated value for here) reports as STOP.
		var finishReason string
		switch choice.FinishReason {
		case "stop":
			finishReason = "STOP"
		case "length":
			finishReason = "MAX_TOKENS"
		case "content_filter":
			finishReason = "SAFETY"
		case "tool_calls":
			finishReason = "STOP"
		default:
			finishReason = "STOP"
		}
		candidate.FinishReason = &finishReason
		// Candidate content is always attributed to the "model" role.
		content := dto.GeminiChatContent{
			Role:  "model",
			Parts: make([]dto.GeminiPart, 0),
		}
		// Tool calls take precedence over plain text content.
		toolCalls := choice.Message.ParseToolCalls()
		if len(toolCalls) > 0 {
			for _, toolCall := range toolCalls {
				// Arguments arrive as a JSON string; decode into a map, or
				// wrap the raw string under "arguments" if decoding fails.
				var args map[string]interface{}
				if toolCall.Function.Arguments != "" {
					if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
						args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
					}
				} else {
					args = make(map[string]interface{})
				}
				part := dto.GeminiPart{
					FunctionCall: &dto.FunctionCall{
						FunctionName: toolCall.Function.Name,
						Arguments:    args,
					},
				}
				content.Parts = append(content.Parts, part)
			}
		} else {
			// Plain text; an empty string yields no part at all.
			textContent := choice.Message.StringContent()
			if textContent != "" {
				part := dto.GeminiPart{
					Text: textContent,
				}
				content.Parts = append(content.Parts, part)
			}
		}
		candidate.Content = content
		geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
	}
	return geminiResponse
}
  672. // StreamResponseOpenAI2Gemini 将 OpenAI 流式响应转换为 Gemini 格式
  673. func StreamResponseOpenAI2Gemini(openAIResponse *dto.ChatCompletionsStreamResponse, info *relaycommon.RelayInfo) *dto.GeminiChatResponse {
  674. // 检查是否有实际内容或结束标志
  675. hasContent := false
  676. hasFinishReason := false
  677. for _, choice := range openAIResponse.Choices {
  678. if len(choice.Delta.GetContentString()) > 0 || (choice.Delta.ToolCalls != nil && len(choice.Delta.ToolCalls) > 0) {
  679. hasContent = true
  680. }
  681. if choice.FinishReason != nil {
  682. hasFinishReason = true
  683. }
  684. }
  685. // 如果没有实际内容且没有结束标志,跳过。主要针对 openai 流响应开头的空数据
  686. if !hasContent && !hasFinishReason {
  687. return nil
  688. }
  689. geminiResponse := &dto.GeminiChatResponse{
  690. Candidates: make([]dto.GeminiChatCandidate, 0, len(openAIResponse.Choices)),
  691. PromptFeedback: dto.GeminiChatPromptFeedback{
  692. SafetyRatings: []dto.GeminiChatSafetyRating{},
  693. },
  694. UsageMetadata: dto.GeminiUsageMetadata{
  695. PromptTokenCount: info.PromptTokens,
  696. CandidatesTokenCount: 0, // 流式响应中可能没有完整的 usage 信息
  697. TotalTokenCount: info.PromptTokens,
  698. },
  699. }
  700. for _, choice := range openAIResponse.Choices {
  701. candidate := dto.GeminiChatCandidate{
  702. Index: int64(choice.Index),
  703. SafetyRatings: []dto.GeminiChatSafetyRating{},
  704. }
  705. // 设置结束原因
  706. if choice.FinishReason != nil {
  707. var finishReason string
  708. switch *choice.FinishReason {
  709. case "stop":
  710. finishReason = "STOP"
  711. case "length":
  712. finishReason = "MAX_TOKENS"
  713. case "content_filter":
  714. finishReason = "SAFETY"
  715. case "tool_calls":
  716. finishReason = "STOP"
  717. default:
  718. finishReason = "STOP"
  719. }
  720. candidate.FinishReason = &finishReason
  721. }
  722. // 转换消息内容
  723. content := dto.GeminiChatContent{
  724. Role: "model",
  725. Parts: make([]dto.GeminiPart, 0),
  726. }
  727. // 处理工具调用
  728. if choice.Delta.ToolCalls != nil {
  729. for _, toolCall := range choice.Delta.ToolCalls {
  730. // 解析参数
  731. var args map[string]interface{}
  732. if toolCall.Function.Arguments != "" {
  733. if err := json.Unmarshal([]byte(toolCall.Function.Arguments), &args); err != nil {
  734. args = map[string]interface{}{"arguments": toolCall.Function.Arguments}
  735. }
  736. } else {
  737. args = make(map[string]interface{})
  738. }
  739. part := dto.GeminiPart{
  740. FunctionCall: &dto.FunctionCall{
  741. FunctionName: toolCall.Function.Name,
  742. Arguments: args,
  743. },
  744. }
  745. content.Parts = append(content.Parts, part)
  746. }
  747. } else {
  748. // 处理文本内容
  749. textContent := choice.Delta.GetContentString()
  750. if textContent != "" {
  751. part := dto.GeminiPart{
  752. Text: textContent,
  753. }
  754. content.Parts = append(content.Parts, part)
  755. }
  756. }
  757. candidate.Content = content
  758. geminiResponse.Candidates = append(geminiResponse.Candidates, candidate)
  759. }
  760. return geminiResponse
  761. }