relay-gemini.go 27 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873
  1. package gemini
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"sort"
	"strings"
	"unicode/utf8"

	"github.com/gin-gonic/gin"

	"one-api/common"
	"one-api/constant"
	"one-api/dto"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting/model_setting"
)
// geminiSupportedMimeTypes is the whitelist of MIME types accepted for inline
// media that is fetched from a URL; lookups are performed on the lowercased
// MIME type (see CovertGemini2OpenAI).
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
// Setting safety to the lowest possible values since Gemini is already powerless enough
//
// CovertGemini2OpenAI converts an OpenAI-format chat completion request into a
// Gemini generateContent request. (The name is a historical typo for
// "Convert"; it is exported, so it is kept for callers.)
//
// It maps, in order:
//   - generation parameters (temperature, top_p, max tokens, seed);
//   - TEXT+IMAGE response modalities for image-capable models;
//   - the thinking adapter driven by "-thinking"/"-nothinking" name suffixes;
//   - safety settings from the configured per-category thresholds;
//   - tools, treating "googleSearch" and "codeExecution" as Gemini built-ins;
//   - json_schema / json_object response formats;
//   - every message: system prompts, tool/function results, tool calls, and
//     text / image / file / audio content parts.
//
// Returns an error when a message cannot be converted (invalid tool-call
// arguments, unsupported MIME type, too many images, or bad base64 data).
func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*GeminiChatRequest, error) {
	geminiRequest := GeminiChatRequest{
		Contents: make([]GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.MaxTokens,
			Seed:            int64(textRequest.Seed),
		},
	}
	// Image-capable models must explicitly request both output modalities.
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
		if strings.HasSuffix(info.OriginModelName, "-thinking") {
			// If the model name starts with gemini-2.5-pro, do not set ThinkingBudget.
			if strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro") {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					IncludeThoughts: true,
				}
			} else {
				// Budget is a configured percentage of MaxOutputTokens, capped at
				// 24576 (the cap is also used when the product computes to 0).
				budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
				if budgetTokens == 0 || budgetTokens > 24576 {
					budgetTokens = 24576
				}
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget:  common.GetPointer(int(budgetTokens)),
					IncludeThoughts: true,
				}
			}
		} else if strings.HasSuffix(info.OriginModelName, "-nothinking") {
			// A zero budget disables thinking.
			geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
				ThinkingBudget: common.GetPointer(0),
			}
		}
	}
	// Apply the configured safety threshold to every known category.
	safetySettings := make([]GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		for _, tool := range textRequest.Tools {
			// "googleSearch" / "codeExecution" are pseudo-tools mapped to
			// Gemini built-ins, not function declarations.
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			// An object schema with zero properties is dropped entirely.
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		if codeExecution {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		// common.SysLog("tools: " + fmt.Sprintf("%+v", geminiRequest.Tools))
		// json_data, _ := json.Marshal(geminiRequest.Tools)
		// common.SysLog("tools_json: " + string(json_data))
	} else if textRequest.Functions != nil {
		//geminiRequest.Tools = []GeminiChatTool{
		//	{
		//		FunctionDeclarations: textRequest.Functions,
		//	},
		//}
	}
	// Structured output: both json_schema and json_object select a JSON MIME
	// type; an explicit schema is stripped of fields Gemini does not accept.
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if textRequest.ResponseFormat.JsonSchema != nil && textRequest.ResponseFormat.JsonSchema.Schema != nil {
			cleanedSchema := removeAdditionalPropertiesWithDepth(textRequest.ResponseFormat.JsonSchema.Schema, 0)
			geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
		}
	}
	// Maps tool-call IDs to function names so a later tool-result message that
	// only carries the call ID can recover the function name.
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			// System prompts are collected into systemInstruction below.
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			// Tool results become functionResponse parts appended to the last
			// user turn (Gemini has no tool role); start a new user turn when
			// the previous content came from the model.
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			content := common.StrToMap(message.StringContent())
			functionResp := &FunctionResponse{
				Name: name,
				Response: GeminiFunctionResponseContent{
					Name:    name,
					Content: content,
				},
			}
			// Non-JSON tool output is passed through as the raw string.
			if content == nil {
				functionResp.Response.Content = message.StringContent()
			}
			*parts = append(*parts, GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []GeminiPart
		content := GeminiChatContent{
			Role: message.Role,
		}
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := GeminiPart{
					FunctionCall: &FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				// Remember the call ID so the matching tool result can find its name.
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				// Empty text parts are skipped.
				if part.Text == "" {
					continue
				}
				parts = append(parts, GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Determine whether the image is a URL or inline base64 data.
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					// It is a URL: fetch the file's MIME type and base64-encoded data.
					fileData, err := service.GetFileBase64FromUrl(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Verify the MIME type is in Gemini's supported whitelist.
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						return nil, fmt.Errorf("MIME type '%s' from URL '%s' is not supported by Gemini. Supported types are: %v", fileData.MimeType, part.GetImageMedia().Url, getSupportedMimeTypesList())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: fileData.MimeType, // keep the original MimeType: letter case may matter to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				// Only inline (base64) file payloads are supported, not file IDs.
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// there's no assistant role in gemini and API shall vomit if Role is not user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		geminiRequest.Contents = append(geminiRequest.Contents, content)
	}
	// Collected system prompts become a single systemInstruction block.
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &GeminiChatContent{
			Parts: []GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
  297. // Helper function to get a list of supported MIME types for error messages
  298. func getSupportedMimeTypesList() []string {
  299. keys := make([]string, 0, len(geminiSupportedMimeTypes))
  300. for k := range geminiSupportedMimeTypes {
  301. keys = append(keys, k)
  302. }
  303. return keys
  304. }
  305. // cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
  306. func cleanFunctionParameters(params interface{}) interface{} {
  307. if params == nil {
  308. return nil
  309. }
  310. switch v := params.(type) {
  311. case map[string]interface{}:
  312. // Create a copy to avoid modifying the original
  313. cleanedMap := make(map[string]interface{})
  314. for k, val := range v {
  315. cleanedMap[k] = val
  316. }
  317. // Remove unsupported root-level fields
  318. delete(cleanedMap, "default")
  319. delete(cleanedMap, "exclusiveMaximum")
  320. delete(cleanedMap, "exclusiveMinimum")
  321. delete(cleanedMap, "$schema")
  322. delete(cleanedMap, "additionalProperties")
  323. // Check and clean 'format' for string types
  324. if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
  325. if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
  326. if formatValue != "enum" && formatValue != "date-time" {
  327. delete(cleanedMap, "format")
  328. }
  329. }
  330. }
  331. // Clean properties
  332. if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
  333. cleanedProps := make(map[string]interface{})
  334. for propName, propValue := range props {
  335. cleanedProps[propName] = cleanFunctionParameters(propValue)
  336. }
  337. cleanedMap["properties"] = cleanedProps
  338. }
  339. // Recursively clean items in arrays
  340. if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
  341. cleanedMap["items"] = cleanFunctionParameters(items)
  342. }
  343. // Also handle items if it's an array of schemas
  344. if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
  345. cleanedItemsArray := make([]interface{}, len(itemsArray))
  346. for i, item := range itemsArray {
  347. cleanedItemsArray[i] = cleanFunctionParameters(item)
  348. }
  349. cleanedMap["items"] = cleanedItemsArray
  350. }
  351. // Recursively clean other schema composition keywords
  352. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  353. if nested, ok := cleanedMap[field].([]interface{}); ok {
  354. cleanedNested := make([]interface{}, len(nested))
  355. for i, item := range nested {
  356. cleanedNested[i] = cleanFunctionParameters(item)
  357. }
  358. cleanedMap[field] = cleanedNested
  359. }
  360. }
  361. // Recursively clean patternProperties
  362. if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
  363. cleanedPatternProps := make(map[string]interface{})
  364. for pattern, schema := range patternProps {
  365. cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
  366. }
  367. cleanedMap["patternProperties"] = cleanedPatternProps
  368. }
  369. // Recursively clean definitions
  370. if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
  371. cleanedDefinitions := make(map[string]interface{})
  372. for defName, defSchema := range definitions {
  373. cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
  374. }
  375. cleanedMap["definitions"] = cleanedDefinitions
  376. }
  377. // Recursively clean $defs (newer JSON Schema draft)
  378. if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
  379. cleanedDefs := make(map[string]interface{})
  380. for defName, defSchema := range defs {
  381. cleanedDefs[defName] = cleanFunctionParameters(defSchema)
  382. }
  383. cleanedMap["$defs"] = cleanedDefs
  384. }
  385. // Clean conditional keywords
  386. for _, field := range []string{"if", "then", "else", "not"} {
  387. if nested, ok := cleanedMap[field]; ok {
  388. cleanedMap[field] = cleanFunctionParameters(nested)
  389. }
  390. }
  391. return cleanedMap
  392. case []interface{}:
  393. // Handle arrays of schemas
  394. cleanedArray := make([]interface{}, len(v))
  395. for i, item := range v {
  396. cleanedArray[i] = cleanFunctionParameters(item)
  397. }
  398. return cleanedArray
  399. default:
  400. // Not a map or array, return as is (e.g., could be a primitive)
  401. return params
  402. }
  403. }
  404. func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
  405. if depth >= 5 {
  406. return schema
  407. }
  408. v, ok := schema.(map[string]interface{})
  409. if !ok || len(v) == 0 {
  410. return schema
  411. }
  412. // 删除所有的title字段
  413. delete(v, "title")
  414. delete(v, "$schema")
  415. // 如果type不为object和array,则直接返回
  416. if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
  417. return schema
  418. }
  419. switch v["type"] {
  420. case "object":
  421. delete(v, "additionalProperties")
  422. // 处理 properties
  423. if properties, ok := v["properties"].(map[string]interface{}); ok {
  424. for key, value := range properties {
  425. properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
  426. }
  427. }
  428. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  429. if nested, ok := v[field].([]interface{}); ok {
  430. for i, item := range nested {
  431. nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
  432. }
  433. }
  434. }
  435. case "array":
  436. if items, ok := v["items"].(map[string]interface{}); ok {
  437. v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
  438. }
  439. }
  440. return v
  441. }
  442. func unescapeString(s string) (string, error) {
  443. var result []rune
  444. escaped := false
  445. i := 0
  446. for i < len(s) {
  447. r, size := utf8.DecodeRuneInString(s[i:]) // 正确解码UTF-8字符
  448. if r == utf8.RuneError {
  449. return "", fmt.Errorf("invalid UTF-8 encoding")
  450. }
  451. if escaped {
  452. // 如果是转义符后的字符,检查其类型
  453. switch r {
  454. case '"':
  455. result = append(result, '"')
  456. case '\\':
  457. result = append(result, '\\')
  458. case '/':
  459. result = append(result, '/')
  460. case 'b':
  461. result = append(result, '\b')
  462. case 'f':
  463. result = append(result, '\f')
  464. case 'n':
  465. result = append(result, '\n')
  466. case 'r':
  467. result = append(result, '\r')
  468. case 't':
  469. result = append(result, '\t')
  470. case '\'':
  471. result = append(result, '\'')
  472. default:
  473. // 如果遇到一个非法的转义字符,直接按原样输出
  474. result = append(result, '\\', r)
  475. }
  476. escaped = false
  477. } else {
  478. if r == '\\' {
  479. escaped = true // 记录反斜杠作为转义符
  480. } else {
  481. result = append(result, r)
  482. }
  483. }
  484. i += size // 移动到下一个字符
  485. }
  486. return string(result), nil
  487. }
  488. func unescapeMapOrSlice(data interface{}) interface{} {
  489. switch v := data.(type) {
  490. case map[string]interface{}:
  491. for k, val := range v {
  492. v[k] = unescapeMapOrSlice(val)
  493. }
  494. case []interface{}:
  495. for i, val := range v {
  496. v[i] = unescapeMapOrSlice(val)
  497. }
  498. case string:
  499. if unescaped, err := unescapeString(v); err != nil {
  500. return v
  501. } else {
  502. return unescaped
  503. }
  504. }
  505. return data
  506. }
  507. func getResponseToolCall(item *GeminiPart) *dto.ToolCallResponse {
  508. var argsBytes []byte
  509. var err error
  510. if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
  511. argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
  512. } else {
  513. argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
  514. }
  515. if err != nil {
  516. return nil
  517. }
  518. return &dto.ToolCallResponse{
  519. ID: fmt.Sprintf("call_%s", common.GetUUID()),
  520. Type: "function",
  521. Function: dto.FunctionResponse{
  522. Arguments: string(argsBytes),
  523. Name: item.FunctionCall.FunctionName,
  524. },
  525. }
  526. }
  527. func responseGeminiChat2OpenAI(response *GeminiChatResponse) *dto.OpenAITextResponse {
  528. fullTextResponse := dto.OpenAITextResponse{
  529. Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
  530. Object: "chat.completion",
  531. Created: common.GetTimestamp(),
  532. Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
  533. }
  534. content, _ := json.Marshal("")
  535. isToolCall := false
  536. for _, candidate := range response.Candidates {
  537. choice := dto.OpenAITextResponseChoice{
  538. Index: int(candidate.Index),
  539. Message: dto.Message{
  540. Role: "assistant",
  541. Content: content,
  542. },
  543. FinishReason: constant.FinishReasonStop,
  544. }
  545. if len(candidate.Content.Parts) > 0 {
  546. var texts []string
  547. var toolCalls []dto.ToolCallResponse
  548. for _, part := range candidate.Content.Parts {
  549. if part.FunctionCall != nil {
  550. choice.FinishReason = constant.FinishReasonToolCalls
  551. if call := getResponseToolCall(&part); call != nil {
  552. toolCalls = append(toolCalls, *call)
  553. }
  554. } else if part.Thought {
  555. choice.Message.ReasoningContent = part.Text
  556. } else {
  557. if part.ExecutableCode != nil {
  558. texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
  559. } else if part.CodeExecutionResult != nil {
  560. texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
  561. } else {
  562. // 过滤掉空行
  563. if part.Text != "\n" {
  564. texts = append(texts, part.Text)
  565. }
  566. }
  567. }
  568. }
  569. if len(toolCalls) > 0 {
  570. choice.Message.SetToolCalls(toolCalls)
  571. isToolCall = true
  572. }
  573. choice.Message.SetStringContent(strings.Join(texts, "\n"))
  574. }
  575. if candidate.FinishReason != nil {
  576. switch *candidate.FinishReason {
  577. case "STOP":
  578. choice.FinishReason = constant.FinishReasonStop
  579. case "MAX_TOKENS":
  580. choice.FinishReason = constant.FinishReasonLength
  581. default:
  582. choice.FinishReason = constant.FinishReasonContentFilter
  583. }
  584. }
  585. if isToolCall {
  586. choice.FinishReason = constant.FinishReasonToolCalls
  587. }
  588. fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
  589. }
  590. return &fullTextResponse
  591. }
// streamResponseGeminiChat2OpenAI converts one streamed Gemini chunk into an
// OpenAI chat.completion.chunk. It returns the converted chunk plus two flags:
// isStop — the upstream reported finish reason STOP (the caller emits a
// dedicated stop chunk for it) — and hasImage — the chunk carried inline image
// data (used by the caller for image-based billing).
func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	hasImage := false
	for _, candidate := range geminiResponse.Candidates {
		// STOP is surfaced through the isStop flag rather than the delta;
		// clearing it here keeps it out of the switch below.
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		if candidate.FinishReason != nil {
			// p := GeminiConvertFinishReason(*candidate.FinishReason)
			switch *candidate.FinishReason {
			case "STOP": // unreachable: STOP was cleared above; kept for symmetry
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				// Inline images are rendered as markdown data-URL images.
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
					hasImage = true
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					// Drop pure-newline filler parts.
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		// NOTE(review): a chunk mixing thought and normal text would be emitted
		// entirely as reasoning content — assumes upstream never mixes the two
		// in a single chunk.
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop, hasImage
}
  664. func GeminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
  665. // responseText := ""
  666. id := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
  667. createAt := common.GetTimestamp()
  668. var usage = &dto.Usage{}
  669. var imageCount int
  670. helper.StreamScannerHandler(c, resp, info, func(data string) bool {
  671. var geminiResponse GeminiChatResponse
  672. err := common.DecodeJsonStr(data, &geminiResponse)
  673. if err != nil {
  674. common.LogError(c, "error unmarshalling stream response: "+err.Error())
  675. return false
  676. }
  677. response, isStop, hasImage := streamResponseGeminiChat2OpenAI(&geminiResponse)
  678. if hasImage {
  679. imageCount++
  680. }
  681. response.Id = id
  682. response.Created = createAt
  683. response.Model = info.UpstreamModelName
  684. if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
  685. usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
  686. usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
  687. usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
  688. usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
  689. }
  690. err = helper.ObjectData(c, response)
  691. if err != nil {
  692. common.LogError(c, err.Error())
  693. }
  694. if isStop {
  695. response := helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
  696. helper.ObjectData(c, response)
  697. }
  698. return true
  699. })
  700. var response *dto.ChatCompletionsStreamResponse
  701. if imageCount != 0 {
  702. if usage.CompletionTokens == 0 {
  703. usage.CompletionTokens = imageCount * 258
  704. }
  705. }
  706. usage.PromptTokensDetails.TextTokens = usage.PromptTokens
  707. usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
  708. if info.ShouldIncludeUsage {
  709. response = helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
  710. err := helper.ObjectData(c, response)
  711. if err != nil {
  712. common.SysError("send final response failed: " + err.Error())
  713. }
  714. }
  715. helper.Done(c)
  716. //resp.Body.Close()
  717. return nil, usage
  718. }
// GeminiChatHandler relays a non-streaming Gemini chat response: it reads and
// decodes the upstream body, converts it to an OpenAI chat completion and
// writes it to the client with the upstream status code. Returns the usage for
// billing, or an OpenAI-style error when reading/decoding fails or the
// upstream returned no candidates.
func GeminiChatHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	if common.DebugEnabled {
		println(string(responseBody))
	}
	var geminiResponse GeminiChatResponse
	err = common.DecodeJson(responseBody, &geminiResponse)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	// An empty candidate list is treated as a server-side failure, surfaced
	// with the upstream status code.
	if len(geminiResponse.Candidates) == 0 {
		return &dto.OpenAIErrorWithStatusCode{
			Error: dto.OpenAIError{
				Message: "No candidates returned",
				Type:    "server_error",
				Param:   "",
				Code:    500,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
	fullTextResponse.Model = info.UpstreamModelName
	usage := dto.Usage{
		PromptTokens:     geminiResponse.UsageMetadata.PromptTokenCount,
		CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
		TotalTokens:      geminiResponse.UsageMetadata.TotalTokenCount,
	}
	usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
	// Recompute completion tokens from the total so thought (reasoning) tokens
	// are billed as completion output rather than dropped.
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	fullTextResponse.Usage = usage
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	// A write failure here cannot be reported to the client; the error is
	// intentionally not propagated.
	_, err = c.Writer.Write(jsonResponse)
	return nil, &usage
}
  766. func GeminiEmbeddingHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *dto.OpenAIErrorWithStatusCode) {
  767. responseBody, readErr := io.ReadAll(resp.Body)
  768. if readErr != nil {
  769. return nil, service.OpenAIErrorWrapper(readErr, "read_response_body_failed", http.StatusInternalServerError)
  770. }
  771. _ = resp.Body.Close()
  772. var geminiResponse GeminiEmbeddingResponse
  773. if jsonErr := json.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  774. return nil, service.OpenAIErrorWrapper(jsonErr, "unmarshal_response_body_failed", http.StatusInternalServerError)
  775. }
  776. // convert to openai format response
  777. openAIResponse := dto.OpenAIEmbeddingResponse{
  778. Object: "list",
  779. Data: []dto.OpenAIEmbeddingResponseItem{
  780. {
  781. Object: "embedding",
  782. Embedding: geminiResponse.Embedding.Values,
  783. Index: 0,
  784. },
  785. },
  786. Model: info.UpstreamModelName,
  787. }
  788. // calculate usage
  789. // https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
  790. // Google has not yet clarified how embedding models will be billed
  791. // refer to openai billing method to use input tokens billing
  792. // https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
  793. usage = &dto.Usage{
  794. PromptTokens: info.PromptTokens,
  795. CompletionTokens: 0,
  796. TotalTokens: info.PromptTokens,
  797. }
  798. openAIResponse.Usage = *usage.(*dto.Usage)
  799. jsonResponse, jsonErr := json.Marshal(openAIResponse)
  800. if jsonErr != nil {
  801. return nil, service.OpenAIErrorWrapper(jsonErr, "marshal_response_failed", http.StatusInternalServerError)
  802. }
  803. c.Writer.Header().Set("Content-Type", "application/json")
  804. c.Writer.WriteHeader(resp.StatusCode)
  805. _, _ = c.Writer.Write(jsonResponse)
  806. return usage, nil
  807. }