relay-gemini.go 29 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929
  1. package gemini
  2. import (
  3. "encoding/json"
  4. "fmt"
  5. "io"
  6. "net/http"
  7. "one-api/common"
  8. "one-api/constant"
  9. "one-api/dto"
  10. relaycommon "one-api/relay/common"
  11. "one-api/relay/helper"
  12. "one-api/service"
  13. "one-api/setting/model_setting"
  14. "strings"
  15. "unicode/utf8"
  16. "github.com/gin-gonic/gin"
  17. )
// geminiSupportedMimeTypes whitelists the MIME types accepted for inline
// media that is fetched from a remote URL (see the image-URL branch of
// CovertGemini2OpenAI). Keys must be lowercase: lookups are done on the
// lowercased MIME type.
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
// CovertGemini2OpenAI converts an OpenAI-format chat request into a Gemini
// generateContent request. It maps generation parameters, the thinking
// adapter (driven by "-thinking" / "-nothinking" model-name suffixes),
// safety settings, tools, response_format, and the full message history.
// An error is returned for invalid tool-call arguments, undecodable media
// parts, or when a message exceeds constant.GeminiVisionMaxImageNum images.
func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*GeminiChatRequest, error) {
	geminiRequest := GeminiChatRequest{
		Contents: make([]GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.MaxTokens,
			Seed:            int64(textRequest.Seed),
		},
	}
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
		if strings.HasSuffix(info.OriginModelName, "-thinking") {
			// Legacy models hard-coded as not supporting ThinkingBudget.
			unsupportedModels := []string{
				"gemini-2.5-pro-preview-05-06",
				"gemini-2.5-pro-preview-03-25",
			}
			isUnsupported := false
			for _, unsupportedModel := range unsupportedModels {
				if strings.HasPrefix(info.OriginModelName, unsupportedModel) {
					isUnsupported = true
					break
				}
			}
			if isUnsupported {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					IncludeThoughts: true,
				}
			} else {
				// Thinking budget is a configured percentage of MaxOutputTokens.
				budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
				// Newer 2.5-pro models support ThinkingBudget, but with a special range.
				isNew25Pro := strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro") &&
					!strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro-preview-05-06") &&
					!strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro-preview-03-25")
				if isNew25Pro {
					// New 2.5-pro models: ThinkingBudget must be within 128-32768.
					if budgetTokens == 0 || budgetTokens < 128 {
						budgetTokens = 128
					} else if budgetTokens > 32768 {
						budgetTokens = 32768
					}
				} else {
					// Other models: ThinkingBudget must be within 0-24576.
					if budgetTokens == 0 || budgetTokens > 24576 {
						budgetTokens = 24576
					}
				}
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget:  common.GetPointer(int(budgetTokens)),
					IncludeThoughts: true,
				}
			}
		} else if strings.HasSuffix(info.OriginModelName, "-nothinking") {
			// New 2.5-pro models cannot disable thinking (their minimum budget
			// is 128), so -nothinking is only honored for other models.
			isNew25Pro := strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro") &&
				!strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro-preview-05-06") &&
				!strings.HasPrefix(info.OriginModelName, "gemini-2.5-pro-preview-03-25")
			if !isNew25Pro {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget: common.GetPointer(0),
				}
			}
		}
	}
	// Safety is set to the configured (typically lowest possible) thresholds,
	// since Gemini is already restrictive enough on its own.
	safetySettings := make([]GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		for _, tool := range textRequest.Tools {
			// "googleSearch" / "codeExecution" are pseudo-functions that map to
			// Gemini built-in tools instead of function declarations.
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						// An empty properties object is rejected upstream; drop
						// the schema entirely in that case.
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Strip JSON-schema fields Gemini does not accept before appending.
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		if codeExecution {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
	}
	// JSON mode: force a JSON MIME type and forward the (cleaned) schema.
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if textRequest.ResponseFormat.JsonSchema != nil && textRequest.ResponseFormat.JsonSchema.Schema != nil {
			cleanedSchema := removeAdditionalPropertiesWithDepth(textRequest.ResponseFormat.JsonSchema.Schema, 0)
			geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
		}
	}
	// Maps tool-call IDs to function names so a later tool-result message can
	// be attributed when it carries no explicit name.
	tool_call_ids := make(map[string]string)
	var system_content []string
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			// System messages are collected and emitted as systemInstruction.
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			// Tool results become functionResponse parts appended to the
			// current "user" turn (creating one if needed).
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object.
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. On failure, try a JSON array and wrap it in an object.
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. Otherwise treat it as plain text.
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &FunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []GeminiPart
		content := GeminiChatContent{
			Role: message.Role,
		}
		if message.ToolCalls != nil {
			// Assistant tool calls become functionCall parts; each call ID is
			// remembered so later tool results can resolve the function name.
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := GeminiPart{
					FunctionCall: &FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				parts = append(parts, GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Remote URL: download and convert to base64 inline data.
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					fileData, err := service.GetFileBase64FromUrl(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Validate the MIME type against the Gemini whitelist.
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						return nil, fmt.Errorf("MIME type '%s' from URL '%s' is not supported by Gemini. Supported types are: %v", fileData.MimeType, part.GetImageMedia().Url, getSupportedMimeTypesList())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: fileData.MimeType, // keep original casing; it may matter to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					// Data URL / raw base64 payload.
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				base64String, err := service.DecodeBase64AudioData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: "audio/" + part.GetInputAudio().Format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// Gemini has no "assistant" role; the API rejects any role other than
		// "user" or "model".
		if content.Role == "assistant" {
			content.Role = "model"
		}
		geminiRequest.Contents = append(geminiRequest.Contents, content)
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &GeminiChatContent{
			Parts: []GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
  330. // Helper function to get a list of supported MIME types for error messages
  331. func getSupportedMimeTypesList() []string {
  332. keys := make([]string, 0, len(geminiSupportedMimeTypes))
  333. for k := range geminiSupportedMimeTypes {
  334. keys = append(keys, k)
  335. }
  336. return keys
  337. }
  338. // cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
  339. func cleanFunctionParameters(params interface{}) interface{} {
  340. if params == nil {
  341. return nil
  342. }
  343. switch v := params.(type) {
  344. case map[string]interface{}:
  345. // Create a copy to avoid modifying the original
  346. cleanedMap := make(map[string]interface{})
  347. for k, val := range v {
  348. cleanedMap[k] = val
  349. }
  350. // Remove unsupported root-level fields
  351. delete(cleanedMap, "default")
  352. delete(cleanedMap, "exclusiveMaximum")
  353. delete(cleanedMap, "exclusiveMinimum")
  354. delete(cleanedMap, "$schema")
  355. delete(cleanedMap, "additionalProperties")
  356. // Check and clean 'format' for string types
  357. if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
  358. if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
  359. if formatValue != "enum" && formatValue != "date-time" {
  360. delete(cleanedMap, "format")
  361. }
  362. }
  363. }
  364. // Clean properties
  365. if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
  366. cleanedProps := make(map[string]interface{})
  367. for propName, propValue := range props {
  368. cleanedProps[propName] = cleanFunctionParameters(propValue)
  369. }
  370. cleanedMap["properties"] = cleanedProps
  371. }
  372. // Recursively clean items in arrays
  373. if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
  374. cleanedMap["items"] = cleanFunctionParameters(items)
  375. }
  376. // Also handle items if it's an array of schemas
  377. if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
  378. cleanedItemsArray := make([]interface{}, len(itemsArray))
  379. for i, item := range itemsArray {
  380. cleanedItemsArray[i] = cleanFunctionParameters(item)
  381. }
  382. cleanedMap["items"] = cleanedItemsArray
  383. }
  384. // Recursively clean other schema composition keywords
  385. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  386. if nested, ok := cleanedMap[field].([]interface{}); ok {
  387. cleanedNested := make([]interface{}, len(nested))
  388. for i, item := range nested {
  389. cleanedNested[i] = cleanFunctionParameters(item)
  390. }
  391. cleanedMap[field] = cleanedNested
  392. }
  393. }
  394. // Recursively clean patternProperties
  395. if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
  396. cleanedPatternProps := make(map[string]interface{})
  397. for pattern, schema := range patternProps {
  398. cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
  399. }
  400. cleanedMap["patternProperties"] = cleanedPatternProps
  401. }
  402. // Recursively clean definitions
  403. if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
  404. cleanedDefinitions := make(map[string]interface{})
  405. for defName, defSchema := range definitions {
  406. cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
  407. }
  408. cleanedMap["definitions"] = cleanedDefinitions
  409. }
  410. // Recursively clean $defs (newer JSON Schema draft)
  411. if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
  412. cleanedDefs := make(map[string]interface{})
  413. for defName, defSchema := range defs {
  414. cleanedDefs[defName] = cleanFunctionParameters(defSchema)
  415. }
  416. cleanedMap["$defs"] = cleanedDefs
  417. }
  418. // Clean conditional keywords
  419. for _, field := range []string{"if", "then", "else", "not"} {
  420. if nested, ok := cleanedMap[field]; ok {
  421. cleanedMap[field] = cleanFunctionParameters(nested)
  422. }
  423. }
  424. return cleanedMap
  425. case []interface{}:
  426. // Handle arrays of schemas
  427. cleanedArray := make([]interface{}, len(v))
  428. for i, item := range v {
  429. cleanedArray[i] = cleanFunctionParameters(item)
  430. }
  431. return cleanedArray
  432. default:
  433. // Not a map or array, return as is (e.g., could be a primitive)
  434. return params
  435. }
  436. }
  437. func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
  438. if depth >= 5 {
  439. return schema
  440. }
  441. v, ok := schema.(map[string]interface{})
  442. if !ok || len(v) == 0 {
  443. return schema
  444. }
  445. // 删除所有的title字段
  446. delete(v, "title")
  447. delete(v, "$schema")
  448. // 如果type不为object和array,则直接返回
  449. if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
  450. return schema
  451. }
  452. switch v["type"] {
  453. case "object":
  454. delete(v, "additionalProperties")
  455. // 处理 properties
  456. if properties, ok := v["properties"].(map[string]interface{}); ok {
  457. for key, value := range properties {
  458. properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
  459. }
  460. }
  461. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  462. if nested, ok := v[field].([]interface{}); ok {
  463. for i, item := range nested {
  464. nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
  465. }
  466. }
  467. }
  468. case "array":
  469. if items, ok := v["items"].(map[string]interface{}); ok {
  470. v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
  471. }
  472. }
  473. return v
  474. }
  475. func unescapeString(s string) (string, error) {
  476. var result []rune
  477. escaped := false
  478. i := 0
  479. for i < len(s) {
  480. r, size := utf8.DecodeRuneInString(s[i:]) // 正确解码UTF-8字符
  481. if r == utf8.RuneError {
  482. return "", fmt.Errorf("invalid UTF-8 encoding")
  483. }
  484. if escaped {
  485. // 如果是转义符后的字符,检查其类型
  486. switch r {
  487. case '"':
  488. result = append(result, '"')
  489. case '\\':
  490. result = append(result, '\\')
  491. case '/':
  492. result = append(result, '/')
  493. case 'b':
  494. result = append(result, '\b')
  495. case 'f':
  496. result = append(result, '\f')
  497. case 'n':
  498. result = append(result, '\n')
  499. case 'r':
  500. result = append(result, '\r')
  501. case 't':
  502. result = append(result, '\t')
  503. case '\'':
  504. result = append(result, '\'')
  505. default:
  506. // 如果遇到一个非法的转义字符,直接按原样输出
  507. result = append(result, '\\', r)
  508. }
  509. escaped = false
  510. } else {
  511. if r == '\\' {
  512. escaped = true // 记录反斜杠作为转义符
  513. } else {
  514. result = append(result, r)
  515. }
  516. }
  517. i += size // 移动到下一个字符
  518. }
  519. return string(result), nil
  520. }
  521. func unescapeMapOrSlice(data interface{}) interface{} {
  522. switch v := data.(type) {
  523. case map[string]interface{}:
  524. for k, val := range v {
  525. v[k] = unescapeMapOrSlice(val)
  526. }
  527. case []interface{}:
  528. for i, val := range v {
  529. v[i] = unescapeMapOrSlice(val)
  530. }
  531. case string:
  532. if unescaped, err := unescapeString(v); err != nil {
  533. return v
  534. } else {
  535. return unescaped
  536. }
  537. }
  538. return data
  539. }
  540. func getResponseToolCall(item *GeminiPart) *dto.ToolCallResponse {
  541. var argsBytes []byte
  542. var err error
  543. if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
  544. argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
  545. } else {
  546. argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
  547. }
  548. if err != nil {
  549. return nil
  550. }
  551. return &dto.ToolCallResponse{
  552. ID: fmt.Sprintf("call_%s", common.GetUUID()),
  553. Type: "function",
  554. Function: dto.FunctionResponse{
  555. Arguments: string(argsBytes),
  556. Name: item.FunctionCall.FunctionName,
  557. },
  558. }
  559. }
  560. func responseGeminiChat2OpenAI(response *GeminiChatResponse) *dto.OpenAITextResponse {
  561. fullTextResponse := dto.OpenAITextResponse{
  562. Id: fmt.Sprintf("chatcmpl-%s", common.GetUUID()),
  563. Object: "chat.completion",
  564. Created: common.GetTimestamp(),
  565. Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
  566. }
  567. isToolCall := false
  568. for _, candidate := range response.Candidates {
  569. choice := dto.OpenAITextResponseChoice{
  570. Index: int(candidate.Index),
  571. Message: dto.Message{
  572. Role: "assistant",
  573. Content: "",
  574. },
  575. FinishReason: constant.FinishReasonStop,
  576. }
  577. if len(candidate.Content.Parts) > 0 {
  578. var texts []string
  579. var toolCalls []dto.ToolCallResponse
  580. for _, part := range candidate.Content.Parts {
  581. if part.FunctionCall != nil {
  582. choice.FinishReason = constant.FinishReasonToolCalls
  583. if call := getResponseToolCall(&part); call != nil {
  584. toolCalls = append(toolCalls, *call)
  585. }
  586. } else if part.Thought {
  587. choice.Message.ReasoningContent = part.Text
  588. } else {
  589. if part.ExecutableCode != nil {
  590. texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
  591. } else if part.CodeExecutionResult != nil {
  592. texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
  593. } else {
  594. // 过滤掉空行
  595. if part.Text != "\n" {
  596. texts = append(texts, part.Text)
  597. }
  598. }
  599. }
  600. }
  601. if len(toolCalls) > 0 {
  602. choice.Message.SetToolCalls(toolCalls)
  603. isToolCall = true
  604. }
  605. choice.Message.SetStringContent(strings.Join(texts, "\n"))
  606. }
  607. if candidate.FinishReason != nil {
  608. switch *candidate.FinishReason {
  609. case "STOP":
  610. choice.FinishReason = constant.FinishReasonStop
  611. case "MAX_TOKENS":
  612. choice.FinishReason = constant.FinishReasonLength
  613. default:
  614. choice.FinishReason = constant.FinishReasonContentFilter
  615. }
  616. }
  617. if isToolCall {
  618. choice.FinishReason = constant.FinishReasonToolCalls
  619. }
  620. fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
  621. }
  622. return &fullTextResponse
  623. }
// streamResponseGeminiChat2OpenAI converts one streamed Gemini chunk into an
// OpenAI chat.completion.chunk. It returns the converted chunk, whether the
// candidate signalled a natural STOP (the caller emits a dedicated stop
// chunk), and whether inline image data was seen (used upstream for
// completion-token estimation).
func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	hasImage := false
	for _, candidate := range geminiResponse.Candidates {
		// A natural STOP is stripped here and reported via isStop so the
		// caller can send the finish chunk itself.
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		if candidate.FinishReason != nil {
			// NOTE(review): the "STOP" case is unreachable here because it was
			// cleared above; kept for symmetry with the non-stream path.
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				// Inline images are surfaced to the client as markdown data URLs.
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
					hasImage = true
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					// Skip bare newline chunks.
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		// NOTE(review): a chunk mixing thought and regular parts is emitted
		// entirely as reasoning content — assumes chunks are homogeneous;
		// confirm against observed Gemini stream chunks.
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop, hasImage
}
// GeminiChatStreamHandler consumes a Gemini SSE stream, re-emits each chunk
// in OpenAI format, and accumulates usage from the usageMetadata carried on
// the chunks. Returns the final usage for billing.
func GeminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	id := fmt.Sprintf("chatcmpl-%s", common.GetUUID())
	createAt := common.GetTimestamp()
	var usage = &dto.Usage{}
	var imageCount int
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse GeminiChatResponse
		err := common.DecodeJsonStr(data, &geminiResponse)
		if err != nil {
			common.LogError(c, "error unmarshalling stream response: "+err.Error())
			return false
		}
		response, isStop, hasImage := streamResponseGeminiChat2OpenAI(&geminiResponse)
		if hasImage {
			imageCount++
		}
		response.Id = id
		response.Created = createAt
		response.Model = info.UpstreamModelName
		// Usage metadata may arrive on any chunk; keep the latest non-zero total.
		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
				if detail.Modality == "AUDIO" {
					usage.PromptTokensDetails.AudioTokens = detail.TokenCount
				} else if detail.Modality == "TEXT" {
					usage.PromptTokensDetails.TextTokens = detail.TokenCount
				}
			}
		}
		err = helper.ObjectData(c, response)
		if err != nil {
			common.LogError(c, err.Error())
		}
		if isStop {
			// Natural stop: emit the dedicated finish chunk.
			response := helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
			helper.ObjectData(c, response)
		}
		return true
	})
	var response *dto.ChatCompletionsStreamResponse
	// Fallback estimate of ~258 tokens per generated image when upstream
	// reported no completion tokens.
	// NOTE(review): this fallback is clobbered by the Total-Prompt
	// recomputation two lines below — confirm the intended ordering.
	if imageCount != 0 {
		if usage.CompletionTokens == 0 {
			usage.CompletionTokens = imageCount * 258
		}
	}
	// NOTE(review): this overwrites the per-modality TextTokens collected in
	// the loop above with the full prompt count — verify that is intentional.
	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	if info.ShouldIncludeUsage {
		response = helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
		err := helper.ObjectData(c, response)
		if err != nil {
			common.SysError("send final response failed: " + err.Error())
		}
	}
	helper.Done(c)
	return nil, usage
}
// GeminiChatHandler handles a non-streaming Gemini response: it decodes the
// upstream body, converts it to an OpenAI chat completion, derives usage
// from usageMetadata, and writes the JSON response through to the client.
func GeminiChatHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
	}
	err = resp.Body.Close()
	if err != nil {
		return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	if common.DebugEnabled {
		println(string(responseBody))
	}
	var geminiResponse GeminiChatResponse
	err = common.DecodeJson(responseBody, &geminiResponse)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
	}
	// An empty candidate list is treated as an upstream server error.
	if len(geminiResponse.Candidates) == 0 {
		return &dto.OpenAIErrorWithStatusCode{
			Error: dto.OpenAIError{
				Message: "No candidates returned",
				Type:    "server_error",
				Param:   "",
				Code:    500,
			},
			StatusCode: resp.StatusCode,
		}, nil
	}
	fullTextResponse := responseGeminiChat2OpenAI(&geminiResponse)
	fullTextResponse.Model = info.UpstreamModelName
	usage := dto.Usage{
		PromptTokens:     geminiResponse.UsageMetadata.PromptTokenCount,
		CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
		TotalTokens:      geminiResponse.UsageMetadata.TotalTokenCount,
	}
	usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
	// Completion tokens are recomputed so thinking tokens are included.
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
		if detail.Modality == "AUDIO" {
			usage.PromptTokensDetails.AudioTokens = detail.TokenCount
		} else if detail.Modality == "TEXT" {
			usage.PromptTokensDetails.TextTokens = detail.TokenCount
		}
	}
	fullTextResponse.Usage = usage
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	// NOTE(review): the write error is assigned but not acted upon — the
	// client already received the status line, so it is best-effort.
	_, err = c.Writer.Write(jsonResponse)
	return nil, &usage
}
  812. func GeminiEmbeddingHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *dto.OpenAIErrorWithStatusCode) {
  813. responseBody, readErr := io.ReadAll(resp.Body)
  814. if readErr != nil {
  815. return nil, service.OpenAIErrorWrapper(readErr, "read_response_body_failed", http.StatusInternalServerError)
  816. }
  817. _ = resp.Body.Close()
  818. var geminiResponse GeminiEmbeddingResponse
  819. if jsonErr := json.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  820. return nil, service.OpenAIErrorWrapper(jsonErr, "unmarshal_response_body_failed", http.StatusInternalServerError)
  821. }
  822. // convert to openai format response
  823. openAIResponse := dto.OpenAIEmbeddingResponse{
  824. Object: "list",
  825. Data: []dto.OpenAIEmbeddingResponseItem{
  826. {
  827. Object: "embedding",
  828. Embedding: geminiResponse.Embedding.Values,
  829. Index: 0,
  830. },
  831. },
  832. Model: info.UpstreamModelName,
  833. }
  834. // calculate usage
  835. // https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
  836. // Google has not yet clarified how embedding models will be billed
  837. // refer to openai billing method to use input tokens billing
  838. // https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
  839. usage = &dto.Usage{
  840. PromptTokens: info.PromptTokens,
  841. CompletionTokens: 0,
  842. TotalTokens: info.PromptTokens,
  843. }
  844. openAIResponse.Usage = *usage.(*dto.Usage)
  845. jsonResponse, jsonErr := json.Marshal(openAIResponse)
  846. if jsonErr != nil {
  847. return nil, service.OpenAIErrorWrapper(jsonErr, "marshal_response_failed", http.StatusInternalServerError)
  848. }
  849. c.Writer.Header().Set("Content-Type", "application/json")
  850. c.Writer.WriteHeader(resp.StatusCode)
  851. _, _ = c.Writer.Write(jsonResponse)
  852. return usage, nil
  853. }