// relay-gemini.go
  1. package gemini
import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"

	"github.com/gin-gonic/gin"

	"one-api/common"
	"one-api/constant"
	"one-api/dto"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting/model_setting"
)
// geminiSupportedMimeTypes is the whitelist of MIME types accepted as inline
// data. It is consulted (with the type lower-cased) before forwarding files
// fetched from URLs; unsupported types are rejected with an error.
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
  36. // Gemini 允许的思考预算范围
  37. const (
  38. pro25MinBudget = 128
  39. pro25MaxBudget = 32768
  40. flash25MaxBudget = 24576
  41. flash25LiteMinBudget = 512
  42. flash25LiteMaxBudget = 24576
  43. )
  44. // clampThinkingBudget 根据模型名称将预算限制在允许的范围内
  45. func clampThinkingBudget(modelName string, budget int) int {
  46. isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
  47. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
  48. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
  49. is25FlashLite := strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
  50. if is25FlashLite {
  51. if budget < flash25LiteMinBudget {
  52. return flash25LiteMinBudget
  53. }
  54. if budget > flash25LiteMaxBudget {
  55. return flash25LiteMaxBudget
  56. }
  57. } else if isNew25Pro {
  58. if budget < pro25MinBudget {
  59. return pro25MinBudget
  60. }
  61. if budget > pro25MaxBudget {
  62. return pro25MaxBudget
  63. }
  64. } else { // 其他模型
  65. if budget < 0 {
  66. return 0
  67. }
  68. if budget > flash25MaxBudget {
  69. return flash25MaxBudget
  70. }
  71. }
  72. return budget
  73. }
// CovertGemini2OpenAI converts an OpenAI-format chat request into a Gemini
// generateContent request: generation config, optional thinking-budget
// adaptation driven by model-name suffixes, safety settings, tool
// declarations, JSON response schema, and message/part translation.
// Setting safety to the lowest possible values since Gemini is already powerless enough
func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*GeminiChatRequest, error) {
	geminiRequest := GeminiChatRequest{
		Contents: make([]GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.MaxTokens,
			Seed:            int64(textRequest.Seed),
		},
	}
	// Models with image-output support must request both TEXT and IMAGE modalities.
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	// Thinking adapter: the client-facing model name may carry a
	// "-thinking-<N>", "-thinking" or "-nothinking" suffix that is translated
	// into a Gemini ThinkingConfig here.
	if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
		modelName := info.OriginModelName
		isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
		is25FlashLite := strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
		if strings.Contains(modelName, "-thinking-") {
			// Explicit budget: "-thinking-<tokens>", clamped to the model's range.
			parts := strings.SplitN(modelName, "-thinking-", 2)
			if len(parts) == 2 && parts[1] != "" {
				if budgetTokens, err := strconv.Atoi(parts[1]); err == nil {
					clampedBudget := clampThinkingBudget(modelName, budgetTokens)
					geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
						ThinkingBudget:  common.GetPointer(clampedBudget),
						IncludeThoughts: true,
					}
				}
			}
		} else if strings.HasSuffix(modelName, "-thinking") {
			// Bare "-thinking": derive the budget from the configured percentage
			// of MaxOutputTokens, except for preview snapshots that take no budget.
			unsupportedModels := []string{
				"gemini-2.5-pro-preview-05-06",
				"gemini-2.5-pro-preview-03-25",
			}
			isUnsupported := false
			for _, unsupportedModel := range unsupportedModels {
				if strings.HasPrefix(modelName, unsupportedModel) {
					isUnsupported = true
					break
				}
			}
			if isUnsupported {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					IncludeThoughts: true,
				}
			} else {
				budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
				clampedBudget := clampThinkingBudget(modelName, int(budgetTokens))
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget:  common.GetPointer(clampedBudget),
					IncludeThoughts: true,
				}
			}
		} else if strings.HasSuffix(modelName, "-nothinking") {
			// "-nothinking": a zero budget disables thinking; 2.5 Pro and
			// 2.5 Flash Lite enforce a non-zero minimum, so skip them.
			if !isNew25Pro && !is25FlashLite {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget: common.GetPointer(0),
				}
			}
		}
	}
	// Attach the configured threshold for every safety category.
	safetySettings := make([]GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		for _, tool := range textRequest.Tools {
			// Pseudo-tools googleSearch/codeExecution map to native Gemini
			// tools rather than function declarations.
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			// An empty "properties" object is rejected upstream; drop the
			// parameters entirely in that case.
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		if codeExecution {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		// common.SysLog("tools: " + fmt.Sprintf("%+v", geminiRequest.Tools))
		// json_data, _ := json.Marshal(geminiRequest.Tools)
		// common.SysLog("tools_json: " + string(json_data))
	}
	// Map OpenAI response_format onto Gemini's JSON mime type / response schema.
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if textRequest.ResponseFormat.JsonSchema != nil && textRequest.ResponseFormat.JsonSchema.Schema != nil {
			cleanedSchema := removeAdditionalPropertiesWithDepth(textRequest.ResponseFormat.JsonSchema.Schema, 0)
			geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
		}
	}
	// tool_call_ids remembers call-id -> function-name so later "tool" role
	// messages can be matched back to the function they answer.
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			// System messages are collected and emitted as systemInstruction below.
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			// Tool results become functionResponse parts on a "user" content;
			// reuse the trailing user content if one is already open.
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object.
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. On failure, try to parse it as a JSON array.
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					// Arrays are wrapped in an object.
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. Otherwise treat it as plain text.
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &FunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []GeminiPart
		content := GeminiChatContent{
			Role: message.Role,
		}
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := GeminiPart{
					FunctionCall: &FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				parts = append(parts, GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Decide whether the image is a remote URL or inline base64 data.
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					// URL: fetch the file and obtain its MIME type and base64 data.
					fileData, err := service.GetFileBase64FromUrl(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Validate the MIME type against the Gemini whitelist.
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						url := part.GetImageMedia().Url
						return nil, fmt.Errorf("mime type is not supported by Gemini: '%s', url: '%s', supported types are: %v", fileData.MimeType, url, getSupportedMimeTypesList())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: fileData.MimeType, // keep the original casing; it may be significant to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				base64String, err := service.DecodeBase64AudioData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: "audio/" + part.GetInputAudio().Format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// there's no assistant role in gemini and API shall vomit if Role is not user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		if len(content.Parts) > 0 {
			geminiRequest.Contents = append(geminiRequest.Contents, content)
		}
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &GeminiChatContent{
			Parts: []GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
  366. // Helper function to get a list of supported MIME types for error messages
  367. func getSupportedMimeTypesList() []string {
  368. keys := make([]string, 0, len(geminiSupportedMimeTypes))
  369. for k := range geminiSupportedMimeTypes {
  370. keys = append(keys, k)
  371. }
  372. return keys
  373. }
  374. // cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
  375. func cleanFunctionParameters(params interface{}) interface{} {
  376. if params == nil {
  377. return nil
  378. }
  379. switch v := params.(type) {
  380. case map[string]interface{}:
  381. // Create a copy to avoid modifying the original
  382. cleanedMap := make(map[string]interface{})
  383. for k, val := range v {
  384. cleanedMap[k] = val
  385. }
  386. // Remove unsupported root-level fields
  387. delete(cleanedMap, "default")
  388. delete(cleanedMap, "exclusiveMaximum")
  389. delete(cleanedMap, "exclusiveMinimum")
  390. delete(cleanedMap, "$schema")
  391. delete(cleanedMap, "additionalProperties")
  392. // Check and clean 'format' for string types
  393. if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
  394. if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
  395. if formatValue != "enum" && formatValue != "date-time" {
  396. delete(cleanedMap, "format")
  397. }
  398. }
  399. }
  400. // Clean properties
  401. if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
  402. cleanedProps := make(map[string]interface{})
  403. for propName, propValue := range props {
  404. cleanedProps[propName] = cleanFunctionParameters(propValue)
  405. }
  406. cleanedMap["properties"] = cleanedProps
  407. }
  408. // Recursively clean items in arrays
  409. if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
  410. cleanedMap["items"] = cleanFunctionParameters(items)
  411. }
  412. // Also handle items if it's an array of schemas
  413. if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
  414. cleanedItemsArray := make([]interface{}, len(itemsArray))
  415. for i, item := range itemsArray {
  416. cleanedItemsArray[i] = cleanFunctionParameters(item)
  417. }
  418. cleanedMap["items"] = cleanedItemsArray
  419. }
  420. // Recursively clean other schema composition keywords
  421. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  422. if nested, ok := cleanedMap[field].([]interface{}); ok {
  423. cleanedNested := make([]interface{}, len(nested))
  424. for i, item := range nested {
  425. cleanedNested[i] = cleanFunctionParameters(item)
  426. }
  427. cleanedMap[field] = cleanedNested
  428. }
  429. }
  430. // Recursively clean patternProperties
  431. if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
  432. cleanedPatternProps := make(map[string]interface{})
  433. for pattern, schema := range patternProps {
  434. cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
  435. }
  436. cleanedMap["patternProperties"] = cleanedPatternProps
  437. }
  438. // Recursively clean definitions
  439. if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
  440. cleanedDefinitions := make(map[string]interface{})
  441. for defName, defSchema := range definitions {
  442. cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
  443. }
  444. cleanedMap["definitions"] = cleanedDefinitions
  445. }
  446. // Recursively clean $defs (newer JSON Schema draft)
  447. if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
  448. cleanedDefs := make(map[string]interface{})
  449. for defName, defSchema := range defs {
  450. cleanedDefs[defName] = cleanFunctionParameters(defSchema)
  451. }
  452. cleanedMap["$defs"] = cleanedDefs
  453. }
  454. // Clean conditional keywords
  455. for _, field := range []string{"if", "then", "else", "not"} {
  456. if nested, ok := cleanedMap[field]; ok {
  457. cleanedMap[field] = cleanFunctionParameters(nested)
  458. }
  459. }
  460. return cleanedMap
  461. case []interface{}:
  462. // Handle arrays of schemas
  463. cleanedArray := make([]interface{}, len(v))
  464. for i, item := range v {
  465. cleanedArray[i] = cleanFunctionParameters(item)
  466. }
  467. return cleanedArray
  468. default:
  469. // Not a map or array, return as is (e.g., could be a primitive)
  470. return params
  471. }
  472. }
  473. func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
  474. if depth >= 5 {
  475. return schema
  476. }
  477. v, ok := schema.(map[string]interface{})
  478. if !ok || len(v) == 0 {
  479. return schema
  480. }
  481. // 删除所有的title字段
  482. delete(v, "title")
  483. delete(v, "$schema")
  484. // 如果type不为object和array,则直接返回
  485. if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
  486. return schema
  487. }
  488. switch v["type"] {
  489. case "object":
  490. delete(v, "additionalProperties")
  491. // 处理 properties
  492. if properties, ok := v["properties"].(map[string]interface{}); ok {
  493. for key, value := range properties {
  494. properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
  495. }
  496. }
  497. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  498. if nested, ok := v[field].([]interface{}); ok {
  499. for i, item := range nested {
  500. nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
  501. }
  502. }
  503. }
  504. case "array":
  505. if items, ok := v["items"].(map[string]interface{}); ok {
  506. v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
  507. }
  508. }
  509. return v
  510. }
  511. func unescapeString(s string) (string, error) {
  512. var result []rune
  513. escaped := false
  514. i := 0
  515. for i < len(s) {
  516. r, size := utf8.DecodeRuneInString(s[i:]) // 正确解码UTF-8字符
  517. if r == utf8.RuneError {
  518. return "", fmt.Errorf("invalid UTF-8 encoding")
  519. }
  520. if escaped {
  521. // 如果是转义符后的字符,检查其类型
  522. switch r {
  523. case '"':
  524. result = append(result, '"')
  525. case '\\':
  526. result = append(result, '\\')
  527. case '/':
  528. result = append(result, '/')
  529. case 'b':
  530. result = append(result, '\b')
  531. case 'f':
  532. result = append(result, '\f')
  533. case 'n':
  534. result = append(result, '\n')
  535. case 'r':
  536. result = append(result, '\r')
  537. case 't':
  538. result = append(result, '\t')
  539. case '\'':
  540. result = append(result, '\'')
  541. default:
  542. // 如果遇到一个非法的转义字符,直接按原样输出
  543. result = append(result, '\\', r)
  544. }
  545. escaped = false
  546. } else {
  547. if r == '\\' {
  548. escaped = true // 记录反斜杠作为转义符
  549. } else {
  550. result = append(result, r)
  551. }
  552. }
  553. i += size // 移动到下一个字符
  554. }
  555. return string(result), nil
  556. }
  557. func unescapeMapOrSlice(data interface{}) interface{} {
  558. switch v := data.(type) {
  559. case map[string]interface{}:
  560. for k, val := range v {
  561. v[k] = unescapeMapOrSlice(val)
  562. }
  563. case []interface{}:
  564. for i, val := range v {
  565. v[i] = unescapeMapOrSlice(val)
  566. }
  567. case string:
  568. if unescaped, err := unescapeString(v); err != nil {
  569. return v
  570. } else {
  571. return unescaped
  572. }
  573. }
  574. return data
  575. }
  576. func getResponseToolCall(item *GeminiPart) *dto.ToolCallResponse {
  577. var argsBytes []byte
  578. var err error
  579. if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
  580. argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
  581. } else {
  582. argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
  583. }
  584. if err != nil {
  585. return nil
  586. }
  587. return &dto.ToolCallResponse{
  588. ID: fmt.Sprintf("call_%s", common.GetUUID()),
  589. Type: "function",
  590. Function: dto.FunctionResponse{
  591. Arguments: string(argsBytes),
  592. Name: item.FunctionCall.FunctionName,
  593. },
  594. }
  595. }
  596. func responseGeminiChat2OpenAI(c *gin.Context, response *GeminiChatResponse) *dto.OpenAITextResponse {
  597. fullTextResponse := dto.OpenAITextResponse{
  598. Id: helper.GetResponseID(c),
  599. Object: "chat.completion",
  600. Created: common.GetTimestamp(),
  601. Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
  602. }
  603. isToolCall := false
  604. for _, candidate := range response.Candidates {
  605. choice := dto.OpenAITextResponseChoice{
  606. Index: int(candidate.Index),
  607. Message: dto.Message{
  608. Role: "assistant",
  609. Content: "",
  610. },
  611. FinishReason: constant.FinishReasonStop,
  612. }
  613. if len(candidate.Content.Parts) > 0 {
  614. var texts []string
  615. var toolCalls []dto.ToolCallResponse
  616. for _, part := range candidate.Content.Parts {
  617. if part.FunctionCall != nil {
  618. choice.FinishReason = constant.FinishReasonToolCalls
  619. if call := getResponseToolCall(&part); call != nil {
  620. toolCalls = append(toolCalls, *call)
  621. }
  622. } else if part.Thought {
  623. choice.Message.ReasoningContent = part.Text
  624. } else {
  625. if part.ExecutableCode != nil {
  626. texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
  627. } else if part.CodeExecutionResult != nil {
  628. texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
  629. } else {
  630. // 过滤掉空行
  631. if part.Text != "\n" {
  632. texts = append(texts, part.Text)
  633. }
  634. }
  635. }
  636. }
  637. if len(toolCalls) > 0 {
  638. choice.Message.SetToolCalls(toolCalls)
  639. isToolCall = true
  640. }
  641. choice.Message.SetStringContent(strings.Join(texts, "\n"))
  642. }
  643. if candidate.FinishReason != nil {
  644. switch *candidate.FinishReason {
  645. case "STOP":
  646. choice.FinishReason = constant.FinishReasonStop
  647. case "MAX_TOKENS":
  648. choice.FinishReason = constant.FinishReasonLength
  649. default:
  650. choice.FinishReason = constant.FinishReasonContentFilter
  651. }
  652. }
  653. if isToolCall {
  654. choice.FinishReason = constant.FinishReasonToolCalls
  655. }
  656. fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
  657. }
  658. return &fullTextResponse
  659. }
// streamResponseGeminiChat2OpenAI converts one streamed Gemini chunk into an
// OpenAI chat.completion.chunk. It returns the chunk, whether the upstream
// signalled STOP, and whether any inline image data was emitted in this chunk.
func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	hasImage := false
	for _, candidate := range geminiResponse.Candidates {
		// STOP is reported to the caller, which emits a dedicated stop chunk;
		// clear it here so the switch below does not also set a finish reason.
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		// Map any remaining (non-STOP) finish reason to its OpenAI equivalent.
		if candidate.FinishReason != nil {
			// p := GeminiConvertFinishReason(*candidate.FinishReason)
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				// Inline image data is surfaced as a markdown data URI.
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
					hasImage = true
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					// Drop bare newline chunks.
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		// NOTE(review): if a single chunk mixes thought and regular text parts,
		// all collected text is routed to reasoning content — confirm intended.
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop, hasImage
}
// GeminiChatStreamHandler consumes a streaming Gemini response, re-emits each
// chunk as an OpenAI chat.completion.chunk SSE event on c, and returns the
// accumulated usage. The error return is always nil in the current code.
func GeminiChatStreamHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
	// responseText := ""
	id := helper.GetResponseID(c)
	createAt := common.GetTimestamp()
	var usage = &dto.Usage{}
	var imageCount int
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse GeminiChatResponse
		err := common.DecodeJsonStr(data, &geminiResponse)
		if err != nil {
			common.LogError(c, "error unmarshalling stream response: "+err.Error())
			return false
		}
		response, isStop, hasImage := streamResponseGeminiChat2OpenAI(&geminiResponse)
		if hasImage {
			imageCount++
		}
		response.Id = id
		response.Created = createAt
		response.Model = info.UpstreamModelName
		// Usage metadata may arrive on multiple chunks; keep the latest
		// chunk's totals whenever they are non-zero.
		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
				if detail.Modality == "AUDIO" {
					usage.PromptTokensDetails.AudioTokens = detail.TokenCount
				} else if detail.Modality == "TEXT" {
					usage.PromptTokensDetails.TextTokens = detail.TokenCount
				}
			}
		}
		err = helper.ObjectData(c, response)
		if err != nil {
			common.LogError(c, err.Error())
		}
		if isStop {
			// Emit a dedicated finish chunk once the upstream reports STOP.
			response := helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
			helper.ObjectData(c, response)
		}
		return true
	})
	var response *dto.ChatCompletionsStreamResponse
	// Estimate image output cost (258 tokens per image) when the upstream
	// reported no completion tokens at all.
	if imageCount != 0 {
		if usage.CompletionTokens == 0 {
			usage.CompletionTokens = imageCount * 258
		}
	}
	// NOTE(review): this overwrites the per-modality TextTokens recorded in
	// the loop above with the full prompt count — confirm that is intended.
	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
	// NOTE(review): this recomputation also discards the image-based estimate
	// assigned just above whenever TotalTokens was reported — confirm intended.
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	if info.ShouldIncludeUsage {
		response = helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
		err := helper.ObjectData(c, response)
		if err != nil {
			common.SysError("send final response failed: " + err.Error())
		}
	}
	helper.Done(c)
	//resp.Body.Close()
	return nil, usage
}
  794. func GeminiChatHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (*dto.OpenAIErrorWithStatusCode, *dto.Usage) {
  795. responseBody, err := io.ReadAll(resp.Body)
  796. if err != nil {
  797. return service.OpenAIErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
  798. }
  799. err = resp.Body.Close()
  800. if err != nil {
  801. return service.OpenAIErrorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
  802. }
  803. if common.DebugEnabled {
  804. println(string(responseBody))
  805. }
  806. var geminiResponse GeminiChatResponse
  807. err = common.DecodeJson(responseBody, &geminiResponse)
  808. if err != nil {
  809. return service.OpenAIErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
  810. }
  811. if len(geminiResponse.Candidates) == 0 {
  812. return &dto.OpenAIErrorWithStatusCode{
  813. Error: dto.OpenAIError{
  814. Message: "No candidates returned",
  815. Type: "server_error",
  816. Param: "",
  817. Code: 500,
  818. },
  819. StatusCode: resp.StatusCode,
  820. }, nil
  821. }
  822. fullTextResponse := responseGeminiChat2OpenAI(c, &geminiResponse)
  823. fullTextResponse.Model = info.UpstreamModelName
  824. usage := dto.Usage{
  825. PromptTokens: geminiResponse.UsageMetadata.PromptTokenCount,
  826. CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
  827. TotalTokens: geminiResponse.UsageMetadata.TotalTokenCount,
  828. }
  829. usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
  830. usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
  831. for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
  832. if detail.Modality == "AUDIO" {
  833. usage.PromptTokensDetails.AudioTokens = detail.TokenCount
  834. } else if detail.Modality == "TEXT" {
  835. usage.PromptTokensDetails.TextTokens = detail.TokenCount
  836. }
  837. }
  838. fullTextResponse.Usage = usage
  839. jsonResponse, err := json.Marshal(fullTextResponse)
  840. if err != nil {
  841. return service.OpenAIErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
  842. }
  843. c.Writer.Header().Set("Content-Type", "application/json")
  844. c.Writer.WriteHeader(resp.StatusCode)
  845. _, err = c.Writer.Write(jsonResponse)
  846. return nil, &usage
  847. }
  848. func GeminiEmbeddingHandler(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *dto.OpenAIErrorWithStatusCode) {
  849. responseBody, readErr := io.ReadAll(resp.Body)
  850. if readErr != nil {
  851. return nil, service.OpenAIErrorWrapper(readErr, "read_response_body_failed", http.StatusInternalServerError)
  852. }
  853. _ = resp.Body.Close()
  854. var geminiResponse GeminiEmbeddingResponse
  855. if jsonErr := json.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  856. return nil, service.OpenAIErrorWrapper(jsonErr, "unmarshal_response_body_failed", http.StatusInternalServerError)
  857. }
  858. // convert to openai format response
  859. openAIResponse := dto.OpenAIEmbeddingResponse{
  860. Object: "list",
  861. Data: []dto.OpenAIEmbeddingResponseItem{
  862. {
  863. Object: "embedding",
  864. Embedding: geminiResponse.Embedding.Values,
  865. Index: 0,
  866. },
  867. },
  868. Model: info.UpstreamModelName,
  869. }
  870. // calculate usage
  871. // https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
  872. // Google has not yet clarified how embedding models will be billed
  873. // refer to openai billing method to use input tokens billing
  874. // https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
  875. usage = &dto.Usage{
  876. PromptTokens: info.PromptTokens,
  877. CompletionTokens: 0,
  878. TotalTokens: info.PromptTokens,
  879. }
  880. openAIResponse.Usage = *usage.(*dto.Usage)
  881. jsonResponse, jsonErr := json.Marshal(openAIResponse)
  882. if jsonErr != nil {
  883. return nil, service.OpenAIErrorWrapper(jsonErr, "marshal_response_failed", http.StatusInternalServerError)
  884. }
  885. c.Writer.Header().Set("Content-Type", "application/json")
  886. c.Writer.WriteHeader(resp.StatusCode)
  887. _, _ = c.Writer.Write(jsonResponse)
  888. return usage, nil
  889. }