// relay-gemini.go
  1. package gemini
import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"

	"github.com/gin-gonic/gin"

	"one-api/common"
	"one-api/constant"
	"one-api/dto"
	"one-api/relay/channel/openai"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting/model_setting"
	"one-api/types"
)
// geminiSupportedMimeTypes whitelists the MIME types that may be forwarded to
// Gemini as inline media; membership is checked against the lower-cased type.
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
  39. // Gemini 允许的思考预算范围
  40. const (
  41. pro25MinBudget = 128
  42. pro25MaxBudget = 32768
  43. flash25MaxBudget = 24576
  44. flash25LiteMinBudget = 512
  45. flash25LiteMaxBudget = 24576
  46. )
  47. // clampThinkingBudget 根据模型名称将预算限制在允许的范围内
  48. func clampThinkingBudget(modelName string, budget int) int {
  49. isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
  50. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
  51. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
  52. is25FlashLite := strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
  53. if is25FlashLite {
  54. if budget < flash25LiteMinBudget {
  55. return flash25LiteMinBudget
  56. }
  57. if budget > flash25LiteMaxBudget {
  58. return flash25LiteMaxBudget
  59. }
  60. } else if isNew25Pro {
  61. if budget < pro25MinBudget {
  62. return pro25MinBudget
  63. }
  64. if budget > pro25MaxBudget {
  65. return pro25MaxBudget
  66. }
  67. } else { // 其他模型
  68. if budget < 0 {
  69. return 0
  70. }
  71. if budget > flash25MaxBudget {
  72. return flash25MaxBudget
  73. }
  74. }
  75. return budget
  76. }
  77. func ThinkingAdaptor(geminiRequest *GeminiChatRequest, info *relaycommon.RelayInfo) {
  78. if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
  79. modelName := info.UpstreamModelName
  80. isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
  81. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
  82. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
  83. if strings.Contains(modelName, "-thinking-") {
  84. parts := strings.SplitN(modelName, "-thinking-", 2)
  85. if len(parts) == 2 && parts[1] != "" {
  86. if budgetTokens, err := strconv.Atoi(parts[1]); err == nil {
  87. clampedBudget := clampThinkingBudget(modelName, budgetTokens)
  88. geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
  89. ThinkingBudget: common.GetPointer(clampedBudget),
  90. IncludeThoughts: true,
  91. }
  92. }
  93. }
  94. } else if strings.HasSuffix(modelName, "-thinking") {
  95. unsupportedModels := []string{
  96. "gemini-2.5-pro-preview-05-06",
  97. "gemini-2.5-pro-preview-03-25",
  98. }
  99. isUnsupported := false
  100. for _, unsupportedModel := range unsupportedModels {
  101. if strings.HasPrefix(modelName, unsupportedModel) {
  102. isUnsupported = true
  103. break
  104. }
  105. }
  106. if isUnsupported {
  107. geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
  108. IncludeThoughts: true,
  109. }
  110. } else {
  111. geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
  112. IncludeThoughts: true,
  113. }
  114. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  115. budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
  116. clampedBudget := clampThinkingBudget(modelName, int(budgetTokens))
  117. geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampedBudget)
  118. }
  119. }
  120. } else if strings.HasSuffix(modelName, "-nothinking") {
  121. if !isNew25Pro {
  122. geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
  123. ThinkingBudget: common.GetPointer(0),
  124. }
  125. }
  126. }
  127. }
  128. }
// Setting safety to the lowest possible values since Gemini is already powerless enough
//
// CovertGemini2OpenAI converts an OpenAI-style chat request into a Gemini chat
// request: generation parameters, thinking config, safety settings, tools,
// response format, and the full message history (system / tool / user /
// assistant messages, including images, files and audio).
func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*GeminiChatRequest, error) {
	geminiRequest := GeminiChatRequest{
		Contents: make([]GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.MaxTokens,
			Seed:            int64(textRequest.Seed),
		},
	}
	// Image-capable models must request both TEXT and IMAGE modalities.
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	ThinkingAdaptor(&geminiRequest, info)
	// One safety entry per configured category, thresholds from settings.
	safetySettings := make([]GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		for _, tool := range textRequest.Tools {
			// "googleSearch" / "codeExecution" are pseudo-functions mapped to
			// Gemini's built-in tools rather than function declarations.
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			// Gemini rejects an object schema with an empty properties map;
			// drop the parameters entirely in that case.
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		if codeExecution {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		// common.SysLog("tools: " + fmt.Sprintf("%+v", geminiRequest.Tools))
		// json_data, _ := json.Marshal(geminiRequest.Tools)
		// common.SysLog("tools_json: " + string(json_data))
	}
	// Map OpenAI response_format (json_schema/json_object) to a JSON MIME type
	// plus an optional cleaned response schema.
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if len(textRequest.ResponseFormat.JsonSchema) > 0 {
			// Decode the raw JSON schema first.
			var jsonSchema dto.FormatJsonSchema
			if err := common.Unmarshal(textRequest.ResponseFormat.JsonSchema, &jsonSchema); err == nil {
				cleanedSchema := removeAdditionalPropertiesWithDepth(jsonSchema.Schema, 0)
				geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
			}
		}
	}
	// tool_call_ids maps a tool-call ID to its function name so later "tool"
	// messages can be attributed to the right function.
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			// System messages are collected and emitted once as SystemInstructions.
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			// Tool results must live in a "user" turn; reuse the previous one
			// unless the conversation is empty or the last turn was "model".
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object.
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. On failure, try to parse it as a JSON array.
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					// Arrays are wrapped in an object, as Gemini expects one.
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. Otherwise treat it as plain text.
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &FunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []GeminiPart
		content := GeminiChatContent{
			Role: message.Role,
		}
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := GeminiPart{
					FunctionCall: &FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				// Remember the ID→name mapping for subsequent tool responses.
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				parts = append(parts, GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				// -1 means "no limit".
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Decide whether the image is referenced by URL.
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					// URL: fetch the file to obtain its MIME type and base64 data.
					fileData, err := service.GetFileBase64FromUrl(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Verify the MIME type against Gemini's whitelist.
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						url := part.GetImageMedia().Url
						return nil, fmt.Errorf("mime type is not supported by Gemini: '%s', url: '%s', supported types are: %v", fileData.MimeType, url, getSupportedMimeTypesList())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: fileData.MimeType, // keep original casing; it may matter to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					// Inline data URI: decode the base64 payload directly.
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				// Only base64-embedded files are supported, not file IDs.
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				base64String, err := service.DecodeBase64AudioData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: "audio/" + part.GetInputAudio().Format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// there's no assistant role in gemini and API shall vomit if Role is not user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		// Gemini rejects turns with no parts; skip empty messages.
		if len(content.Parts) > 0 {
			geminiRequest.Contents = append(geminiRequest.Contents, content)
		}
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &GeminiChatContent{
			Parts: []GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
  377. // Helper function to get a list of supported MIME types for error messages
  378. func getSupportedMimeTypesList() []string {
  379. keys := make([]string, 0, len(geminiSupportedMimeTypes))
  380. for k := range geminiSupportedMimeTypes {
  381. keys = append(keys, k)
  382. }
  383. return keys
  384. }
  385. // cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
  386. func cleanFunctionParameters(params interface{}) interface{} {
  387. if params == nil {
  388. return nil
  389. }
  390. switch v := params.(type) {
  391. case map[string]interface{}:
  392. // Create a copy to avoid modifying the original
  393. cleanedMap := make(map[string]interface{})
  394. for k, val := range v {
  395. cleanedMap[k] = val
  396. }
  397. // Remove unsupported root-level fields
  398. delete(cleanedMap, "default")
  399. delete(cleanedMap, "exclusiveMaximum")
  400. delete(cleanedMap, "exclusiveMinimum")
  401. delete(cleanedMap, "$schema")
  402. delete(cleanedMap, "additionalProperties")
  403. // Check and clean 'format' for string types
  404. if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
  405. if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
  406. if formatValue != "enum" && formatValue != "date-time" {
  407. delete(cleanedMap, "format")
  408. }
  409. }
  410. }
  411. // Clean properties
  412. if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
  413. cleanedProps := make(map[string]interface{})
  414. for propName, propValue := range props {
  415. cleanedProps[propName] = cleanFunctionParameters(propValue)
  416. }
  417. cleanedMap["properties"] = cleanedProps
  418. }
  419. // Recursively clean items in arrays
  420. if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
  421. cleanedMap["items"] = cleanFunctionParameters(items)
  422. }
  423. // Also handle items if it's an array of schemas
  424. if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
  425. cleanedItemsArray := make([]interface{}, len(itemsArray))
  426. for i, item := range itemsArray {
  427. cleanedItemsArray[i] = cleanFunctionParameters(item)
  428. }
  429. cleanedMap["items"] = cleanedItemsArray
  430. }
  431. // Recursively clean other schema composition keywords
  432. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  433. if nested, ok := cleanedMap[field].([]interface{}); ok {
  434. cleanedNested := make([]interface{}, len(nested))
  435. for i, item := range nested {
  436. cleanedNested[i] = cleanFunctionParameters(item)
  437. }
  438. cleanedMap[field] = cleanedNested
  439. }
  440. }
  441. // Recursively clean patternProperties
  442. if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
  443. cleanedPatternProps := make(map[string]interface{})
  444. for pattern, schema := range patternProps {
  445. cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
  446. }
  447. cleanedMap["patternProperties"] = cleanedPatternProps
  448. }
  449. // Recursively clean definitions
  450. if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
  451. cleanedDefinitions := make(map[string]interface{})
  452. for defName, defSchema := range definitions {
  453. cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
  454. }
  455. cleanedMap["definitions"] = cleanedDefinitions
  456. }
  457. // Recursively clean $defs (newer JSON Schema draft)
  458. if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
  459. cleanedDefs := make(map[string]interface{})
  460. for defName, defSchema := range defs {
  461. cleanedDefs[defName] = cleanFunctionParameters(defSchema)
  462. }
  463. cleanedMap["$defs"] = cleanedDefs
  464. }
  465. // Clean conditional keywords
  466. for _, field := range []string{"if", "then", "else", "not"} {
  467. if nested, ok := cleanedMap[field]; ok {
  468. cleanedMap[field] = cleanFunctionParameters(nested)
  469. }
  470. }
  471. return cleanedMap
  472. case []interface{}:
  473. // Handle arrays of schemas
  474. cleanedArray := make([]interface{}, len(v))
  475. for i, item := range v {
  476. cleanedArray[i] = cleanFunctionParameters(item)
  477. }
  478. return cleanedArray
  479. default:
  480. // Not a map or array, return as is (e.g., could be a primitive)
  481. return params
  482. }
  483. }
  484. func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
  485. if depth >= 5 {
  486. return schema
  487. }
  488. v, ok := schema.(map[string]interface{})
  489. if !ok || len(v) == 0 {
  490. return schema
  491. }
  492. // 删除所有的title字段
  493. delete(v, "title")
  494. delete(v, "$schema")
  495. // 如果type不为object和array,则直接返回
  496. if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
  497. return schema
  498. }
  499. switch v["type"] {
  500. case "object":
  501. delete(v, "additionalProperties")
  502. // 处理 properties
  503. if properties, ok := v["properties"].(map[string]interface{}); ok {
  504. for key, value := range properties {
  505. properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
  506. }
  507. }
  508. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  509. if nested, ok := v[field].([]interface{}); ok {
  510. for i, item := range nested {
  511. nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
  512. }
  513. }
  514. }
  515. case "array":
  516. if items, ok := v["items"].(map[string]interface{}); ok {
  517. v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
  518. }
  519. }
  520. return v
  521. }
  522. func unescapeString(s string) (string, error) {
  523. var result []rune
  524. escaped := false
  525. i := 0
  526. for i < len(s) {
  527. r, size := utf8.DecodeRuneInString(s[i:]) // 正确解码UTF-8字符
  528. if r == utf8.RuneError {
  529. return "", fmt.Errorf("invalid UTF-8 encoding")
  530. }
  531. if escaped {
  532. // 如果是转义符后的字符,检查其类型
  533. switch r {
  534. case '"':
  535. result = append(result, '"')
  536. case '\\':
  537. result = append(result, '\\')
  538. case '/':
  539. result = append(result, '/')
  540. case 'b':
  541. result = append(result, '\b')
  542. case 'f':
  543. result = append(result, '\f')
  544. case 'n':
  545. result = append(result, '\n')
  546. case 'r':
  547. result = append(result, '\r')
  548. case 't':
  549. result = append(result, '\t')
  550. case '\'':
  551. result = append(result, '\'')
  552. default:
  553. // 如果遇到一个非法的转义字符,直接按原样输出
  554. result = append(result, '\\', r)
  555. }
  556. escaped = false
  557. } else {
  558. if r == '\\' {
  559. escaped = true // 记录反斜杠作为转义符
  560. } else {
  561. result = append(result, r)
  562. }
  563. }
  564. i += size // 移动到下一个字符
  565. }
  566. return string(result), nil
  567. }
  568. func unescapeMapOrSlice(data interface{}) interface{} {
  569. switch v := data.(type) {
  570. case map[string]interface{}:
  571. for k, val := range v {
  572. v[k] = unescapeMapOrSlice(val)
  573. }
  574. case []interface{}:
  575. for i, val := range v {
  576. v[i] = unescapeMapOrSlice(val)
  577. }
  578. case string:
  579. if unescaped, err := unescapeString(v); err != nil {
  580. return v
  581. } else {
  582. return unescaped
  583. }
  584. }
  585. return data
  586. }
  587. func getResponseToolCall(item *GeminiPart) *dto.ToolCallResponse {
  588. var argsBytes []byte
  589. var err error
  590. if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
  591. argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
  592. } else {
  593. argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
  594. }
  595. if err != nil {
  596. return nil
  597. }
  598. return &dto.ToolCallResponse{
  599. ID: fmt.Sprintf("call_%s", common.GetUUID()),
  600. Type: "function",
  601. Function: dto.FunctionResponse{
  602. Arguments: string(argsBytes),
  603. Name: item.FunctionCall.FunctionName,
  604. },
  605. }
  606. }
// responseGeminiChat2OpenAI converts a complete (non-streaming) Gemini chat
// response into an OpenAI-compatible text response: candidates become
// choices, functionCall parts become tool calls, thought parts become
// reasoning content, and Gemini finish reasons are mapped to OpenAI ones.
func responseGeminiChat2OpenAI(c *gin.Context, response *GeminiChatResponse) *dto.OpenAITextResponse {
	fullTextResponse := dto.OpenAITextResponse{
		Id:      helper.GetResponseID(c),
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
	}
	// NOTE(review): isToolCall lives outside the candidate loop and is never
	// reset, so once one candidate produces a tool call every later
	// candidate's finish reason is also forced to tool_calls — confirm this
	// cross-candidate carry-over is intended.
	isToolCall := false
	for _, candidate := range response.Candidates {
		choice := dto.OpenAITextResponseChoice{
			Index: int(candidate.Index),
			Message: dto.Message{
				Role:    "assistant",
				Content: "",
			},
			FinishReason: constant.FinishReasonStop,
		}
		if len(candidate.Content.Parts) > 0 {
			var texts []string
			var toolCalls []dto.ToolCallResponse
			for _, part := range candidate.Content.Parts {
				if part.FunctionCall != nil {
					choice.FinishReason = constant.FinishReasonToolCalls
					if call := getResponseToolCall(&part); call != nil {
						toolCalls = append(toolCalls, *call)
					}
				} else if part.Thought {
					// NOTE(review): successive thought parts overwrite each
					// other here — only the last is kept, unlike the streaming
					// path, which joins them. Confirm whether they should be
					// concatenated instead.
					choice.Message.ReasoningContent = part.Text
				} else {
					if part.ExecutableCode != nil {
						// Render executable code as a fenced code block.
						texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
					} else if part.CodeExecutionResult != nil {
						texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
					} else {
						// Drop parts that are a bare newline.
						if part.Text != "\n" {
							texts = append(texts, part.Text)
						}
					}
				}
			}
			if len(toolCalls) > 0 {
				choice.Message.SetToolCalls(toolCalls)
				isToolCall = true
			}
			choice.Message.SetStringContent(strings.Join(texts, "\n"))
		}
		if candidate.FinishReason != nil {
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = constant.FinishReasonLength
			default:
				// Any other Gemini reason maps to content_filter.
				choice.FinishReason = constant.FinishReasonContentFilter
			}
		}
		if isToolCall {
			choice.FinishReason = constant.FinishReasonToolCalls
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
}
// streamResponseGeminiChat2OpenAI converts one streamed Gemini chunk into an
// OpenAI chat.completion.chunk. It returns (response, isStop, hasImage):
// isStop is true when any candidate finished with "STOP" (that reason is
// cleared here so the caller can emit a dedicated stop chunk); hasImage is
// true when inline image data was rendered into the delta.
func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	hasImage := false
	for _, candidate := range geminiResponse.Candidates {
		// Strip "STOP" so the finish-reason switch below never sees it; the
		// caller sends the stop chunk separately.
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				//Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		if candidate.FinishReason != nil {
			// p := GeminiConvertFinishReason(*candidate.FinishReason)
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				// Any other Gemini reason maps to content_filter.
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				// Inline images are surfaced as markdown data URIs.
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
					hasImage = true
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				// NOTE(review): one thought part flags the entire chunk as
				// reasoning, so mixed thought/text parts in the same chunk all
				// land in reasoning content — confirm chunks are homogeneous.
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					// Drop parts that are a bare newline.
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop, hasImage
}
  743. func handleStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
  744. streamData, err := common.Marshal(resp)
  745. if err != nil {
  746. return fmt.Errorf("failed to marshal stream response: %w", err)
  747. }
  748. err = openai.HandleStreamFormat(c, info, string(streamData), info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
  749. if err != nil {
  750. return fmt.Errorf("failed to handle stream format: %w", err)
  751. }
  752. return nil
  753. }
  754. func handleFinalStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
  755. streamData, err := common.Marshal(resp)
  756. if err != nil {
  757. return fmt.Errorf("failed to marshal stream response: %w", err)
  758. }
  759. openai.HandleFinalResponse(c, info, string(streamData), resp.Id, resp.Created, resp.Model, resp.GetSystemFingerprint(), resp.Usage, info.ShouldIncludeUsage)
  760. return nil
  761. }
  762. func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  763. // responseText := ""
  764. id := helper.GetResponseID(c)
  765. createAt := common.GetTimestamp()
  766. var usage = &dto.Usage{}
  767. var imageCount int
  768. helper.StreamScannerHandler(c, resp, info, func(data string) bool {
  769. var geminiResponse GeminiChatResponse
  770. err := common.UnmarshalJsonStr(data, &geminiResponse)
  771. if err != nil {
  772. common.LogError(c, "error unmarshalling stream response: "+err.Error())
  773. return false
  774. }
  775. response, isStop, hasImage := streamResponseGeminiChat2OpenAI(&geminiResponse)
  776. if hasImage {
  777. imageCount++
  778. }
  779. response.Id = id
  780. response.Created = createAt
  781. response.Model = info.UpstreamModelName
  782. if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
  783. usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
  784. usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
  785. usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
  786. usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
  787. for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
  788. if detail.Modality == "AUDIO" {
  789. usage.PromptTokensDetails.AudioTokens = detail.TokenCount
  790. } else if detail.Modality == "TEXT" {
  791. usage.PromptTokensDetails.TextTokens = detail.TokenCount
  792. }
  793. }
  794. }
  795. if info.SendResponseCount == 0 {
  796. // send first response
  797. err = handleStream(c, info, helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil))
  798. if err != nil {
  799. common.LogError(c, err.Error())
  800. }
  801. }
  802. err = handleStream(c, info, response)
  803. if err != nil {
  804. common.LogError(c, err.Error())
  805. }
  806. if isStop {
  807. _ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop))
  808. }
  809. return true
  810. })
  811. if info.SendResponseCount == 0 {
  812. // 空补全,报错不计费
  813. // empty response, throw an error
  814. return nil, types.NewOpenAIError(errors.New("no response received from Gemini API"), types.ErrorCodeEmptyResponse, http.StatusInternalServerError)
  815. }
  816. if imageCount != 0 {
  817. if usage.CompletionTokens == 0 {
  818. usage.CompletionTokens = imageCount * 258
  819. }
  820. }
  821. usage.PromptTokensDetails.TextTokens = usage.PromptTokens
  822. usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
  823. response := helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
  824. err := handleFinalStream(c, info, response)
  825. if err != nil {
  826. common.SysError("send final response failed: " + err.Error())
  827. }
  828. //if info.RelayFormat == relaycommon.RelayFormatOpenAI {
  829. // helper.Done(c)
  830. //}
  831. //resp.Body.Close()
  832. return usage, nil
  833. }
  834. func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  835. responseBody, err := io.ReadAll(resp.Body)
  836. if err != nil {
  837. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  838. }
  839. common.CloseResponseBodyGracefully(resp)
  840. if common.DebugEnabled {
  841. println(string(responseBody))
  842. }
  843. var geminiResponse GeminiChatResponse
  844. err = common.Unmarshal(responseBody, &geminiResponse)
  845. if err != nil {
  846. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  847. }
  848. if len(geminiResponse.Candidates) == 0 {
  849. return nil, types.NewOpenAIError(errors.New("no candidates returned"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  850. }
  851. fullTextResponse := responseGeminiChat2OpenAI(c, &geminiResponse)
  852. fullTextResponse.Model = info.UpstreamModelName
  853. usage := dto.Usage{
  854. PromptTokens: geminiResponse.UsageMetadata.PromptTokenCount,
  855. CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
  856. TotalTokens: geminiResponse.UsageMetadata.TotalTokenCount,
  857. }
  858. usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
  859. usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
  860. for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
  861. if detail.Modality == "AUDIO" {
  862. usage.PromptTokensDetails.AudioTokens = detail.TokenCount
  863. } else if detail.Modality == "TEXT" {
  864. usage.PromptTokensDetails.TextTokens = detail.TokenCount
  865. }
  866. }
  867. fullTextResponse.Usage = usage
  868. jsonResponse, err := json.Marshal(fullTextResponse)
  869. if err != nil {
  870. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  871. }
  872. c.Writer.Header().Set("Content-Type", "application/json")
  873. c.Writer.WriteHeader(resp.StatusCode)
  874. c.Writer.Write(jsonResponse)
  875. return &usage, nil
  876. }
  877. func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  878. defer common.CloseResponseBodyGracefully(resp)
  879. responseBody, readErr := io.ReadAll(resp.Body)
  880. if readErr != nil {
  881. return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  882. }
  883. var geminiResponse GeminiEmbeddingResponse
  884. if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  885. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  886. }
  887. // convert to openai format response
  888. openAIResponse := dto.OpenAIEmbeddingResponse{
  889. Object: "list",
  890. Data: []dto.OpenAIEmbeddingResponseItem{
  891. {
  892. Object: "embedding",
  893. Embedding: geminiResponse.Embedding.Values,
  894. Index: 0,
  895. },
  896. },
  897. Model: info.UpstreamModelName,
  898. }
  899. // calculate usage
  900. // https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
  901. // Google has not yet clarified how embedding models will be billed
  902. // refer to openai billing method to use input tokens billing
  903. // https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
  904. usage := &dto.Usage{
  905. PromptTokens: info.PromptTokens,
  906. CompletionTokens: 0,
  907. TotalTokens: info.PromptTokens,
  908. }
  909. openAIResponse.Usage = *usage
  910. jsonResponse, jsonErr := common.Marshal(openAIResponse)
  911. if jsonErr != nil {
  912. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  913. }
  914. common.IOCopyBytesGracefully(c, resp, jsonResponse)
  915. return usage, nil
  916. }
  917. func GeminiImageHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  918. responseBody, readErr := io.ReadAll(resp.Body)
  919. if readErr != nil {
  920. return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  921. }
  922. _ = resp.Body.Close()
  923. var geminiResponse GeminiImageResponse
  924. if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  925. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  926. }
  927. if len(geminiResponse.Predictions) == 0 {
  928. return nil, types.NewOpenAIError(errors.New("no images generated"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  929. }
  930. // convert to openai format response
  931. openAIResponse := dto.ImageResponse{
  932. Created: common.GetTimestamp(),
  933. Data: make([]dto.ImageData, 0, len(geminiResponse.Predictions)),
  934. }
  935. for _, prediction := range geminiResponse.Predictions {
  936. if prediction.RaiFilteredReason != "" {
  937. continue // skip filtered image
  938. }
  939. openAIResponse.Data = append(openAIResponse.Data, dto.ImageData{
  940. B64Json: prediction.BytesBase64Encoded,
  941. })
  942. }
  943. jsonResponse, jsonErr := json.Marshal(openAIResponse)
  944. if jsonErr != nil {
  945. return nil, types.NewError(jsonErr, types.ErrorCodeBadResponseBody)
  946. }
  947. c.Writer.Header().Set("Content-Type", "application/json")
  948. c.Writer.WriteHeader(resp.StatusCode)
  949. _, _ = c.Writer.Write(jsonResponse)
  950. // https://github.com/google-gemini/cookbook/blob/719a27d752aac33f39de18a8d3cb42a70874917e/quickstarts/Counting_Tokens.ipynb
  951. // each image has fixed 258 tokens
  952. const imageTokens = 258
  953. generatedImages := len(openAIResponse.Data)
  954. usage := &dto.Usage{
  955. PromptTokens: imageTokens * generatedImages, // each generated image has fixed 258 tokens
  956. CompletionTokens: 0, // image generation does not calculate completion tokens
  957. TotalTokens: imageTokens * generatedImages,
  958. }
  959. return usage, nil
  960. }