relay-gemini.go

package gemini

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/constant"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	"github.com/QuantumNous/new-api/relay/channel/openai"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/setting/model_setting"
	"github.com/QuantumNous/new-api/setting/reasoning"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)

// https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/inference?hl=zh-cn#blob
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"image/jpg":       true, // legacy alias for image/jpeg
	"image/webp":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}

const thoughtSignatureBypassValue = "context_engineering_is_the_way_to_go"

// Thinking-budget ranges allowed by Gemini.
const (
	pro25MinBudget       = 128
	pro25MaxBudget       = 32768
	flash25MaxBudget     = 24576
	flash25LiteMinBudget = 512
	flash25LiteMaxBudget = 24576
)

func isNew25ProModel(modelName string) bool {
	return strings.HasPrefix(modelName, "gemini-2.5-pro") &&
		!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
		!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
}

func is25FlashLiteModel(modelName string) bool {
	return strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
}

// clampThinkingBudget clamps the budget to the range allowed by the given model.
func clampThinkingBudget(modelName string, budget int) int {
	isNew25Pro := isNew25ProModel(modelName)
	is25FlashLite := is25FlashLiteModel(modelName)
	if is25FlashLite {
		if budget < flash25LiteMinBudget {
			return flash25LiteMinBudget
		}
		if budget > flash25LiteMaxBudget {
			return flash25LiteMaxBudget
		}
	} else if isNew25Pro {
		if budget < pro25MinBudget {
			return pro25MinBudget
		}
		if budget > pro25MaxBudget {
			return pro25MaxBudget
		}
	} else { // all other models
		if budget < 0 {
			return 0
		}
		if budget > flash25MaxBudget {
			return flash25MaxBudget
		}
	}
	return budget
}
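
// Illustrative behavior of clampThinkingBudget (assumed example values, not
// from the original source):
//
//	clampThinkingBudget("gemini-2.5-pro", 50000)      // -> 32768 (pro25MaxBudget)
//	clampThinkingBudget("gemini-2.5-flash-lite", 100) // -> 512 (flash25LiteMinBudget)
//	clampThinkingBudget("gemini-2.5-flash", -1)       // -> 0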

// "effort": "high"    - allocates a large portion of tokens for reasoning (approximately 80% of max_tokens)
// "effort": "medium"  - allocates a moderate portion of tokens (approximately 50% of max_tokens)
// "effort": "low"     - allocates a smaller portion of tokens (approximately 20% of max_tokens)
// "effort": "minimal" - allocates a minimal portion of tokens (approximately 5% of max_tokens)
func clampThinkingBudgetByEffort(modelName string, effort string) int {
	isNew25Pro := isNew25ProModel(modelName)
	is25FlashLite := is25FlashLiteModel(modelName)
	maxBudget := 0
	// Note: this was originally two separate if statements, so the flash-lite
	// assignment was always overwritten; an if/else chain matches the intent.
	if is25FlashLite {
		maxBudget = flash25LiteMaxBudget
	} else if isNew25Pro {
		maxBudget = pro25MaxBudget
	} else {
		maxBudget = flash25MaxBudget
	}
	switch effort {
	case "high":
		maxBudget = maxBudget * 80 / 100
	case "medium":
		maxBudget = maxBudget * 50 / 100
	case "low":
		maxBudget = maxBudget * 20 / 100
	case "minimal":
		maxBudget = maxBudget * 5 / 100
	}
	return clampThinkingBudget(modelName, maxBudget)
}
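
// Illustrative behavior of clampThinkingBudgetByEffort (assumed inputs, not
// from the original source; integer division applies):
//
//	clampThinkingBudgetByEffort("gemini-2.5-flash", "high")  // 24576*80/100 -> 19660
//	clampThinkingBudgetByEffort("gemini-2.5-pro", "minimal") // 32768*5/100  -> 1638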

func ThinkingAdaptor(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo, oaiRequest ...dto.GeneralOpenAIRequest) {
	if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
		modelName := info.UpstreamModelName
		isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
		if strings.Contains(modelName, "-thinking-") {
			parts := strings.SplitN(modelName, "-thinking-", 2)
			if len(parts) == 2 && parts[1] != "" {
				if budgetTokens, err := strconv.Atoi(parts[1]); err == nil {
					clampedBudget := clampThinkingBudget(modelName, budgetTokens)
					geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
						ThinkingBudget:  common.GetPointer(clampedBudget),
						IncludeThoughts: true,
					}
				}
			}
		} else if strings.HasSuffix(modelName, "-thinking") {
			unsupportedModels := []string{
				"gemini-2.5-pro-preview-05-06",
				"gemini-2.5-pro-preview-03-25",
			}
			isUnsupported := false
			for _, unsupportedModel := range unsupportedModels {
				if strings.HasPrefix(modelName, unsupportedModel) {
					isUnsupported = true
					break
				}
			}
			if isUnsupported {
				geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
					IncludeThoughts: true,
				}
			} else {
				geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
					IncludeThoughts: true,
				}
				if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
					budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
					clampedBudget := clampThinkingBudget(modelName, int(budgetTokens))
					geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampedBudget)
				} else {
					if len(oaiRequest) > 0 {
						// If a reasoning_effort parameter is present, derive the thinking budget from it.
						geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampThinkingBudgetByEffort(modelName, oaiRequest[0].ReasoningEffort))
					}
				}
			}
		} else if strings.HasSuffix(modelName, "-nothinking") {
			if !isNew25Pro {
				geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
					ThinkingBudget: common.GetPointer(0),
				}
			}
		} else if _, level, ok := reasoning.TrimEffortSuffix(info.UpstreamModelName); ok && level != "" {
			geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
				IncludeThoughts: true,
				ThinkingLevel:   level,
			}
			info.ReasoningEffort = level
		}
	}
}
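
// Model-name suffix conventions handled by ThinkingAdaptor, summarized from
// the branches above (model names are illustrative):
//
//	"gemini-2.5-flash-thinking-8192" -> explicit thinking budget of 8192 (clamped)
//	"gemini-2.5-flash-thinking"      -> budget derived from max_tokens or reasoning_effort
//	"gemini-2.5-flash-nothinking"    -> thinking disabled (budget 0)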

// Setting safety to the lowest possible values since Gemini is already powerless enough
func CovertOpenAI2Gemini(c *gin.Context, textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*dto.GeminiChatRequest, error) {
	geminiRequest := dto.GeminiChatRequest{
		Contents: make([]dto.GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: dto.GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.GetMaxTokens(),
			Seed:            int64(textRequest.Seed),
		},
	}
	attachThoughtSignature := (info.ChannelType == constant.ChannelTypeGemini ||
		info.ChannelType == constant.ChannelTypeVertexAi) &&
		model_setting.GetGeminiSettings().FunctionCallThoughtSignatureEnabled
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	if stopSequences := parseStopSequences(textRequest.Stop); len(stopSequences) > 0 {
		// Gemini supports up to 5 stop sequences
		if len(stopSequences) > 5 {
			stopSequences = stopSequences[:5]
		}
		geminiRequest.GenerationConfig.StopSequences = stopSequences
	}
	adaptorWithExtraBody := false
	// patch extra_body
	if len(textRequest.ExtraBody) > 0 {
		var extraBody map[string]interface{}
		if err := common.Unmarshal(textRequest.ExtraBody, &extraBody); err != nil {
			return nil, fmt.Errorf("invalid extra body: %w", err)
		}
		// e.g. {"google":{"thinking_config":{"thinking_budget":5324,"include_thoughts":true}}}
		if googleBody, ok := extraBody["google"].(map[string]interface{}); ok {
			if !strings.HasSuffix(info.UpstreamModelName, "-nothinking") {
				adaptorWithExtraBody = true
				// reject the mistaken camelCase name thinkingConfig; the correct name is thinking_config
				if _, hasErrorParam := googleBody["thinkingConfig"]; hasErrorParam {
					return nil, errors.New("extra_body.google.thinkingConfig is not supported, use extra_body.google.thinking_config instead")
				}
				if thinkingConfig, ok := googleBody["thinking_config"].(map[string]interface{}); ok {
					// reject the mistaken camelCase name thinkingBudget; the correct name is thinking_budget
					if _, hasErrorParam := thinkingConfig["thinkingBudget"]; hasErrorParam {
						return nil, errors.New("extra_body.google.thinking_config.thinkingBudget is not supported, use extra_body.google.thinking_config.thinking_budget instead")
					}
					var hasThinkingConfig bool
					var tempThinkingConfig dto.GeminiThinkingConfig
					if thinkingBudget, exists := thinkingConfig["thinking_budget"]; exists {
						switch v := thinkingBudget.(type) {
						case float64:
							budgetInt := int(v)
							tempThinkingConfig.ThinkingBudget = common.GetPointer(budgetInt)
							if budgetInt > 0 {
								// a positive budget enables thoughts
								tempThinkingConfig.IncludeThoughts = true
							} else {
								// present but zero or negative: disable thinking
								tempThinkingConfig.IncludeThoughts = false
							}
							hasThinkingConfig = true
						default:
							return nil, errors.New("extra_body.google.thinking_config.thinking_budget must be an integer")
						}
					}
					if includeThoughts, exists := thinkingConfig["include_thoughts"]; exists {
						if v, ok := includeThoughts.(bool); ok {
							tempThinkingConfig.IncludeThoughts = v
							hasThinkingConfig = true
						} else {
							return nil, errors.New("extra_body.google.thinking_config.include_thoughts must be a boolean")
						}
					}
					if thinkingLevel, exists := thinkingConfig["thinking_level"]; exists {
						if v, ok := thinkingLevel.(string); ok {
							tempThinkingConfig.ThinkingLevel = v
							hasThinkingConfig = true
						} else {
							return nil, errors.New("extra_body.google.thinking_config.thinking_level must be a string")
						}
					}
					if hasThinkingConfig {
						// Avoid a nil-pointer panic: only assign through the pointer once a config exists.
						if geminiRequest.GenerationConfig.ThinkingConfig == nil {
							geminiRequest.GenerationConfig.ThinkingConfig = &tempThinkingConfig
						} else {
							// Merge into the already-allocated config.
							if tempThinkingConfig.ThinkingBudget != nil {
								geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = tempThinkingConfig.ThinkingBudget
							}
							geminiRequest.GenerationConfig.ThinkingConfig.IncludeThoughts = tempThinkingConfig.IncludeThoughts
							if tempThinkingConfig.ThinkingLevel != "" {
								geminiRequest.GenerationConfig.ThinkingConfig.ThinkingLevel = tempThinkingConfig.ThinkingLevel
							}
						}
					}
				}
			}
			// reject the mistaken camelCase name imageConfig; the correct name is image_config
			if _, hasErrorParam := googleBody["imageConfig"]; hasErrorParam {
				return nil, errors.New("extra_body.google.imageConfig is not supported, use extra_body.google.image_config instead")
			}
			if imageConfig, ok := googleBody["image_config"].(map[string]interface{}); ok {
				// reject the mistaken camelCase name aspectRatio; the correct name is aspect_ratio
				if _, hasErrorParam := imageConfig["aspectRatio"]; hasErrorParam {
					return nil, errors.New("extra_body.google.image_config.aspectRatio is not supported, use extra_body.google.image_config.aspect_ratio instead")
				}
				// reject the mistaken camelCase name imageSize; the correct name is image_size
				if _, hasErrorParam := imageConfig["imageSize"]; hasErrorParam {
					return nil, errors.New("extra_body.google.image_config.imageSize is not supported, use extra_body.google.image_config.image_size instead")
				}
				// convert snake_case to camelCase for the Gemini API
				geminiImageConfig := make(map[string]interface{})
				if aspectRatio, ok := imageConfig["aspect_ratio"]; ok {
					geminiImageConfig["aspectRatio"] = aspectRatio
				}
				if imageSize, ok := imageConfig["image_size"]; ok {
					geminiImageConfig["imageSize"] = imageSize
				}
				if len(geminiImageConfig) > 0 {
					imageConfigBytes, err := common.Marshal(geminiImageConfig)
					if err != nil {
						return nil, fmt.Errorf("failed to marshal image_config: %w", err)
					}
					geminiRequest.GenerationConfig.ImageConfig = imageConfigBytes
				}
			}
		}
	}
	if !adaptorWithExtraBody {
		ThinkingAdaptor(&geminiRequest, info, textRequest)
	}
	safetySettings := make([]dto.GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, dto.GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		urlContext := false
		for _, tool := range textRequest.Tools {
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			if tool.Function.Name == "urlContext" {
				urlContext = true
				continue
			}
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		geminiTools := geminiRequest.GetTools()
		if codeExecution {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if urlContext {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				URLContext: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		geminiRequest.SetTools(geminiTools)
		// [NEW] Convert OpenAI tool_choice to Gemini toolConfig.functionCallingConfig
		// Mapping: "auto" -> "AUTO", "none" -> "NONE", "required" -> "ANY"
		// Object format: {"type": "function", "function": {"name": "xxx"}} -> "ANY" + allowedFunctionNames
		if textRequest.ToolChoice != nil {
			geminiRequest.ToolConfig = convertToolChoiceToGeminiConfig(textRequest.ToolChoice)
		}
	}
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if len(textRequest.ResponseFormat.JsonSchema) > 0 {
			// Parse the json.RawMessage first.
			var jsonSchema dto.FormatJsonSchema
			if err := common.Unmarshal(textRequest.ResponseFormat.JsonSchema, &jsonSchema); err == nil {
				cleanedSchema := removeAdditionalPropertiesWithDepth(jsonSchema.Schema, 0)
				geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
			}
		}
	}
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" || message.Role == "developer" {
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, dto.GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object.
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. On failure, try to parse it as a JSON array.
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					// Wrap an array in an object.
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. If that also fails, treat it as plain text.
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &dto.GeminiFunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, dto.GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []dto.GeminiPart
		content := dto.GeminiChatContent{
			Role: message.Role,
		}
		shouldAttachThoughtSignature := attachThoughtSignature && (message.Role == "assistant" || message.Role == "model")
		signatureAttached := false
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := dto.GeminiPart{
					FunctionCall: &dto.FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				if shouldAttachThoughtSignature && !signatureAttached && hasFunctionCallContent(toolCall.FunctionCall) && len(toolCall.ThoughtSignature) == 0 {
					toolCall.ThoughtSignature = json.RawMessage(strconv.Quote(thoughtSignatureBypassValue))
					signatureAttached = true
				}
				parts = append(parts, toolCall)
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				// check markdown image ![image](data:image/jpeg;base64,xxxxxxxxxxxx)
				// Use string searches instead of a regex to avoid performance issues on large texts.
				text := part.Text
				hasMarkdownImage := false
				for {
					// Quick check for a markdown image marker.
					startIdx := strings.Index(text, "![")
					if startIdx == -1 {
						break
					}
					// Find "](data:".
					bracketIdx := strings.Index(text[startIdx:], "](data:")
					if bracketIdx == -1 {
						break
					}
					bracketIdx += startIdx
					// Find the closing ")".
					closeIdx := strings.Index(text[bracketIdx+2:], ")")
					if closeIdx == -1 {
						break
					}
					closeIdx += bracketIdx + 2
					hasMarkdownImage = true
					// Append the text that precedes the image.
					if startIdx > 0 {
						textBefore := text[:startIdx]
						if textBefore != "" {
							parts = append(parts, dto.GeminiPart{
								Text: textBefore,
							})
						}
					}
					// Extract the data URL (from after "](" up to the ")").
					dataUrl := text[bracketIdx+2 : closeIdx]
					format, base64String, err := service.DecodeBase64FileData(dataUrl)
					if err != nil {
						return nil, fmt.Errorf("decode markdown base64 image data failed: %s", err.Error())
					}
					imgPart := dto.GeminiPart{
						InlineData: &dto.GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					}
					if shouldAttachThoughtSignature {
						imgPart.ThoughtSignature = json.RawMessage(strconv.Quote(thoughtSignatureBypassValue))
					}
					parts = append(parts, imgPart)
					// Continue with the remaining text.
					text = text[closeIdx+1:]
				}
				// Append the remaining text, or the original text if no markdown image was found.
				if !hasMarkdownImage {
					parts = append(parts, dto.GeminiPart{
						Text: part.Text,
					})
				}
			} else if part.Type == dto.ContentTypeImageURL {
				// Fetch the image data through the unified file service.
				var source *types.FileSource
				imageUrl := part.GetImageMedia().Url
				if strings.HasPrefix(imageUrl, "http") {
					source = types.NewURLFileSource(imageUrl)
				} else {
					source = types.NewBase64FileSource(imageUrl, "")
				}
				base64Data, mimeType, err := service.GetBase64Data(c, source, "formatting image for Gemini")
				if err != nil {
					return nil, fmt.Errorf("get file data from '%s' failed: %w", source.GetIdentifier(), err)
				}
				// Verify that the MIME type is on Gemini's supported whitelist.
				if _, ok := geminiSupportedMimeTypes[strings.ToLower(mimeType)]; !ok {
					return nil, fmt.Errorf("mime type is not supported by Gemini: '%s', url: '%s', supported types are: %v", mimeType, source.GetIdentifier(), getSupportedMimeTypesList())
				}
				parts = append(parts, dto.GeminiPart{
					InlineData: &dto.GeminiInlineData{
						MimeType: mimeType,
						Data:     base64Data,
					},
				})
			} else if part.Type == dto.ContentTypeFile {
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				fileSource := types.NewBase64FileSource(part.GetFile().FileData, "")
				base64Data, mimeType, err := service.GetBase64Data(c, fileSource, "formatting file for Gemini")
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, dto.GeminiPart{
					InlineData: &dto.GeminiInlineData{
						MimeType: mimeType,
						Data:     base64Data,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				audioSource := types.NewBase64FileSource(part.GetInputAudio().Data, "audio/"+part.GetInputAudio().Format)
				base64Data, mimeType, err := service.GetBase64Data(c, audioSource, "formatting audio for Gemini")
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, dto.GeminiPart{
					InlineData: &dto.GeminiInlineData{
						MimeType: mimeType,
						Data:     base64Data,
					},
				})
			}
		}
		// If a signature should be attached but none has been yet (no tool_calls,
		// or tool_calls was empty), attach thoughtSignature to the first text part.
		if shouldAttachThoughtSignature && !signatureAttached && len(parts) > 0 {
			for i := range parts {
				if parts[i].Text != "" {
					parts[i].ThoughtSignature = json.RawMessage(strconv.Quote(thoughtSignatureBypassValue))
					break
				}
			}
		}
		content.Parts = parts
		// there's no assistant role in gemini and API shall vomit if Role is not user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		if len(content.Parts) > 0 {
			geminiRequest.Contents = append(geminiRequest.Contents, content)
		}
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &dto.GeminiChatContent{
			Parts: []dto.GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}

// parseStopSequences parses stop sequences, accepting either a string or a string array.
func parseStopSequences(stop any) []string {
	if stop == nil {
		return nil
	}
	switch v := stop.(type) {
	case string:
		if v != "" {
			return []string{v}
		}
	case []string:
		return v
	case []interface{}:
		sequences := make([]string, 0, len(v))
		for _, item := range v {
			if str, ok := item.(string); ok && str != "" {
				sequences = append(sequences, str)
			}
		}
		return sequences
	}
	return nil
}
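
// Illustrative inputs for parseStopSequences (assumed example values):
//
//	parseStopSequences("END")                          // -> []string{"END"}
//	parseStopSequences([]interface{}{"a", "", 1, "b"}) // -> []string{"a", "b"} (empty strings and non-strings skipped)
//	parseStopSequences(nil)                            // -> nil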

func hasFunctionCallContent(call *dto.FunctionCall) bool {
	if call == nil {
		return false
	}
	if strings.TrimSpace(call.FunctionName) != "" {
		return true
	}
	switch v := call.Arguments.(type) {
	case nil:
		return false
	case string:
		return strings.TrimSpace(v) != ""
	case map[string]interface{}:
		return len(v) > 0
	case []interface{}:
		return len(v) > 0
	default:
		return true
	}
}
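
// Illustrative results for hasFunctionCallContent (assumed example values):
//
//	hasFunctionCallContent(nil)                                            // false
//	hasFunctionCallContent(&dto.FunctionCall{FunctionName: "get_weather"}) // true
//	hasFunctionCallContent(&dto.FunctionCall{Arguments: "   "})            // false (blank string arguments)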

// Helper function to get a list of supported MIME types for error messages
func getSupportedMimeTypesList() []string {
	keys := make([]string, 0, len(geminiSupportedMimeTypes))
	for k := range geminiSupportedMimeTypes {
		keys = append(keys, k)
	}
	return keys
}

var geminiOpenAPISchemaAllowedFields = map[string]struct{}{
	"anyOf":            {},
	"default":          {},
	"description":      {},
	"enum":             {},
	"example":          {},
	"format":           {},
	"items":            {},
	"maxItems":         {},
	"maxLength":        {},
	"maxProperties":    {},
	"maximum":          {},
	"minItems":         {},
	"minLength":        {},
	"minProperties":    {},
	"minimum":          {},
	"nullable":         {},
	"pattern":          {},
	"properties":       {},
	"propertyOrdering": {},
	"required":         {},
	"title":            {},
	"type":             {},
}

const geminiFunctionSchemaMaxDepth = 64

// cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
func cleanFunctionParameters(params interface{}) interface{} {
	return cleanFunctionParametersWithDepth(params, 0)
}

func cleanFunctionParametersWithDepth(params interface{}, depth int) interface{} {
	if params == nil {
		return nil
	}
	if depth >= geminiFunctionSchemaMaxDepth {
		return cleanFunctionParametersShallow(params)
	}
	switch v := params.(type) {
	case map[string]interface{}:
		// Keep only Gemini-supported OpenAPI schema subset fields (per official SDK Schema).
		cleanedMap := make(map[string]interface{}, len(v))
		for k, val := range v {
			if _, ok := geminiOpenAPISchemaAllowedFields[k]; ok {
				cleanedMap[k] = val
			}
		}
		normalizeGeminiSchemaTypeAndNullable(cleanedMap)
		// Clean properties
		if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
			cleanedProps := make(map[string]interface{})
			for propName, propValue := range props {
				cleanedProps[propName] = cleanFunctionParametersWithDepth(propValue, depth+1)
			}
			cleanedMap["properties"] = cleanedProps
		}
		// Recursively clean items in arrays
		if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
			cleanedMap["items"] = cleanFunctionParametersWithDepth(items, depth+1)
		}
		// OpenAPI tuple-style items is not supported by Gemini SDK Schema; keep first to avoid API rejection.
		if itemsArray, ok := cleanedMap["items"].([]interface{}); ok && len(itemsArray) > 0 {
			cleanedMap["items"] = cleanFunctionParametersWithDepth(itemsArray[0], depth+1)
		}
		// Recursively clean anyOf
		if nested, ok := cleanedMap["anyOf"].([]interface{}); ok && nested != nil {
			cleanedNested := make([]interface{}, len(nested))
			for i, item := range nested {
				cleanedNested[i] = cleanFunctionParametersWithDepth(item, depth+1)
			}
			cleanedMap["anyOf"] = cleanedNested
		}
		return cleanedMap
	case []interface{}:
		// Handle arrays of schemas
		cleanedArray := make([]interface{}, len(v))
		for i, item := range v {
			cleanedArray[i] = cleanFunctionParametersWithDepth(item, depth+1)
		}
		return cleanedArray
	default:
		// Not a map or array, return as is (e.g., could be a primitive)
		return params
	}
}
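
// Illustrative input/output for cleanFunctionParameters (assumed schema):
//
//	in:  {"type": "object", "additionalProperties": false,
//	      "properties": {"city": {"type": "string", "$comment": "x"}}}
//	out: {"type": "OBJECT",
//	      "properties": {"city": {"type": "STRING"}}}
//
// Keys not in geminiOpenAPISchemaAllowedFields (additionalProperties, $comment)
// are dropped, and type values are upper-cased by normalizeGeminiSchemaTypeAndNullable.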

func cleanFunctionParametersShallow(params interface{}) interface{} {
	switch v := params.(type) {
	case map[string]interface{}:
		cleanedMap := make(map[string]interface{}, len(v))
		for k, val := range v {
			if _, ok := geminiOpenAPISchemaAllowedFields[k]; ok {
				cleanedMap[k] = val
			}
		}
		normalizeGeminiSchemaTypeAndNullable(cleanedMap)
		// Stop recursion and avoid retaining huge nested structures.
		delete(cleanedMap, "properties")
		delete(cleanedMap, "items")
		delete(cleanedMap, "anyOf")
		return cleanedMap
	case []interface{}:
		// Prefer an empty list over deep recursion on attacker-controlled inputs.
		return []interface{}{}
	default:
		return params
	}
}

func normalizeGeminiSchemaTypeAndNullable(schema map[string]interface{}) {
	rawType, ok := schema["type"]
	if !ok || rawType == nil {
		return
	}
	normalize := func(t string) (string, bool) {
		switch strings.ToLower(strings.TrimSpace(t)) {
		case "object":
			return "OBJECT", false
		case "array":
			return "ARRAY", false
		case "string":
			return "STRING", false
		case "integer":
			return "INTEGER", false
		case "number":
			return "NUMBER", false
		case "boolean":
			return "BOOLEAN", false
		case "null":
			return "", true
		default:
			return t, false
		}
	}
	switch t := rawType.(type) {
	case string:
		normalized, isNull := normalize(t)
		if isNull {
			schema["nullable"] = true
			delete(schema, "type")
			return
		}
		schema["type"] = normalized
	case []interface{}:
		nullable := false
		var chosen string
		for _, item := range t {
			if s, ok := item.(string); ok {
				normalized, isNull := normalize(s)
				if isNull {
					nullable = true
					continue
				}
				if chosen == "" {
					chosen = normalized
				}
			}
		}
		if nullable {
			schema["nullable"] = true
		}
		if chosen != "" {
			schema["type"] = chosen
		} else {
			delete(schema, "type")
		}
	}
}
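
// Illustrative effect of normalizeGeminiSchemaTypeAndNullable (assumed inputs):
//
//	{"type": ["string", "null"]} -> {"type": "STRING", "nullable": true}
//	{"type": "null"}             -> {"nullable": true}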

func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
	if depth >= 5 {
		return schema
	}
	v, ok := schema.(map[string]interface{})
	if !ok || len(v) == 0 {
		return schema
	}
	// Drop all title fields.
	delete(v, "title")
	delete(v, "$schema")
	// If type is neither object nor array, return immediately.
	if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
		return schema
	}
	switch v["type"] {
	case "object":
		delete(v, "additionalProperties")
		// Handle properties.
		if properties, ok := v["properties"].(map[string]interface{}); ok {
			for key, value := range properties {
				properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
			}
		}
		for _, field := range []string{"allOf", "anyOf", "oneOf"} {
			if nested, ok := v[field].([]interface{}); ok {
				for i, item := range nested {
					nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
				}
			}
		}
	case "array":
		if items, ok := v["items"].(map[string]interface{}); ok {
			v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
		}
	}
	return v
}
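
// Illustrative effect of removeAdditionalPropertiesWithDepth (assumed schema);
// recursion stops past depth 5:
//
//	in:  {"type": "object", "title": "T", "additionalProperties": false,
//	      "properties": {"a": {"type": "string", "title": "A"}}}
//	out: {"type": "object",
//	      "properties": {"a": {"type": "string"}}}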

func unescapeString(s string) (string, error) {
	var result []rune
	escaped := false
	i := 0
	for i < len(s) {
		r, size := utf8.DecodeRuneInString(s[i:]) // decode the UTF-8 character correctly
		if r == utf8.RuneError {
			return "", fmt.Errorf("invalid UTF-8 encoding")
		}
		if escaped {
			// This character follows an escape marker; check its type.
			switch r {
			case '"':
				result = append(result, '"')
			case '\\':
				result = append(result, '\\')
			case '/':
				result = append(result, '/')
			case 'b':
				result = append(result, '\b')
			case 'f':
				result = append(result, '\f')
			case 'n':
				result = append(result, '\n')
			case 'r':
				result = append(result, '\r')
			case 't':
				result = append(result, '\t')
			case '\'':
				result = append(result, '\'')
			default:
				// An invalid escape sequence is emitted verbatim.
				result = append(result, '\\', r)
			}
			escaped = false
		} else {
			if r == '\\' {
				escaped = true // remember the backslash as an escape marker
			} else {
				result = append(result, r)
			}
		}
		i += size // advance to the next character
	}
	return string(result), nil
}
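
// Illustrative behavior of unescapeString (assumed inputs):
//
//	unescapeString(`say \"hi\"\n`) // -> say "hi" followed by a newline
//	unescapeString(`bad \x esc`)   // -> `bad \x esc` (invalid escapes kept verbatim)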

func unescapeMapOrSlice(data interface{}) interface{} {
	switch v := data.(type) {
	case map[string]interface{}:
		for k, val := range v {
			v[k] = unescapeMapOrSlice(val)
		}
	case []interface{}:
		for i, val := range v {
			v[i] = unescapeMapOrSlice(val)
		}
	case string:
		if unescaped, err := unescapeString(v); err != nil {
			return v
		} else {
			return unescaped
		}
	}
	return data
}

func getResponseToolCall(item *dto.GeminiPart) *dto.ToolCallResponse {
	var argsBytes []byte
	var err error
	// The unescapeMapOrSlice call was removed; json.Marshal is used directly,
	// since JSON serialization/deserialization already handles escapes correctly.
	argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
	if err != nil {
		return nil
	}
	return &dto.ToolCallResponse{
		ID:   fmt.Sprintf("call_%s", common.GetUUID()),
		Type: "function",
		Function: dto.FunctionResponse{
			Arguments: string(argsBytes),
			Name:      item.FunctionCall.FunctionName,
		},
	}
}
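
// Illustrative output shape of getResponseToolCall (assumed input values):
//
//	part.FunctionCall = {FunctionName: "get_weather", Arguments: {"city": "Paris"}}
//	-> &dto.ToolCallResponse{ID: "call_<uuid>", Type: "function",
//	       Function: {Name: "get_weather", Arguments: `{"city":"Paris"}`}}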

func responseGeminiChat2OpenAI(c *gin.Context, response *dto.GeminiChatResponse) *dto.OpenAITextResponse {
	fullTextResponse := dto.OpenAITextResponse{
		Id:      helper.GetResponseID(c),
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
	}
	isToolCall := false
	for _, candidate := range response.Candidates {
		choice := dto.OpenAITextResponseChoice{
			Index: int(candidate.Index),
			Message: dto.Message{
				Role:    "assistant",
				Content: "",
			},
			FinishReason: constant.FinishReasonStop,
		}
		if len(candidate.Content.Parts) > 0 {
			var texts []string
			var toolCalls []dto.ToolCallResponse
			for _, part := range candidate.Content.Parts {
				if part.InlineData != nil {
					// Media content.
					if strings.HasPrefix(part.InlineData.MimeType, "image") {
						imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
						texts = append(texts, imgText)
					} else {
						// Other media types: render a plain link.
						texts = append(texts, fmt.Sprintf("[media](data:%s;base64,%s)", part.InlineData.MimeType, part.InlineData.Data))
					}
				} else if part.FunctionCall != nil {
					choice.FinishReason = constant.FinishReasonToolCalls
					if call := getResponseToolCall(&part); call != nil {
						toolCalls = append(toolCalls, *call)
					}
				} else if part.Thought {
					choice.Message.ReasoningContent = part.Text
				} else {
					if part.ExecutableCode != nil {
						texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
					} else if part.CodeExecutionResult != nil {
						texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
					} else {
						// Filter out empty lines.
						if part.Text != "\n" {
							texts = append(texts, part.Text)
						}
					}
				}
			}
			if len(toolCalls) > 0 {
				choice.Message.SetToolCalls(toolCalls)
				isToolCall = true
			}
			choice.Message.SetStringContent(strings.Join(texts, "\n"))
		}
		if candidate.FinishReason != nil {
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = constant.FinishReasonLength
			case "SAFETY", "RECITATION", "BLOCKLIST", "PROHIBITED_CONTENT", "SPII", "OTHER":
				// Safety filter, recitation (citation), blocklist, prohibited
				// content, sensitive PII, or other reasons: map to content_filter.
				choice.FinishReason = constant.FinishReasonContentFilter
			default:
				choice.FinishReason = constant.FinishReasonContentFilter
			}
		}
		if isToolCall {
			choice.FinishReason = constant.FinishReasonToolCalls
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
}

func streamResponseGeminiChat2OpenAI(geminiResponse *dto.GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	for _, candidate := range geminiResponse.Candidates {
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				//Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		if candidate.FinishReason != nil {
			// Map Gemini FinishReason to OpenAI finish_reason
			switch *candidate.FinishReason {
			case "STOP":
				// Normal completion
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				// Reached maximum token limit
				choice.FinishReason = &constant.FinishReasonLength
			case "SAFETY", "RECITATION", "BLOCKLIST", "PROHIBITED_CONTENT", "SPII", "OTHER":
				// Safety filter, recitation (citation), blocklist, prohibited
				// content, sensitive PII, or other reasons
				choice.FinishReason = &constant.FinishReasonContentFilter
			default:
				// Unknown reason, treat as content filter
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop
}
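
// Note on the flow above: a "STOP" finish reason is cleared from the candidate
// and surfaced through the isStop return value instead, so the caller
// (GeminiChatStreamHandler) can emit the content chunk first and then a
// separate stop chunk via helper.GenerateStopResponse.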

func handleStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
	streamData, err := common.Marshal(resp)
	if err != nil {
		return fmt.Errorf("failed to marshal stream response: %w", err)
	}
	err = openai.HandleStreamFormat(c, info, string(streamData), info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
	if err != nil {
		return fmt.Errorf("failed to handle stream format: %w", err)
	}
	return nil
}

func handleFinalStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
	streamData, err := common.Marshal(resp)
	if err != nil {
		return fmt.Errorf("failed to marshal stream response: %w", err)
	}
	openai.HandleFinalResponse(c, info, string(streamData), resp.Id, resp.Created, resp.Model, resp.GetSystemFingerprint(), resp.Usage, false)
	return nil
}

func geminiStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response, callback func(data string, geminiResponse *dto.GeminiChatResponse) bool) (*dto.Usage, *types.NewAPIError) {
	var usage = &dto.Usage{}
	var imageCount int
	responseText := strings.Builder{}
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse dto.GeminiChatResponse
		err := common.UnmarshalJsonStr(data, &geminiResponse)
		if err != nil {
			logger.LogError(c, "error unmarshalling stream response: "+err.Error())
			return false
		}
		if len(geminiResponse.Candidates) == 0 && geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
			common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
		}
		// Count generated images.
		for _, candidate := range geminiResponse.Candidates {
			for _, part := range candidate.Content.Parts {
				if part.InlineData != nil && part.InlineData.MimeType != "" {
					imageCount++
				}
				if part.Text != "" {
					responseText.WriteString(part.Text)
				}
			}
		}
		// Update usage statistics.
		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount + geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
			usage.PromptTokensDetails.CachedTokens = geminiResponse.UsageMetadata.CachedContentTokenCount
			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
				if detail.Modality == "AUDIO" {
					usage.PromptTokensDetails.AudioTokens = detail.TokenCount
				} else if detail.Modality == "TEXT" {
					usage.PromptTokensDetails.TextTokens = detail.TokenCount
				}
			}
		}
		return callback(data, &geminiResponse)
	})
	if imageCount != 0 {
		if usage.CompletionTokens == 0 {
			// Fall back to a flat estimate of 1400 completion tokens per generated image.
			usage.CompletionTokens = imageCount * 1400
		}
	}
	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
	if usage.TotalTokens > 0 {
		usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	}
	if usage.CompletionTokens <= 0 {
		if info.ReceivedResponseCount > 0 {
			usage = service.ResponseText2Usage(c, responseText.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
		} else {
			usage = &dto.Usage{}
		}
	}
	return usage, nil
}

func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	id := helper.GetResponseID(c)
	createAt := common.GetTimestamp()
	finishReason := constant.FinishReasonStop
	toolCallIndexByChoice := make(map[int]map[string]int)
	nextToolCallIndexByChoice := make(map[int]int)
	usage, err := geminiStreamHandler(c, info, resp, func(data string, geminiResponse *dto.GeminiChatResponse) bool {
		response, isStop := streamResponseGeminiChat2OpenAI(geminiResponse)
		response.Id = id
		response.Created = createAt
		response.Model = info.UpstreamModelName
		for choiceIdx := range response.Choices {
			choiceKey := response.Choices[choiceIdx].Index
			for toolIdx := range response.Choices[choiceIdx].Delta.ToolCalls {
				tool := &response.Choices[choiceIdx].Delta.ToolCalls[toolIdx]
				if tool.ID == "" {
					continue
				}
				m := toolCallIndexByChoice[choiceKey]
				if m == nil {
					m = make(map[string]int)
					toolCallIndexByChoice[choiceKey] = m
				}
				if idx, ok := m[tool.ID]; ok {
					tool.SetIndex(idx)
					continue
				}
				idx := nextToolCallIndexByChoice[choiceKey]
				nextToolCallIndexByChoice[choiceKey] = idx + 1
				m[tool.ID] = idx
				tool.SetIndex(idx)
			}
		}
		logger.LogDebug(c, fmt.Sprintf("info.SendResponseCount = %d", info.SendResponseCount))
		if info.SendResponseCount == 0 {
			// send first response
			emptyResponse := helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil)
			if response.IsToolCall() {
				if len(emptyResponse.Choices) > 0 && len(response.Choices) > 0 {
					toolCalls := response.Choices[0].Delta.ToolCalls
					copiedToolCalls := make([]dto.ToolCallResponse, len(toolCalls))
					for idx := range toolCalls {
						copiedToolCalls[idx] = toolCalls[idx]
						copiedToolCalls[idx].Function.Arguments = ""
					}
					emptyResponse.Choices[0].Delta.ToolCalls = copiedToolCalls
				}
				finishReason = constant.FinishReasonToolCalls
				err := handleStream(c, info, emptyResponse)
				if err != nil {
					logger.LogError(c, err.Error())
				}
				response.ClearToolCalls()
				if response.IsFinished() {
					response.Choices[0].FinishReason = nil
				}
			} else {
				err := handleStream(c, info, emptyResponse)
				if err != nil {
					logger.LogError(c, err.Error())
				}
			}
		}
		err := handleStream(c, info, response)
		if err != nil {
			logger.LogError(c, err.Error())
		}
		if isStop {
			_ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, finishReason))
		}
		return true
	})
	if err != nil {
		return usage, err
	}
	response := helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
	handleErr := handleFinalStream(c, info, response)
	if handleErr != nil {
		common.SysLog("send final response failed: " + handleErr.Error())
	}
	return usage, nil
}
func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	service.CloseResponseBodyGracefully(resp)
	if common.DebugEnabled {
		println(string(responseBody))
	}
	var geminiResponse dto.GeminiChatResponse
	err = common.Unmarshal(responseBody, &geminiResponse)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if len(geminiResponse.Candidates) == 0 {
		usage := dto.Usage{
			PromptTokens: geminiResponse.UsageMetadata.PromptTokenCount,
		}
		usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
		usage.PromptTokensDetails.CachedTokens = geminiResponse.UsageMetadata.CachedContentTokenCount
		for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
			if detail.Modality == "AUDIO" {
				usage.PromptTokensDetails.AudioTokens = detail.TokenCount
			} else if detail.Modality == "TEXT" {
				usage.PromptTokensDetails.TextTokens = detail.TokenCount
			}
		}
		if usage.PromptTokens <= 0 {
			usage.PromptTokens = info.GetEstimatePromptTokens()
		}
		var newAPIError *types.NewAPIError
		if geminiResponse.PromptFeedback != nil && geminiResponse.PromptFeedback.BlockReason != nil {
			common.SetContextKey(c, constant.ContextKeyAdminRejectReason, fmt.Sprintf("gemini_block_reason=%s", *geminiResponse.PromptFeedback.BlockReason))
			newAPIError = types.NewOpenAIError(
				errors.New("request blocked by Gemini API: "+*geminiResponse.PromptFeedback.BlockReason),
				types.ErrorCodePromptBlocked,
				http.StatusBadRequest,
			)
		} else {
			common.SetContextKey(c, constant.ContextKeyAdminRejectReason, "gemini_empty_candidates")
			newAPIError = types.NewOpenAIError(
				errors.New("empty response from Gemini API"),
				types.ErrorCodeEmptyResponse,
				http.StatusInternalServerError,
			)
		}
		service.ResetStatusCode(newAPIError, c.GetString("status_code_mapping"))
		switch info.RelayFormat {
		case types.RelayFormatClaude:
			c.JSON(newAPIError.StatusCode, gin.H{
				"type":  "error",
				"error": newAPIError.ToClaudeError(),
			})
		default:
			c.JSON(newAPIError.StatusCode, gin.H{
				"error": newAPIError.ToOpenAIError(),
			})
		}
		// the error body has already been written to the client, so report usage without a relay error
		return &usage, nil
	}
	fullTextResponse := responseGeminiChat2OpenAI(c, &geminiResponse)
	fullTextResponse.Model = info.UpstreamModelName
	usage := dto.Usage{
		PromptTokens:     geminiResponse.UsageMetadata.PromptTokenCount,
		CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
		TotalTokens:      geminiResponse.UsageMetadata.TotalTokenCount,
	}
	usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
	usage.PromptTokensDetails.CachedTokens = geminiResponse.UsageMetadata.CachedContentTokenCount
	// overwrite with total - prompt so tokens outside candidatesTokenCount (e.g. thoughts) are counted
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
		if detail.Modality == "AUDIO" {
			usage.PromptTokensDetails.AudioTokens = detail.TokenCount
		} else if detail.Modality == "TEXT" {
			usage.PromptTokensDetails.TextTokens = detail.TokenCount
		}
	}
	fullTextResponse.Usage = usage
	switch info.RelayFormat {
	case types.RelayFormatOpenAI:
		responseBody, err = common.Marshal(fullTextResponse)
		if err != nil {
			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
		}
	case types.RelayFormatClaude:
		claudeResp := service.ResponseOpenAI2Claude(fullTextResponse, info)
		claudeRespStr, err := common.Marshal(claudeResp)
		if err != nil {
			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
		}
		responseBody = claudeRespStr
	case types.RelayFormatGemini:
		// pass the upstream Gemini body through unchanged
	}
	service.IOCopyBytesGracefully(c, resp, responseBody)
	return &usage, nil
}
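// Worked example (numbers illustrative): given usageMetadata
// {promptTokenCount: 80, candidatesTokenCount: 30, thoughtsTokenCount: 10,
// totalTokenCount: 120}, the handler above reports prompt_tokens=80 and
// completion_tokens = total - prompt = 40, so the 10 thought tokens are billed
// as completion tokens alongside the 30 candidate tokens, with reasoning_tokens=10.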
func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	defer service.CloseResponseBodyGracefully(resp)
	responseBody, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	var geminiResponse dto.GeminiBatchEmbeddingResponse
	if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
		return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	// convert to an OpenAI-format response
	openAIResponse := dto.OpenAIEmbeddingResponse{
		Object: "list",
		Data:   make([]dto.OpenAIEmbeddingResponseItem, 0, len(geminiResponse.Embeddings)),
		Model:  info.UpstreamModelName,
	}
	for i, embedding := range geminiResponse.Embeddings {
		openAIResponse.Data = append(openAIResponse.Data, dto.OpenAIEmbeddingResponseItem{
			Object:    "embedding",
			Embedding: embedding.Values,
			Index:     i,
		})
	}
	// calculate usage
	// https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
	// Google has not yet clarified how embedding models are billed, so follow
	// OpenAI's convention and bill by input tokens:
	// https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
	usage := service.ResponseText2Usage(c, "", info.UpstreamModelName, info.GetEstimatePromptTokens())
	openAIResponse.Usage = *usage
	jsonResponse, jsonErr := common.Marshal(openAIResponse)
	if jsonErr != nil {
		return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	service.IOCopyBytesGracefully(c, resp, jsonResponse)
	return usage, nil
}
func GeminiImageHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	responseBody, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	_ = resp.Body.Close()
	var geminiResponse dto.GeminiImageResponse
	if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
		return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if len(geminiResponse.Predictions) == 0 {
		return nil, types.NewOpenAIError(errors.New("no images generated"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	// convert to an OpenAI-format response
	openAIResponse := dto.ImageResponse{
		Created: common.GetTimestamp(),
		Data:    make([]dto.ImageData, 0, len(geminiResponse.Predictions)),
	}
	for _, prediction := range geminiResponse.Predictions {
		if prediction.RaiFilteredReason != "" {
			continue // skip images removed by the RAI safety filter
		}
		openAIResponse.Data = append(openAIResponse.Data, dto.ImageData{
			B64Json: prediction.BytesBase64Encoded,
		})
	}
	jsonResponse, jsonErr := json.Marshal(openAIResponse)
	if jsonErr != nil {
		return nil, types.NewError(jsonErr, types.ErrorCodeBadResponseBody)
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	_, _ = c.Writer.Write(jsonResponse)
	// https://github.com/google-gemini/cookbook/blob/719a27d752aac33f39de18a8d3cb42a70874917e/quickstarts/Counting_Tokens.ipynb
	// each image costs a fixed 258 tokens
	const imageTokens = 258
	generatedImages := len(openAIResponse.Data)
	usage := &dto.Usage{
		PromptTokens:     imageTokens * generatedImages, // billed as prompt tokens: 258 per generated image
		CompletionTokens: 0,                             // image generation reports no completion tokens
		TotalTokens:      imageTokens * generatedImages,
	}
	return usage, nil
}
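// Worked example (illustrative): a response with 3 unfiltered predictions is
// billed as 3 * 258 = 774 tokens, reported as both PromptTokens and TotalTokens
// with zero CompletionTokens.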
type GeminiModelsResponse struct {
	Models        []dto.GeminiModel `json:"models"`
	NextPageToken string            `json:"nextPageToken"`
}
func FetchGeminiModels(baseURL, apiKey, proxyURL string) ([]string, error) {
	client, err := service.GetHttpClientWithProxy(proxyURL)
	if err != nil {
		return nil, fmt.Errorf("failed to create HTTP client: %v", err)
	}
	allModels := make([]string, 0)
	nextPageToken := ""
	maxPages := 100 // safety limit to prevent infinite pagination loops
	for page := 0; page < maxPages; page++ {
		url := fmt.Sprintf("%s/v1beta/models", baseURL)
		if nextPageToken != "" {
			url = fmt.Sprintf("%s?pageToken=%s", url, nextPageToken)
		}
		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
		request, err := http.NewRequestWithContext(ctx, "GET", url, nil)
		if err != nil {
			cancel()
			return nil, fmt.Errorf("failed to create request: %v", err)
		}
		request.Header.Set("x-goog-api-key", apiKey)
		response, err := client.Do(request)
		if err != nil {
			cancel()
			return nil, fmt.Errorf("request failed: %v", err)
		}
		if response.StatusCode != http.StatusOK {
			body, _ := io.ReadAll(response.Body)
			response.Body.Close()
			cancel()
			return nil, fmt.Errorf("server returned error %d: %s", response.StatusCode, string(body))
		}
		body, err := io.ReadAll(response.Body)
		response.Body.Close()
		cancel()
		if err != nil {
			return nil, fmt.Errorf("failed to read response: %v", err)
		}
		var modelsResponse GeminiModelsResponse
		if err = common.Unmarshal(body, &modelsResponse); err != nil {
			return nil, fmt.Errorf("failed to parse response: %v", err)
		}
		for _, model := range modelsResponse.Models {
			modelNameValue, ok := model.Name.(string)
			if !ok {
				continue
			}
			modelName := strings.TrimPrefix(modelNameValue, "models/")
			allModels = append(allModels, modelName)
		}
		nextPageToken = modelsResponse.NextPageToken
		if nextPageToken == "" {
			break
		}
	}
	return allModels, nil
}
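// Request sequence (illustrative): page 1 is GET {baseURL}/v1beta/models; while
// nextPageToken is non-empty, the next page is
// GET {baseURL}/v1beta/models?pageToken={nextPageToken}, for at most 100 pages.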
// convertToolChoiceToGeminiConfig converts OpenAI tool_choice to Gemini toolConfig.
// OpenAI tool_choice values:
//   - "auto": let the model decide (default)
//   - "none": don't call any tools
//   - "required": must call at least one tool
//   - {"type": "function", "function": {"name": "xxx"}}: call a specific function
//
// Gemini functionCallingConfig.mode values:
//   - "AUTO": model decides whether to call functions
//   - "NONE": model won't call functions
//   - "ANY": model must call at least one function
func convertToolChoiceToGeminiConfig(toolChoice any) *dto.ToolConfig {
	if toolChoice == nil {
		return nil
	}
	// Handle string values: "auto", "none", "required"
	if toolChoiceStr, ok := toolChoice.(string); ok {
		config := &dto.ToolConfig{
			FunctionCallingConfig: &dto.FunctionCallingConfig{},
		}
		switch toolChoiceStr {
		case "auto":
			config.FunctionCallingConfig.Mode = "AUTO"
		case "none":
			config.FunctionCallingConfig.Mode = "NONE"
		case "required":
			config.FunctionCallingConfig.Mode = "ANY"
		default:
			// Unknown string value, default to AUTO
			config.FunctionCallingConfig.Mode = "AUTO"
		}
		return config
	}
	// Handle object value: {"type": "function", "function": {"name": "xxx"}}
	if toolChoiceMap, ok := toolChoice.(map[string]interface{}); ok {
		if toolChoiceMap["type"] == "function" {
			config := &dto.ToolConfig{
				FunctionCallingConfig: &dto.FunctionCallingConfig{
					Mode: "ANY",
				},
			}
			// Extract the function name if one is specified
			if function, ok := toolChoiceMap["function"].(map[string]interface{}); ok {
				if name, ok := function["name"].(string); ok && name != "" {
					config.FunctionCallingConfig.AllowedFunctionNames = []string{name}
				}
			}
			return config
		}
		// Unsupported map structure (type is not "function"), return nil
		return nil
	}
	// Unsupported type, return nil
	return nil
}