relay-gemini.go 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193
  1. package gemini
import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"sort"
	"strconv"
	"strings"
	"unicode/utf8"

	"github.com/gin-gonic/gin"

	"one-api/common"
	"one-api/constant"
	"one-api/dto"
	"one-api/logger"
	"one-api/relay/channel/openai"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting/model_setting"
	"one-api/types"
)
// geminiSupportedMimeTypes whitelists the inline-data MIME types the Gemini
// inference API accepts. Lookups are performed against the lowercased MIME
// type of fetched files before sending them upstream.
// https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/inference?hl=zh-cn#blob
var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"image/webp":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
// Thinking-budget ranges allowed by Gemini, per model family.
const (
	pro25MinBudget       = 128   // minimum budget for new gemini-2.5-pro models
	pro25MaxBudget       = 32768 // maximum budget for new gemini-2.5-pro models
	flash25MaxBudget     = 24576 // maximum budget for gemini-2.5-flash (its minimum is 0)
	flash25LiteMinBudget = 512   // minimum budget for gemini-2.5-flash-lite
	flash25LiteMaxBudget = 24576 // maximum budget for gemini-2.5-flash-lite
)
  50. func isNew25ProModel(modelName string) bool {
  51. return strings.HasPrefix(modelName, "gemini-2.5-pro") &&
  52. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
  53. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
  54. }
  55. func is25FlashLiteModel(modelName string) bool {
  56. return strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
  57. }
  58. // clampThinkingBudget 根据模型名称将预算限制在允许的范围内
  59. func clampThinkingBudget(modelName string, budget int) int {
  60. isNew25Pro := isNew25ProModel(modelName)
  61. is25FlashLite := is25FlashLiteModel(modelName)
  62. if is25FlashLite {
  63. if budget < flash25LiteMinBudget {
  64. return flash25LiteMinBudget
  65. }
  66. if budget > flash25LiteMaxBudget {
  67. return flash25LiteMaxBudget
  68. }
  69. } else if isNew25Pro {
  70. if budget < pro25MinBudget {
  71. return pro25MinBudget
  72. }
  73. if budget > pro25MaxBudget {
  74. return pro25MaxBudget
  75. }
  76. } else { // 其他模型
  77. if budget < 0 {
  78. return 0
  79. }
  80. if budget > flash25MaxBudget {
  81. return flash25MaxBudget
  82. }
  83. }
  84. return budget
  85. }
  86. // "effort": "high" - Allocates a large portion of tokens for reasoning (approximately 80% of max_tokens)
  87. // "effort": "medium" - Allocates a moderate portion of tokens (approximately 50% of max_tokens)
  88. // "effort": "low" - Allocates a smaller portion of tokens (approximately 20% of max_tokens)
  89. func clampThinkingBudgetByEffort(modelName string, effort string) int {
  90. isNew25Pro := isNew25ProModel(modelName)
  91. is25FlashLite := is25FlashLiteModel(modelName)
  92. maxBudget := 0
  93. if is25FlashLite {
  94. maxBudget = flash25LiteMaxBudget
  95. }
  96. if isNew25Pro {
  97. maxBudget = pro25MaxBudget
  98. } else {
  99. maxBudget = flash25MaxBudget
  100. }
  101. switch effort {
  102. case "high":
  103. maxBudget = maxBudget * 80 / 100
  104. case "medium":
  105. maxBudget = maxBudget * 50 / 100
  106. case "low":
  107. maxBudget = maxBudget * 20 / 100
  108. }
  109. return clampThinkingBudget(modelName, maxBudget)
  110. }
  111. func ThinkingAdaptor(geminiRequest *dto.GeminiChatRequest, info *relaycommon.RelayInfo, oaiRequest ...dto.GeneralOpenAIRequest) {
  112. if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
  113. modelName := info.UpstreamModelName
  114. isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
  115. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
  116. !strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
  117. if strings.Contains(modelName, "-thinking-") {
  118. parts := strings.SplitN(modelName, "-thinking-", 2)
  119. if len(parts) == 2 && parts[1] != "" {
  120. if budgetTokens, err := strconv.Atoi(parts[1]); err == nil {
  121. clampedBudget := clampThinkingBudget(modelName, budgetTokens)
  122. geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
  123. ThinkingBudget: common.GetPointer(clampedBudget),
  124. IncludeThoughts: true,
  125. }
  126. }
  127. }
  128. } else if strings.HasSuffix(modelName, "-thinking") {
  129. unsupportedModels := []string{
  130. "gemini-2.5-pro-preview-05-06",
  131. "gemini-2.5-pro-preview-03-25",
  132. }
  133. isUnsupported := false
  134. for _, unsupportedModel := range unsupportedModels {
  135. if strings.HasPrefix(modelName, unsupportedModel) {
  136. isUnsupported = true
  137. break
  138. }
  139. }
  140. if isUnsupported {
  141. geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
  142. IncludeThoughts: true,
  143. }
  144. } else {
  145. geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
  146. IncludeThoughts: true,
  147. }
  148. if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
  149. budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
  150. clampedBudget := clampThinkingBudget(modelName, int(budgetTokens))
  151. geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampedBudget)
  152. } else {
  153. if len(oaiRequest) > 0 {
  154. // 如果有reasoningEffort参数,则根据其值设置思考预算
  155. geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampThinkingBudgetByEffort(modelName, oaiRequest[0].ReasoningEffort))
  156. }
  157. }
  158. }
  159. } else if strings.HasSuffix(modelName, "-nothinking") {
  160. if !isNew25Pro {
  161. geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
  162. ThinkingBudget: common.GetPointer(0),
  163. }
  164. }
  165. }
  166. }
  167. }
// Setting safety to the lowest possible values since Gemini is already powerless enough
//
// CovertGemini2OpenAI converts an OpenAI-style chat request into a Gemini
// chat request: generation config, thinking config (from extra_body or the
// model-name suffix adapter), safety settings, tools, and the message list
// (system messages become systemInstruction; tool results become
// functionResponse parts; media is inlined as base64).
func CovertGemini2OpenAI(c *gin.Context, textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*dto.GeminiChatRequest, error) {
	geminiRequest := dto.GeminiChatRequest{
		Contents: make([]dto.GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: dto.GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.GetMaxTokens(),
			Seed:            int64(textRequest.Seed),
		},
	}
	// Image-capable models must request both TEXT and IMAGE modalities.
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	// A thinking_config supplied via extra_body takes precedence over the
	// model-name-suffix based ThinkingAdaptor below.
	adaptorWithExtraBody := false
	if len(textRequest.ExtraBody) > 0 {
		if !strings.HasSuffix(info.UpstreamModelName, "-nothinking") {
			var extraBody map[string]interface{}
			if err := common.Unmarshal(textRequest.ExtraBody, &extraBody); err != nil {
				return nil, fmt.Errorf("invalid extra body: %w", err)
			}
			// eg. {"google":{"thinking_config":{"thinking_budget":5324,"include_thoughts":true}}}
			if googleBody, ok := extraBody["google"].(map[string]interface{}); ok {
				adaptorWithExtraBody = true
				if thinkingConfig, ok := googleBody["thinking_config"].(map[string]interface{}); ok {
					// JSON numbers decode as float64; truncate to int.
					if budget, ok := thinkingConfig["thinking_budget"].(float64); ok {
						budgetInt := int(budget)
						geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
							ThinkingBudget:  common.GetPointer(budgetInt),
							IncludeThoughts: true,
						}
					} else {
						geminiRequest.GenerationConfig.ThinkingConfig = &dto.GeminiThinkingConfig{
							IncludeThoughts: true,
						}
					}
				}
			}
		}
	}
	if !adaptorWithExtraBody {
		ThinkingAdaptor(&geminiRequest, info, textRequest)
	}
	// Apply the configured threshold for every safety category.
	safetySettings := make([]dto.GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, dto.GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	// Tools: the pseudo-functions googleSearch/codeExecution/urlContext map to
	// Gemini built-in tools; everything else becomes a function declaration.
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		urlContext := false
		for _, tool := range textRequest.Tools {
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			if tool.Function.Name == "urlContext" {
				urlContext = true
				continue
			}
			// Gemini rejects an object schema with an empty properties map;
			// drop the parameters entirely in that case.
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		geminiTools := geminiRequest.GetTools()
		if codeExecution {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if urlContext {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				URLContext: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiTools = append(geminiTools, dto.GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		geminiRequest.SetTools(geminiTools)
	}
	// JSON response formats map to Gemini's application/json MIME type; a
	// json_schema additionally sets a cleaned responseSchema.
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if len(textRequest.ResponseFormat.JsonSchema) > 0 {
			// Parse the raw json.RawMessage first.
			var jsonSchema dto.FormatJsonSchema
			if err := common.Unmarshal(textRequest.ResponseFormat.JsonSchema, &jsonSchema); err == nil {
				cleanedSchema := removeAdditionalPropertiesWithDepth(jsonSchema.Schema, 0)
				geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
			}
		}
	}
	// Maps tool_call_id -> function name, so later tool-result messages can be
	// attributed when they carry no explicit name.
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			// System messages are collected and emitted as systemInstruction.
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			// Tool results must live in a "user" turn; open one if the last
			// content is missing or belongs to the model.
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, dto.GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object.
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. On failure, try a JSON array.
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					// Arrays are wrapped in an object.
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. Otherwise treat it as plain text.
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &dto.GeminiFunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, dto.GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []dto.GeminiPart
		content := dto.GeminiChatContent{
			Role: message.Role,
		}
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			// Assistant tool calls become functionCall parts; arguments must
			// be a JSON object.
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := dto.GeminiPart{
					FunctionCall: &dto.FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				// Remember the name so matching tool results can reference it.
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				parts = append(parts, dto.GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Is this a remote URL (as opposed to a data URI)?
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					// Fetch the file to obtain its MIME type and base64 data.
					fileData, err := service.GetFileBase64FromUrl(c, part.GetImageMedia().Url, "formatting image for Gemini")
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Verify the MIME type is in Gemini's supported whitelist.
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						url := part.GetImageMedia().Url
						return nil, fmt.Errorf("mime type is not supported by Gemini: '%s', url: '%s', supported types are: %v", fileData.MimeType, url, getSupportedMimeTypesList())
					}
					parts = append(parts, dto.GeminiPart{
						InlineData: &dto.GeminiInlineData{
							MimeType: fileData.MimeType, // keep the original casing; it may matter to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, dto.GeminiPart{
						InlineData: &dto.GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, dto.GeminiPart{
					InlineData: &dto.GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				base64String, err := service.DecodeBase64AudioData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, dto.GeminiPart{
					InlineData: &dto.GeminiInlineData{
						MimeType: "audio/" + part.GetInputAudio().Format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// there's no assistant role in gemini and API shall vomit if Role is not user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		if len(content.Parts) > 0 {
			geminiRequest.Contents = append(geminiRequest.Contents, content)
		}
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &dto.GeminiChatContent{
			Parts: []dto.GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
  453. // Helper function to get a list of supported MIME types for error messages
  454. func getSupportedMimeTypesList() []string {
  455. keys := make([]string, 0, len(geminiSupportedMimeTypes))
  456. for k := range geminiSupportedMimeTypes {
  457. keys = append(keys, k)
  458. }
  459. return keys
  460. }
  461. // cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
  462. func cleanFunctionParameters(params interface{}) interface{} {
  463. if params == nil {
  464. return nil
  465. }
  466. switch v := params.(type) {
  467. case map[string]interface{}:
  468. // Create a copy to avoid modifying the original
  469. cleanedMap := make(map[string]interface{})
  470. for k, val := range v {
  471. cleanedMap[k] = val
  472. }
  473. // Remove unsupported root-level fields
  474. delete(cleanedMap, "default")
  475. delete(cleanedMap, "exclusiveMaximum")
  476. delete(cleanedMap, "exclusiveMinimum")
  477. delete(cleanedMap, "$schema")
  478. delete(cleanedMap, "additionalProperties")
  479. // Check and clean 'format' for string types
  480. if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
  481. if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
  482. if formatValue != "enum" && formatValue != "date-time" {
  483. delete(cleanedMap, "format")
  484. }
  485. }
  486. }
  487. // Clean properties
  488. if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
  489. cleanedProps := make(map[string]interface{})
  490. for propName, propValue := range props {
  491. cleanedProps[propName] = cleanFunctionParameters(propValue)
  492. }
  493. cleanedMap["properties"] = cleanedProps
  494. }
  495. // Recursively clean items in arrays
  496. if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
  497. cleanedMap["items"] = cleanFunctionParameters(items)
  498. }
  499. // Also handle items if it's an array of schemas
  500. if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
  501. cleanedItemsArray := make([]interface{}, len(itemsArray))
  502. for i, item := range itemsArray {
  503. cleanedItemsArray[i] = cleanFunctionParameters(item)
  504. }
  505. cleanedMap["items"] = cleanedItemsArray
  506. }
  507. // Recursively clean other schema composition keywords
  508. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  509. if nested, ok := cleanedMap[field].([]interface{}); ok {
  510. cleanedNested := make([]interface{}, len(nested))
  511. for i, item := range nested {
  512. cleanedNested[i] = cleanFunctionParameters(item)
  513. }
  514. cleanedMap[field] = cleanedNested
  515. }
  516. }
  517. // Recursively clean patternProperties
  518. if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
  519. cleanedPatternProps := make(map[string]interface{})
  520. for pattern, schema := range patternProps {
  521. cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
  522. }
  523. cleanedMap["patternProperties"] = cleanedPatternProps
  524. }
  525. // Recursively clean definitions
  526. if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
  527. cleanedDefinitions := make(map[string]interface{})
  528. for defName, defSchema := range definitions {
  529. cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
  530. }
  531. cleanedMap["definitions"] = cleanedDefinitions
  532. }
  533. // Recursively clean $defs (newer JSON Schema draft)
  534. if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
  535. cleanedDefs := make(map[string]interface{})
  536. for defName, defSchema := range defs {
  537. cleanedDefs[defName] = cleanFunctionParameters(defSchema)
  538. }
  539. cleanedMap["$defs"] = cleanedDefs
  540. }
  541. // Clean conditional keywords
  542. for _, field := range []string{"if", "then", "else", "not"} {
  543. if nested, ok := cleanedMap[field]; ok {
  544. cleanedMap[field] = cleanFunctionParameters(nested)
  545. }
  546. }
  547. return cleanedMap
  548. case []interface{}:
  549. // Handle arrays of schemas
  550. cleanedArray := make([]interface{}, len(v))
  551. for i, item := range v {
  552. cleanedArray[i] = cleanFunctionParameters(item)
  553. }
  554. return cleanedArray
  555. default:
  556. // Not a map or array, return as is (e.g., could be a primitive)
  557. return params
  558. }
  559. }
  560. func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
  561. if depth >= 5 {
  562. return schema
  563. }
  564. v, ok := schema.(map[string]interface{})
  565. if !ok || len(v) == 0 {
  566. return schema
  567. }
  568. // 删除所有的title字段
  569. delete(v, "title")
  570. delete(v, "$schema")
  571. // 如果type不为object和array,则直接返回
  572. if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
  573. return schema
  574. }
  575. switch v["type"] {
  576. case "object":
  577. delete(v, "additionalProperties")
  578. // 处理 properties
  579. if properties, ok := v["properties"].(map[string]interface{}); ok {
  580. for key, value := range properties {
  581. properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
  582. }
  583. }
  584. for _, field := range []string{"allOf", "anyOf", "oneOf"} {
  585. if nested, ok := v[field].([]interface{}); ok {
  586. for i, item := range nested {
  587. nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
  588. }
  589. }
  590. }
  591. case "array":
  592. if items, ok := v["items"].(map[string]interface{}); ok {
  593. v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
  594. }
  595. }
  596. return v
  597. }
  598. func unescapeString(s string) (string, error) {
  599. var result []rune
  600. escaped := false
  601. i := 0
  602. for i < len(s) {
  603. r, size := utf8.DecodeRuneInString(s[i:]) // 正确解码UTF-8字符
  604. if r == utf8.RuneError {
  605. return "", fmt.Errorf("invalid UTF-8 encoding")
  606. }
  607. if escaped {
  608. // 如果是转义符后的字符,检查其类型
  609. switch r {
  610. case '"':
  611. result = append(result, '"')
  612. case '\\':
  613. result = append(result, '\\')
  614. case '/':
  615. result = append(result, '/')
  616. case 'b':
  617. result = append(result, '\b')
  618. case 'f':
  619. result = append(result, '\f')
  620. case 'n':
  621. result = append(result, '\n')
  622. case 'r':
  623. result = append(result, '\r')
  624. case 't':
  625. result = append(result, '\t')
  626. case '\'':
  627. result = append(result, '\'')
  628. default:
  629. // 如果遇到一个非法的转义字符,直接按原样输出
  630. result = append(result, '\\', r)
  631. }
  632. escaped = false
  633. } else {
  634. if r == '\\' {
  635. escaped = true // 记录反斜杠作为转义符
  636. } else {
  637. result = append(result, r)
  638. }
  639. }
  640. i += size // 移动到下一个字符
  641. }
  642. return string(result), nil
  643. }
  644. func unescapeMapOrSlice(data interface{}) interface{} {
  645. switch v := data.(type) {
  646. case map[string]interface{}:
  647. for k, val := range v {
  648. v[k] = unescapeMapOrSlice(val)
  649. }
  650. case []interface{}:
  651. for i, val := range v {
  652. v[i] = unescapeMapOrSlice(val)
  653. }
  654. case string:
  655. if unescaped, err := unescapeString(v); err != nil {
  656. return v
  657. } else {
  658. return unescaped
  659. }
  660. }
  661. return data
  662. }
  663. func getResponseToolCall(item *dto.GeminiPart) *dto.ToolCallResponse {
  664. var argsBytes []byte
  665. var err error
  666. if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
  667. argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
  668. } else {
  669. argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
  670. }
  671. if err != nil {
  672. return nil
  673. }
  674. return &dto.ToolCallResponse{
  675. ID: fmt.Sprintf("call_%s", common.GetUUID()),
  676. Type: "function",
  677. Function: dto.FunctionResponse{
  678. Arguments: string(argsBytes),
  679. Name: item.FunctionCall.FunctionName,
  680. },
  681. }
  682. }
  683. func responseGeminiChat2OpenAI(c *gin.Context, response *dto.GeminiChatResponse) *dto.OpenAITextResponse {
  684. fullTextResponse := dto.OpenAITextResponse{
  685. Id: helper.GetResponseID(c),
  686. Object: "chat.completion",
  687. Created: common.GetTimestamp(),
  688. Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
  689. }
  690. isToolCall := false
  691. for _, candidate := range response.Candidates {
  692. choice := dto.OpenAITextResponseChoice{
  693. Index: int(candidate.Index),
  694. Message: dto.Message{
  695. Role: "assistant",
  696. Content: "",
  697. },
  698. FinishReason: constant.FinishReasonStop,
  699. }
  700. if len(candidate.Content.Parts) > 0 {
  701. var texts []string
  702. var toolCalls []dto.ToolCallResponse
  703. for _, part := range candidate.Content.Parts {
  704. if part.InlineData != nil {
  705. // 媒体内容
  706. if strings.HasPrefix(part.InlineData.MimeType, "image") {
  707. imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
  708. texts = append(texts, imgText)
  709. } else {
  710. // 其他媒体类型,直接显示链接
  711. texts = append(texts, fmt.Sprintf("[media](data:%s;base64,%s)", part.InlineData.MimeType, part.InlineData.Data))
  712. }
  713. } else if part.FunctionCall != nil {
  714. choice.FinishReason = constant.FinishReasonToolCalls
  715. if call := getResponseToolCall(&part); call != nil {
  716. toolCalls = append(toolCalls, *call)
  717. }
  718. } else if part.Thought {
  719. choice.Message.ReasoningContent = part.Text
  720. } else {
  721. if part.ExecutableCode != nil {
  722. texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
  723. } else if part.CodeExecutionResult != nil {
  724. texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
  725. } else {
  726. // 过滤掉空行
  727. if part.Text != "\n" {
  728. texts = append(texts, part.Text)
  729. }
  730. }
  731. }
  732. }
  733. if len(toolCalls) > 0 {
  734. choice.Message.SetToolCalls(toolCalls)
  735. isToolCall = true
  736. }
  737. choice.Message.SetStringContent(strings.Join(texts, "\n"))
  738. }
  739. if candidate.FinishReason != nil {
  740. switch *candidate.FinishReason {
  741. case "STOP":
  742. choice.FinishReason = constant.FinishReasonStop
  743. case "MAX_TOKENS":
  744. choice.FinishReason = constant.FinishReasonLength
  745. default:
  746. choice.FinishReason = constant.FinishReasonContentFilter
  747. }
  748. }
  749. if isToolCall {
  750. choice.FinishReason = constant.FinishReasonToolCalls
  751. }
  752. fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
  753. }
  754. return &fullTextResponse
  755. }
// streamResponseGeminiChat2OpenAI converts a single Gemini streaming chunk
// into an OpenAI "chat.completion.chunk" response. The second return value
// reports whether any candidate finished with reason "STOP", so the caller
// can emit a dedicated stop chunk afterwards.
func streamResponseGeminiChat2OpenAI(geminiResponse *dto.GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	for _, candidate := range geminiResponse.Candidates {
		// "STOP" is reported to the caller via isStop and cleared on the local
		// loop copy so the finish-reason switch below does not also set it on
		// the choice.
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				//Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		// Map the remaining (non-STOP) Gemini finish reasons onto OpenAI ones;
		// anything unrecognized is reported as a content filter.
		if candidate.FinishReason != nil {
			// p := GeminiConvertFinishReason(*candidate.FinishReason)
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				// Inline image data is surfaced as an inline markdown image.
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					// Model-generated code is rendered as a fenced code block.
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					// Filter out parts that are a bare newline.
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		// NOTE(review): if a chunk ever mixes thought and non-thought text
		// parts, everything accumulated in texts is emitted as reasoning
		// content — confirm upstream chunks never mix the two kinds.
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		// A function call overrides any finish reason mapped above.
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop
}
  826. func handleStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
  827. streamData, err := common.Marshal(resp)
  828. if err != nil {
  829. return fmt.Errorf("failed to marshal stream response: %w", err)
  830. }
  831. err = openai.HandleStreamFormat(c, info, string(streamData), info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
  832. if err != nil {
  833. return fmt.Errorf("failed to handle stream format: %w", err)
  834. }
  835. return nil
  836. }
  837. func handleFinalStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
  838. streamData, err := common.Marshal(resp)
  839. if err != nil {
  840. return fmt.Errorf("failed to marshal stream response: %w", err)
  841. }
  842. openai.HandleFinalResponse(c, info, string(streamData), resp.Id, resp.Created, resp.Model, resp.GetSystemFingerprint(), resp.Usage, false)
  843. return nil
  844. }
// GeminiChatStreamHandler consumes a Gemini SSE stream, converts each chunk to
// the OpenAI streaming format, relays it to the client, and accumulates usage.
// Returns the final usage on success, or an error when upstream produced no
// chunks at all (in which case the request is not billed).
func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	// responseText := ""
	id := helper.GetResponseID(c)
	createAt := common.GetTimestamp()
	responseText := strings.Builder{} // accumulated text, used as a token-count fallback below
	var usage = &dto.Usage{}
	var imageCount int // inline-data parts seen; each generated image is billed at a fixed 258 tokens
	finishReason := constant.FinishReasonStop
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse dto.GeminiChatResponse
		err := common.UnmarshalJsonStr(data, &geminiResponse)
		if err != nil {
			logger.LogError(c, "error unmarshalling stream response: "+err.Error())
			// Returning false stops the scanner.
			return false
		}
		// Tally inline images and raw text across all candidates/parts.
		for _, candidate := range geminiResponse.Candidates {
			for _, part := range candidate.Content.Parts {
				if part.InlineData != nil && part.InlineData.MimeType != "" {
					imageCount++
				}
				if part.Text != "" {
					responseText.WriteString(part.Text)
				}
			}
		}
		response, isStop := streamResponseGeminiChat2OpenAI(&geminiResponse)
		response.Id = id
		response.Created = createAt
		response.Model = info.UpstreamModelName
		// Usage metadata may arrive on any chunk; the latest non-zero total wins.
		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
				if detail.Modality == "AUDIO" {
					usage.PromptTokensDetails.AudioTokens = detail.TokenCount
				} else if detail.Modality == "TEXT" {
					usage.PromptTokensDetails.TextTokens = detail.TokenCount
				}
			}
		}
		logger.LogDebug(c, fmt.Sprintf("info.SendResponseCount = %d", info.SendResponseCount))
		if info.SendResponseCount == 0 {
			// send first response
			emptyResponse := helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil)
			if response.IsToolCall() {
				// Announce the tool call in the opening chunk (name only, no
				// arguments), then strip it from the real chunk so it is not
				// sent twice.
				emptyResponse.Choices[0].Delta.ToolCalls = make([]dto.ToolCallResponse, 1)
				emptyResponse.Choices[0].Delta.ToolCalls[0] = *response.GetFirstToolCall()
				emptyResponse.Choices[0].Delta.ToolCalls[0].Function.Arguments = ""
				finishReason = constant.FinishReasonToolCalls
				err = handleStream(c, info, emptyResponse)
				if err != nil {
					logger.LogError(c, err.Error())
				}
				response.ClearToolCalls()
				if response.IsFinished() {
					// The finish reason will be delivered by the stop chunk.
					response.Choices[0].FinishReason = nil
				}
			} else {
				err = handleStream(c, info, emptyResponse)
				if err != nil {
					logger.LogError(c, err.Error())
				}
			}
		}
		err = handleStream(c, info, response)
		if err != nil {
			logger.LogError(c, err.Error())
		}
		if isStop {
			_ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, finishReason))
		}
		return true
	})
	if info.SendResponseCount == 0 {
		// Empty completion: report an error and do not bill.
		// empty response, throw an error
		return nil, types.NewOpenAIError(errors.New("no response received from Gemini API"), types.ErrorCodeEmptyResponse, http.StatusInternalServerError)
	}
	if imageCount != 0 {
		if usage.CompletionTokens == 0 {
			// Fallback: bill generated images at a fixed 258 tokens each.
			usage.CompletionTokens = imageCount * 258
		}
	}
	// NOTE(review): this overwrites the AUDIO/TEXT modality split collected in
	// the stream callback above — confirm that is intentional.
	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
	// NOTE(review): this also clobbers the image-token fallback just computed
	// whenever TotalTokens/PromptTokens are zero or equal — verify ordering.
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	if usage.CompletionTokens == 0 {
		str := responseText.String()
		if len(str) > 0 {
			// No usable usage metadata: estimate from the accumulated text.
			usage = service.ResponseText2Usage(responseText.String(), info.UpstreamModelName, info.PromptTokens)
		} else {
			// Empty completion: no usage required.
			usage = &dto.Usage{}
		}
	}
	response := helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
	err := handleFinalStream(c, info, response)
	if err != nil {
		common.SysLog("send final response failed: " + err.Error())
	}
	//if info.RelayFormat == relaycommon.RelayFormatOpenAI {
	//	helper.Done(c)
	//}
	//resp.Body.Close()
	return usage, nil
}
// GeminiChatHandler handles a non-streaming Gemini chat response: it decodes
// the upstream body, converts it to the OpenAI chat completion format,
// derives token usage, and relays the result in the format the client
// requested (OpenAI, Claude, or raw Gemini pass-through).
func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	service.CloseResponseBodyGracefully(resp)
	if common.DebugEnabled {
		println(string(responseBody))
	}
	var geminiResponse dto.GeminiChatResponse
	err = common.Unmarshal(responseBody, &geminiResponse)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if len(geminiResponse.Candidates) == 0 {
		return nil, types.NewOpenAIError(errors.New("no candidates returned"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	fullTextResponse := responseGeminiChat2OpenAI(c, &geminiResponse)
	fullTextResponse.Model = info.UpstreamModelName
	usage := dto.Usage{
		PromptTokens:     geminiResponse.UsageMetadata.PromptTokenCount,
		CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
		TotalTokens:      geminiResponse.UsageMetadata.TotalTokenCount,
	}
	usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
	// Recompute completion tokens as total minus prompt — presumably so
	// thought tokens are billed as output; confirm against upstream semantics.
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
		if detail.Modality == "AUDIO" {
			usage.PromptTokensDetails.AudioTokens = detail.TokenCount
		} else if detail.Modality == "TEXT" {
			usage.PromptTokensDetails.TextTokens = detail.TokenCount
		}
	}
	fullTextResponse.Usage = usage
	// Re-encode into the client's requested relay format; for RelayFormatGemini
	// the raw upstream body is forwarded unchanged.
	switch info.RelayFormat {
	case types.RelayFormatOpenAI:
		responseBody, err = common.Marshal(fullTextResponse)
		if err != nil {
			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
		}
	case types.RelayFormatClaude:
		claudeResp := service.ResponseOpenAI2Claude(fullTextResponse, info)
		claudeRespStr, err := common.Marshal(claudeResp)
		if err != nil {
			return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
		}
		responseBody = claudeRespStr
	case types.RelayFormatGemini:
		break
	}
	service.IOCopyBytesGracefully(c, resp, responseBody)
	return &usage, nil
}
  1005. func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  1006. defer service.CloseResponseBodyGracefully(resp)
  1007. responseBody, readErr := io.ReadAll(resp.Body)
  1008. if readErr != nil {
  1009. return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1010. }
  1011. var geminiResponse dto.GeminiBatchEmbeddingResponse
  1012. if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  1013. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1014. }
  1015. // convert to openai format response
  1016. openAIResponse := dto.OpenAIEmbeddingResponse{
  1017. Object: "list",
  1018. Data: make([]dto.OpenAIEmbeddingResponseItem, 0, len(geminiResponse.Embeddings)),
  1019. Model: info.UpstreamModelName,
  1020. }
  1021. for i, embedding := range geminiResponse.Embeddings {
  1022. openAIResponse.Data = append(openAIResponse.Data, dto.OpenAIEmbeddingResponseItem{
  1023. Object: "embedding",
  1024. Embedding: embedding.Values,
  1025. Index: i,
  1026. })
  1027. }
  1028. // calculate usage
  1029. // https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
  1030. // Google has not yet clarified how embedding models will be billed
  1031. // refer to openai billing method to use input tokens billing
  1032. // https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
  1033. usage := &dto.Usage{
  1034. PromptTokens: info.PromptTokens,
  1035. CompletionTokens: 0,
  1036. TotalTokens: info.PromptTokens,
  1037. }
  1038. openAIResponse.Usage = *usage
  1039. jsonResponse, jsonErr := common.Marshal(openAIResponse)
  1040. if jsonErr != nil {
  1041. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1042. }
  1043. service.IOCopyBytesGracefully(c, resp, jsonResponse)
  1044. return usage, nil
  1045. }
  1046. func GeminiImageHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  1047. responseBody, readErr := io.ReadAll(resp.Body)
  1048. if readErr != nil {
  1049. return nil, types.NewOpenAIError(readErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1050. }
  1051. _ = resp.Body.Close()
  1052. var geminiResponse dto.GeminiImageResponse
  1053. if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
  1054. return nil, types.NewOpenAIError(jsonErr, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1055. }
  1056. if len(geminiResponse.Predictions) == 0 {
  1057. return nil, types.NewOpenAIError(errors.New("no images generated"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  1058. }
  1059. // convert to openai format response
  1060. openAIResponse := dto.ImageResponse{
  1061. Created: common.GetTimestamp(),
  1062. Data: make([]dto.ImageData, 0, len(geminiResponse.Predictions)),
  1063. }
  1064. for _, prediction := range geminiResponse.Predictions {
  1065. if prediction.RaiFilteredReason != "" {
  1066. continue // skip filtered image
  1067. }
  1068. openAIResponse.Data = append(openAIResponse.Data, dto.ImageData{
  1069. B64Json: prediction.BytesBase64Encoded,
  1070. })
  1071. }
  1072. jsonResponse, jsonErr := json.Marshal(openAIResponse)
  1073. if jsonErr != nil {
  1074. return nil, types.NewError(jsonErr, types.ErrorCodeBadResponseBody)
  1075. }
  1076. c.Writer.Header().Set("Content-Type", "application/json")
  1077. c.Writer.WriteHeader(resp.StatusCode)
  1078. _, _ = c.Writer.Write(jsonResponse)
  1079. // https://github.com/google-gemini/cookbook/blob/719a27d752aac33f39de18a8d3cb42a70874917e/quickstarts/Counting_Tokens.ipynb
  1080. // each image has fixed 258 tokens
  1081. const imageTokens = 258
  1082. generatedImages := len(openAIResponse.Data)
  1083. usage := &dto.Usage{
  1084. PromptTokens: imageTokens * generatedImages, // each generated image has fixed 258 tokens
  1085. CompletionTokens: 0, // image generation does not calculate completion tokens
  1086. TotalTokens: imageTokens * generatedImages,
  1087. }
  1088. return usage, nil
  1089. }