relay-openai.go 23 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691
  1. package openai
  2. import (
  3. "fmt"
  4. "io"
  5. "net/http"
  6. "strings"
  7. "github.com/QuantumNous/new-api/common"
  8. "github.com/QuantumNous/new-api/constant"
  9. "github.com/QuantumNous/new-api/dto"
  10. "github.com/QuantumNous/new-api/logger"
  11. "github.com/QuantumNous/new-api/relay/channel/openrouter"
  12. relaycommon "github.com/QuantumNous/new-api/relay/common"
  13. "github.com/QuantumNous/new-api/relay/helper"
  14. "github.com/QuantumNous/new-api/service"
  15. "github.com/QuantumNous/new-api/types"
  16. "github.com/bytedance/gopkg/util/gopool"
  17. "github.com/gin-gonic/gin"
  18. "github.com/gorilla/websocket"
  19. )
// sendStreamData writes a single SSE chunk to the client.
// When forceFormat is set, the chunk is re-marshalled through
// dto.ChatCompletionsStreamResponse to normalize its JSON shape. When
// thinkToContent is set, reasoning ("thinking") deltas are rewritten into
// regular content wrapped in <think> ... </think> tags, with progress
// tracked across chunks in info.ThinkingContentInfo.
// Returns any JSON-parsing or write error.
func sendStreamData(c *gin.Context, info *relaycommon.RelayInfo, data string, forceFormat bool, thinkToContent bool) error {
	if data == "" {
		return nil
	}
	// Fast path: nothing to rewrite, forward the raw string as-is.
	if !forceFormat && !thinkToContent {
		return helper.StringData(c, data)
	}
	var lastStreamResponse dto.ChatCompletionsStreamResponse
	if err := common.UnmarshalJsonStr(data, &lastStreamResponse); err != nil {
		return err
	}
	// forceFormat only: re-emit the parsed object (normalized serialization).
	if !thinkToContent {
		return helper.ObjectData(c, lastStreamResponse)
	}
	// Scan all choices to learn what this chunk carries: reasoning deltas,
	// regular content deltas, or neither.
	hasThinkingContent := false
	hasContent := false
	var thinkingContent strings.Builder
	for _, choice := range lastStreamResponse.Choices {
		if len(choice.Delta.GetReasoningContent()) > 0 {
			hasThinkingContent = true
			thinkingContent.WriteString(choice.Delta.GetReasoningContent())
		}
		if len(choice.Delta.GetContentString()) > 0 {
			hasContent = true
		}
	}
	// Handle think to content conversion: the very first reasoning chunk is
	// emitted as content prefixed with an opening <think> tag.
	if info.ThinkingContentInfo.IsFirstThinkingContent {
		if hasThinkingContent {
			response := lastStreamResponse.Copy()
			for i := range response.Choices {
				// send `think` tag with thinking content
				response.Choices[i].Delta.SetContentString("<think>\n" + thinkingContent.String())
				response.Choices[i].Delta.ReasoningContent = nil
				response.Choices[i].Delta.Reasoning = nil
			}
			info.ThinkingContentInfo.IsFirstThinkingContent = false
			info.ThinkingContentInfo.HasSentThinkingContent = true
			return helper.ObjectData(c, response)
		}
	}
	// Chunks without choices (e.g. usage-only frames) pass through unchanged.
	if lastStreamResponse.Choices == nil || len(lastStreamResponse.Choices) == 0 {
		return helper.ObjectData(c, lastStreamResponse)
	}
	// Process each choice
	for i, choice := range lastStreamResponse.Choices {
		// Handle transition from thinking to content
		// only send `</think>` tag when previous thinking content has been sent
		if hasContent && !info.ThinkingContentInfo.SendLastThinkingContent && info.ThinkingContentInfo.HasSentThinkingContent {
			response := lastStreamResponse.Copy()
			for j := range response.Choices {
				response.Choices[j].Delta.SetContentString("\n</think>\n")
				response.Choices[j].Delta.ReasoningContent = nil
				response.Choices[j].Delta.Reasoning = nil
			}
			info.ThinkingContentInfo.SendLastThinkingContent = true
			// NOTE(review): write error deliberately not propagated here so the
			// converted chunk below is still sent — confirm this is intended.
			helper.ObjectData(c, response)
		}
		// Convert reasoning content to regular content if any
		if len(choice.Delta.GetReasoningContent()) > 0 {
			lastStreamResponse.Choices[i].Delta.SetContentString(choice.Delta.GetReasoningContent())
			lastStreamResponse.Choices[i].Delta.ReasoningContent = nil
			lastStreamResponse.Choices[i].Delta.Reasoning = nil
		} else if !hasThinkingContent && !hasContent {
			// flush thinking content
			lastStreamResponse.Choices[i].Delta.ReasoningContent = nil
			lastStreamResponse.Choices[i].Delta.Reasoning = nil
		}
	}
	return helper.ObjectData(c, lastStreamResponse)
}
// OaiStreamHandler consumes an OpenAI-style SSE stream from the upstream,
// forwarding each chunk to the client (format-adjusted per channel settings),
// collecting all chunks for token accounting, and extracting usage either
// from the stream itself or by re-counting the produced text.
// Returns the final usage, or a wrapped error when the response is unusable.
func OaiStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		logger.LogError(c, "invalid response or response body")
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)
	model := info.UpstreamModelName
	var responseId string
	var createAt int64 = 0
	var systemFingerprint string
	var containStreamUsage bool
	var responseTextBuilder strings.Builder
	var toolCount int
	var usage = &dto.Usage{}
	var streamItems []string // store stream items
	var lastStreamData string
	var secondLastStreamData string // second-to-last stream data, kept for audio models
	// Whether this is an audio model (decided purely from the model name).
	isAudioModel := strings.Contains(strings.ToLower(model), "audio")
	// Forward every chunk except the most recent one: the final chunk is held
	// back so it can be inspected (and possibly suppressed) after the stream
	// ends, see handleLastResponse below.
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		if lastStreamData != "" {
			err := HandleStreamFormat(c, info, lastStreamData, info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
			if err != nil {
				common.SysLog("error handling stream format: " + err.Error())
			}
		}
		if len(data) > 0 {
			// For audio models, remember the second-to-last stream data.
			if isAudioModel && lastStreamData != "" {
				secondLastStreamData = lastStreamData
			}
			lastStreamData = data
			streamItems = append(streamItems, data)
		}
		return true
	})
	// For audio models, extract usage from the second-to-last stream chunk
	// (the final chunk does not reliably carry it).
	if isAudioModel && secondLastStreamData != "" {
		var streamResp struct {
			Usage *dto.Usage `json:"usage"`
		}
		err := common.Unmarshal([]byte(secondLastStreamData), &streamResp)
		if err == nil && streamResp.Usage != nil && service.ValidUsage(streamResp.Usage) {
			usage = streamResp.Usage
			containStreamUsage = true
			if common.DebugEnabled {
				logger.LogDebug(c, fmt.Sprintf("Audio model usage extracted from second last SSE: PromptTokens=%d, CompletionTokens=%d, TotalTokens=%d, InputTokens=%d, OutputTokens=%d",
					usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens,
					usage.InputTokens, usage.OutputTokens))
			}
		}
	}
	// Inspect the held-back final chunk: it may supply response id, model,
	// fingerprint and usage, and decides whether it should still be forwarded.
	shouldSendLastResp := true
	if err := handleLastResponse(lastStreamData, &responseId, &createAt, &systemFingerprint, &model, &usage,
		&containStreamUsage, info, &shouldSendLastResp); err != nil {
		logger.LogError(c, fmt.Sprintf("error handling last response: %s, lastStreamData: [%s]", err.Error(), lastStreamData))
	}
	if info.RelayFormat == types.RelayFormatOpenAI {
		if shouldSendLastResp {
			_ = sendStreamData(c, info, lastStreamData, info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
		}
	}
	// Token accounting over all collected chunks.
	if err := processTokens(info.RelayMode, streamItems, &responseTextBuilder, &toolCount); err != nil {
		logger.LogError(c, "error processing tokens: "+err.Error())
	}
	// No usage in the stream: estimate from the generated text, with a flat
	// surcharge per tool call.
	if !containStreamUsage {
		usage = service.ResponseText2Usage(c, responseTextBuilder.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
		usage.CompletionTokens += toolCount * 7
	}
	applyUsagePostProcessing(info, usage, common.StringToByteSlice(lastStreamData))
	HandleFinalResponse(c, info, lastStreamData, responseId, createAt, model, systemFingerprint, usage, containStreamUsage)
	return usage, nil
}
  166. func OpenaiHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  167. defer service.CloseResponseBodyGracefully(resp)
  168. var simpleResponse dto.OpenAITextResponse
  169. responseBody, err := io.ReadAll(resp.Body)
  170. if err != nil {
  171. return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
  172. }
  173. if common.DebugEnabled {
  174. println("upstream response body:", string(responseBody))
  175. }
  176. // Unmarshal to simpleResponse
  177. if info.ChannelType == constant.ChannelTypeOpenRouter && info.ChannelOtherSettings.IsOpenRouterEnterprise() {
  178. // 尝试解析为 openrouter enterprise
  179. var enterpriseResponse openrouter.OpenRouterEnterpriseResponse
  180. err = common.Unmarshal(responseBody, &enterpriseResponse)
  181. if err != nil {
  182. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  183. }
  184. if enterpriseResponse.Success {
  185. responseBody = enterpriseResponse.Data
  186. } else {
  187. logger.LogError(c, fmt.Sprintf("openrouter enterprise response success=false, data: %s", enterpriseResponse.Data))
  188. return nil, types.NewOpenAIError(fmt.Errorf("openrouter response success=false"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  189. }
  190. }
  191. err = common.Unmarshal(responseBody, &simpleResponse)
  192. if err != nil {
  193. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  194. }
  195. if oaiError := simpleResponse.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
  196. return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
  197. }
  198. for _, choice := range simpleResponse.Choices {
  199. if choice.FinishReason == constant.FinishReasonContentFilter {
  200. common.SetContextKey(c, constant.ContextKeyAdminRejectReason, "openai_finish_reason=content_filter")
  201. break
  202. }
  203. }
  204. forceFormat := false
  205. if info.ChannelSetting.ForceFormat {
  206. forceFormat = true
  207. }
  208. usageModified := false
  209. if simpleResponse.Usage.PromptTokens == 0 {
  210. completionTokens := simpleResponse.Usage.CompletionTokens
  211. if completionTokens == 0 {
  212. for _, choice := range simpleResponse.Choices {
  213. ctkm := service.CountTextToken(choice.Message.StringContent()+choice.Message.ReasoningContent+choice.Message.Reasoning, info.UpstreamModelName)
  214. completionTokens += ctkm
  215. }
  216. }
  217. simpleResponse.Usage = dto.Usage{
  218. PromptTokens: info.GetEstimatePromptTokens(),
  219. CompletionTokens: completionTokens,
  220. TotalTokens: info.GetEstimatePromptTokens() + completionTokens,
  221. }
  222. usageModified = true
  223. }
  224. applyUsagePostProcessing(info, &simpleResponse.Usage, responseBody)
  225. switch info.RelayFormat {
  226. case types.RelayFormatOpenAI:
  227. if usageModified {
  228. var bodyMap map[string]interface{}
  229. err = common.Unmarshal(responseBody, &bodyMap)
  230. if err != nil {
  231. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  232. }
  233. bodyMap["usage"] = simpleResponse.Usage
  234. responseBody, _ = common.Marshal(bodyMap)
  235. }
  236. if forceFormat {
  237. responseBody, err = common.Marshal(simpleResponse)
  238. if err != nil {
  239. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  240. }
  241. } else {
  242. break
  243. }
  244. case types.RelayFormatClaude:
  245. claudeResp := service.ResponseOpenAI2Claude(&simpleResponse, info)
  246. claudeRespStr, err := common.Marshal(claudeResp)
  247. if err != nil {
  248. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  249. }
  250. responseBody = claudeRespStr
  251. case types.RelayFormatGemini:
  252. geminiResp := service.ResponseOpenAI2Gemini(&simpleResponse, info)
  253. geminiRespStr, err := common.Marshal(geminiResp)
  254. if err != nil {
  255. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  256. }
  257. responseBody = geminiRespStr
  258. }
  259. service.IOCopyBytesGracefully(c, resp, responseBody)
  260. return &simpleResponse.Usage, nil
  261. }
  262. func streamTTSResponse(c *gin.Context, resp *http.Response) {
  263. c.Writer.WriteHeaderNow()
  264. flusher, ok := c.Writer.(http.Flusher)
  265. if !ok {
  266. logger.LogWarn(c, "streaming not supported")
  267. _, err := io.Copy(c.Writer, resp.Body)
  268. if err != nil {
  269. logger.LogWarn(c, err.Error())
  270. }
  271. return
  272. }
  273. buffer := make([]byte, 4096)
  274. for {
  275. n, err := resp.Body.Read(buffer)
  276. //logger.LogInfo(c, fmt.Sprintf("streamTTSResponse read %d bytes", n))
  277. if n > 0 {
  278. if _, writeErr := c.Writer.Write(buffer[:n]); writeErr != nil {
  279. logger.LogError(c, writeErr.Error())
  280. break
  281. }
  282. flusher.Flush()
  283. }
  284. if err != nil {
  285. if err != io.EOF {
  286. logger.LogError(c, err.Error())
  287. }
  288. break
  289. }
  290. }
  291. }
  292. func OpenaiRealtimeHandler(c *gin.Context, info *relaycommon.RelayInfo) (*types.NewAPIError, *dto.RealtimeUsage) {
  293. if info == nil || info.ClientWs == nil || info.TargetWs == nil {
  294. return types.NewError(fmt.Errorf("invalid websocket connection"), types.ErrorCodeBadResponse), nil
  295. }
  296. info.IsStream = true
  297. clientConn := info.ClientWs
  298. targetConn := info.TargetWs
  299. clientClosed := make(chan struct{})
  300. targetClosed := make(chan struct{})
  301. sendChan := make(chan []byte, 100)
  302. receiveChan := make(chan []byte, 100)
  303. errChan := make(chan error, 2)
  304. usage := &dto.RealtimeUsage{}
  305. localUsage := &dto.RealtimeUsage{}
  306. sumUsage := &dto.RealtimeUsage{}
  307. gopool.Go(func() {
  308. defer func() {
  309. if r := recover(); r != nil {
  310. errChan <- fmt.Errorf("panic in client reader: %v", r)
  311. }
  312. }()
  313. for {
  314. select {
  315. case <-c.Done():
  316. return
  317. default:
  318. _, message, err := clientConn.ReadMessage()
  319. if err != nil {
  320. if !websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {
  321. errChan <- fmt.Errorf("error reading from client: %v", err)
  322. }
  323. close(clientClosed)
  324. return
  325. }
  326. realtimeEvent := &dto.RealtimeEvent{}
  327. err = common.Unmarshal(message, realtimeEvent)
  328. if err != nil {
  329. errChan <- fmt.Errorf("error unmarshalling message: %v", err)
  330. return
  331. }
  332. if realtimeEvent.Type == dto.RealtimeEventTypeSessionUpdate {
  333. if realtimeEvent.Session != nil {
  334. if realtimeEvent.Session.Tools != nil {
  335. info.RealtimeTools = realtimeEvent.Session.Tools
  336. }
  337. }
  338. }
  339. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  340. if err != nil {
  341. errChan <- fmt.Errorf("error counting text token: %v", err)
  342. return
  343. }
  344. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  345. localUsage.TotalTokens += textToken + audioToken
  346. localUsage.InputTokens += textToken + audioToken
  347. localUsage.InputTokenDetails.TextTokens += textToken
  348. localUsage.InputTokenDetails.AudioTokens += audioToken
  349. err = helper.WssString(c, targetConn, string(message))
  350. if err != nil {
  351. errChan <- fmt.Errorf("error writing to target: %v", err)
  352. return
  353. }
  354. select {
  355. case sendChan <- message:
  356. default:
  357. }
  358. }
  359. }
  360. })
  361. gopool.Go(func() {
  362. defer func() {
  363. if r := recover(); r != nil {
  364. errChan <- fmt.Errorf("panic in target reader: %v", r)
  365. }
  366. }()
  367. for {
  368. select {
  369. case <-c.Done():
  370. return
  371. default:
  372. _, message, err := targetConn.ReadMessage()
  373. if err != nil {
  374. if !websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {
  375. errChan <- fmt.Errorf("error reading from target: %v", err)
  376. }
  377. close(targetClosed)
  378. return
  379. }
  380. info.SetFirstResponseTime()
  381. realtimeEvent := &dto.RealtimeEvent{}
  382. err = common.Unmarshal(message, realtimeEvent)
  383. if err != nil {
  384. errChan <- fmt.Errorf("error unmarshalling message: %v", err)
  385. return
  386. }
  387. if realtimeEvent.Type == dto.RealtimeEventTypeResponseDone {
  388. realtimeUsage := realtimeEvent.Response.Usage
  389. if realtimeUsage != nil {
  390. usage.TotalTokens += realtimeUsage.TotalTokens
  391. usage.InputTokens += realtimeUsage.InputTokens
  392. usage.OutputTokens += realtimeUsage.OutputTokens
  393. usage.InputTokenDetails.AudioTokens += realtimeUsage.InputTokenDetails.AudioTokens
  394. usage.InputTokenDetails.CachedTokens += realtimeUsage.InputTokenDetails.CachedTokens
  395. usage.InputTokenDetails.TextTokens += realtimeUsage.InputTokenDetails.TextTokens
  396. usage.OutputTokenDetails.AudioTokens += realtimeUsage.OutputTokenDetails.AudioTokens
  397. usage.OutputTokenDetails.TextTokens += realtimeUsage.OutputTokenDetails.TextTokens
  398. err := preConsumeUsage(c, info, usage, sumUsage)
  399. if err != nil {
  400. errChan <- fmt.Errorf("error consume usage: %v", err)
  401. return
  402. }
  403. // 本次计费完成,清除
  404. usage = &dto.RealtimeUsage{}
  405. localUsage = &dto.RealtimeUsage{}
  406. } else {
  407. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  408. if err != nil {
  409. errChan <- fmt.Errorf("error counting text token: %v", err)
  410. return
  411. }
  412. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  413. localUsage.TotalTokens += textToken + audioToken
  414. info.IsFirstRequest = false
  415. localUsage.InputTokens += textToken + audioToken
  416. localUsage.InputTokenDetails.TextTokens += textToken
  417. localUsage.InputTokenDetails.AudioTokens += audioToken
  418. err = preConsumeUsage(c, info, localUsage, sumUsage)
  419. if err != nil {
  420. errChan <- fmt.Errorf("error consume usage: %v", err)
  421. return
  422. }
  423. // 本次计费完成,清除
  424. localUsage = &dto.RealtimeUsage{}
  425. // print now usage
  426. }
  427. logger.LogInfo(c, fmt.Sprintf("realtime streaming sumUsage: %v", sumUsage))
  428. logger.LogInfo(c, fmt.Sprintf("realtime streaming localUsage: %v", localUsage))
  429. logger.LogInfo(c, fmt.Sprintf("realtime streaming localUsage: %v", localUsage))
  430. } else if realtimeEvent.Type == dto.RealtimeEventTypeSessionUpdated || realtimeEvent.Type == dto.RealtimeEventTypeSessionCreated {
  431. realtimeSession := realtimeEvent.Session
  432. if realtimeSession != nil {
  433. // update audio format
  434. info.InputAudioFormat = common.GetStringIfEmpty(realtimeSession.InputAudioFormat, info.InputAudioFormat)
  435. info.OutputAudioFormat = common.GetStringIfEmpty(realtimeSession.OutputAudioFormat, info.OutputAudioFormat)
  436. }
  437. } else {
  438. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  439. if err != nil {
  440. errChan <- fmt.Errorf("error counting text token: %v", err)
  441. return
  442. }
  443. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  444. localUsage.TotalTokens += textToken + audioToken
  445. localUsage.OutputTokens += textToken + audioToken
  446. localUsage.OutputTokenDetails.TextTokens += textToken
  447. localUsage.OutputTokenDetails.AudioTokens += audioToken
  448. }
  449. err = helper.WssString(c, clientConn, string(message))
  450. if err != nil {
  451. errChan <- fmt.Errorf("error writing to client: %v", err)
  452. return
  453. }
  454. select {
  455. case receiveChan <- message:
  456. default:
  457. }
  458. }
  459. }
  460. })
  461. select {
  462. case <-clientClosed:
  463. case <-targetClosed:
  464. case err := <-errChan:
  465. //return service.OpenAIErrorWrapper(err, "realtime_error", http.StatusInternalServerError), nil
  466. logger.LogError(c, "realtime error: "+err.Error())
  467. case <-c.Done():
  468. }
  469. if usage.TotalTokens != 0 {
  470. _ = preConsumeUsage(c, info, usage, sumUsage)
  471. }
  472. if localUsage.TotalTokens != 0 {
  473. _ = preConsumeUsage(c, info, localUsage, sumUsage)
  474. }
  475. // check usage total tokens, if 0, use local usage
  476. return nil, sumUsage
  477. }
  478. func preConsumeUsage(ctx *gin.Context, info *relaycommon.RelayInfo, usage *dto.RealtimeUsage, totalUsage *dto.RealtimeUsage) error {
  479. if usage == nil || totalUsage == nil {
  480. return fmt.Errorf("invalid usage pointer")
  481. }
  482. totalUsage.TotalTokens += usage.TotalTokens
  483. totalUsage.InputTokens += usage.InputTokens
  484. totalUsage.OutputTokens += usage.OutputTokens
  485. totalUsage.InputTokenDetails.CachedTokens += usage.InputTokenDetails.CachedTokens
  486. totalUsage.InputTokenDetails.TextTokens += usage.InputTokenDetails.TextTokens
  487. totalUsage.InputTokenDetails.AudioTokens += usage.InputTokenDetails.AudioTokens
  488. totalUsage.OutputTokenDetails.TextTokens += usage.OutputTokenDetails.TextTokens
  489. totalUsage.OutputTokenDetails.AudioTokens += usage.OutputTokenDetails.AudioTokens
  490. // clear usage
  491. err := service.PreWssConsumeQuota(ctx, info, usage)
  492. return err
  493. }
  494. func OpenaiHandlerWithUsage(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  495. defer service.CloseResponseBodyGracefully(resp)
  496. responseBody, err := io.ReadAll(resp.Body)
  497. if err != nil {
  498. return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
  499. }
  500. var usageResp dto.SimpleResponse
  501. err = common.Unmarshal(responseBody, &usageResp)
  502. if err != nil {
  503. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  504. }
  505. // 写入新的 response body
  506. service.IOCopyBytesGracefully(c, resp, responseBody)
  507. // Once we've written to the client, we should not return errors anymore
  508. // because the upstream has already consumed resources and returned content
  509. // We should still perform billing even if parsing fails
  510. // format
  511. if usageResp.InputTokens > 0 {
  512. usageResp.PromptTokens += usageResp.InputTokens
  513. }
  514. if usageResp.OutputTokens > 0 {
  515. usageResp.CompletionTokens += usageResp.OutputTokens
  516. }
  517. if usageResp.InputTokensDetails != nil {
  518. usageResp.PromptTokensDetails.ImageTokens += usageResp.InputTokensDetails.ImageTokens
  519. usageResp.PromptTokensDetails.TextTokens += usageResp.InputTokensDetails.TextTokens
  520. }
  521. applyUsagePostProcessing(info, &usageResp.Usage, responseBody)
  522. return &usageResp.Usage, nil
  523. }
  524. func applyUsagePostProcessing(info *relaycommon.RelayInfo, usage *dto.Usage, responseBody []byte) {
  525. if info == nil || usage == nil {
  526. return
  527. }
  528. switch info.ChannelType {
  529. case constant.ChannelTypeDeepSeek:
  530. if usage.PromptTokensDetails.CachedTokens == 0 && usage.PromptCacheHitTokens != 0 {
  531. usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
  532. }
  533. case constant.ChannelTypeZhipu_v4:
  534. // 智普的cached_tokens在标准位置: usage.prompt_tokens_details.cached_tokens
  535. if usage.PromptTokensDetails.CachedTokens == 0 {
  536. if usage.InputTokensDetails != nil && usage.InputTokensDetails.CachedTokens > 0 {
  537. usage.PromptTokensDetails.CachedTokens = usage.InputTokensDetails.CachedTokens
  538. } else if cachedTokens, ok := extractCachedTokensFromBody(responseBody); ok {
  539. usage.PromptTokensDetails.CachedTokens = cachedTokens
  540. } else if usage.PromptCacheHitTokens > 0 {
  541. usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
  542. }
  543. }
  544. case constant.ChannelTypeMoonshot:
  545. // Moonshot的cached_tokens在非标准位置: choices[].usage.cached_tokens
  546. if usage.PromptTokensDetails.CachedTokens == 0 {
  547. if usage.InputTokensDetails != nil && usage.InputTokensDetails.CachedTokens > 0 {
  548. usage.PromptTokensDetails.CachedTokens = usage.InputTokensDetails.CachedTokens
  549. } else if cachedTokens, ok := extractMoonshotCachedTokensFromBody(responseBody); ok {
  550. usage.PromptTokensDetails.CachedTokens = cachedTokens
  551. } else if cachedTokens, ok := extractCachedTokensFromBody(responseBody); ok {
  552. usage.PromptTokensDetails.CachedTokens = cachedTokens
  553. } else if usage.PromptCacheHitTokens > 0 {
  554. usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
  555. }
  556. }
  557. }
  558. }
  559. func extractCachedTokensFromBody(body []byte) (int, bool) {
  560. if len(body) == 0 {
  561. return 0, false
  562. }
  563. var payload struct {
  564. Usage struct {
  565. PromptTokensDetails struct {
  566. CachedTokens *int `json:"cached_tokens"`
  567. } `json:"prompt_tokens_details"`
  568. CachedTokens *int `json:"cached_tokens"`
  569. PromptCacheHitTokens *int `json:"prompt_cache_hit_tokens"`
  570. } `json:"usage"`
  571. }
  572. if err := common.Unmarshal(body, &payload); err != nil {
  573. return 0, false
  574. }
  575. if payload.Usage.PromptTokensDetails.CachedTokens != nil {
  576. return *payload.Usage.PromptTokensDetails.CachedTokens, true
  577. }
  578. if payload.Usage.CachedTokens != nil {
  579. return *payload.Usage.CachedTokens, true
  580. }
  581. if payload.Usage.PromptCacheHitTokens != nil {
  582. return *payload.Usage.PromptCacheHitTokens, true
  583. }
  584. return 0, false
  585. }
  586. // extractMoonshotCachedTokensFromBody 从Moonshot的非标准位置提取cached_tokens
  587. // Moonshot的流式响应格式: {"choices":[{"usage":{"cached_tokens":111}}]}
  588. func extractMoonshotCachedTokensFromBody(body []byte) (int, bool) {
  589. if len(body) == 0 {
  590. return 0, false
  591. }
  592. var payload struct {
  593. Choices []struct {
  594. Usage struct {
  595. CachedTokens *int `json:"cached_tokens"`
  596. } `json:"usage"`
  597. } `json:"choices"`
  598. }
  599. if err := common.Unmarshal(body, &payload); err != nil {
  600. return 0, false
  601. }
  602. // 遍历choices查找cached_tokens
  603. for _, choice := range payload.Choices {
  604. if choice.Usage.CachedTokens != nil && *choice.Usage.CachedTokens > 0 {
  605. return *choice.Usage.CachedTokens, true
  606. }
  607. }
  608. return 0, false
  609. }