relay-openai.go 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641
  1. package openai
  2. import (
  3. "fmt"
  4. "io"
  5. "net/http"
  6. "strings"
  7. "github.com/QuantumNous/new-api/common"
  8. "github.com/QuantumNous/new-api/constant"
  9. "github.com/QuantumNous/new-api/dto"
  10. "github.com/QuantumNous/new-api/logger"
  11. "github.com/QuantumNous/new-api/relay/channel/openrouter"
  12. relaycommon "github.com/QuantumNous/new-api/relay/common"
  13. "github.com/QuantumNous/new-api/relay/helper"
  14. "github.com/QuantumNous/new-api/service"
  15. "github.com/QuantumNous/new-api/types"
  16. "github.com/bytedance/gopkg/util/gopool"
  17. "github.com/gin-gonic/gin"
  18. "github.com/gorilla/websocket"
  19. )
  20. func sendStreamData(c *gin.Context, info *relaycommon.RelayInfo, data string, forceFormat bool, thinkToContent bool) error {
  21. if data == "" {
  22. return nil
  23. }
  24. if !forceFormat && !thinkToContent {
  25. return helper.StringData(c, data)
  26. }
  27. var lastStreamResponse dto.ChatCompletionsStreamResponse
  28. if err := common.UnmarshalJsonStr(data, &lastStreamResponse); err != nil {
  29. return err
  30. }
  31. if !thinkToContent {
  32. return helper.ObjectData(c, lastStreamResponse)
  33. }
  34. hasThinkingContent := false
  35. hasContent := false
  36. var thinkingContent strings.Builder
  37. for _, choice := range lastStreamResponse.Choices {
  38. if len(choice.Delta.GetReasoningContent()) > 0 {
  39. hasThinkingContent = true
  40. thinkingContent.WriteString(choice.Delta.GetReasoningContent())
  41. }
  42. if len(choice.Delta.GetContentString()) > 0 {
  43. hasContent = true
  44. }
  45. }
  46. // Handle think to content conversion
  47. if info.ThinkingContentInfo.IsFirstThinkingContent {
  48. if hasThinkingContent {
  49. response := lastStreamResponse.Copy()
  50. for i := range response.Choices {
  51. // send `think` tag with thinking content
  52. response.Choices[i].Delta.SetContentString("<think>\n" + thinkingContent.String())
  53. response.Choices[i].Delta.ReasoningContent = nil
  54. response.Choices[i].Delta.Reasoning = nil
  55. }
  56. info.ThinkingContentInfo.IsFirstThinkingContent = false
  57. info.ThinkingContentInfo.HasSentThinkingContent = true
  58. return helper.ObjectData(c, response)
  59. }
  60. }
  61. if lastStreamResponse.Choices == nil || len(lastStreamResponse.Choices) == 0 {
  62. return helper.ObjectData(c, lastStreamResponse)
  63. }
  64. // Process each choice
  65. for i, choice := range lastStreamResponse.Choices {
  66. // Handle transition from thinking to content
  67. // only send `</think>` tag when previous thinking content has been sent
  68. if hasContent && !info.ThinkingContentInfo.SendLastThinkingContent && info.ThinkingContentInfo.HasSentThinkingContent {
  69. response := lastStreamResponse.Copy()
  70. for j := range response.Choices {
  71. response.Choices[j].Delta.SetContentString("\n</think>\n")
  72. response.Choices[j].Delta.ReasoningContent = nil
  73. response.Choices[j].Delta.Reasoning = nil
  74. }
  75. info.ThinkingContentInfo.SendLastThinkingContent = true
  76. helper.ObjectData(c, response)
  77. }
  78. // Convert reasoning content to regular content if any
  79. if len(choice.Delta.GetReasoningContent()) > 0 {
  80. lastStreamResponse.Choices[i].Delta.SetContentString(choice.Delta.GetReasoningContent())
  81. lastStreamResponse.Choices[i].Delta.ReasoningContent = nil
  82. lastStreamResponse.Choices[i].Delta.Reasoning = nil
  83. } else if !hasThinkingContent && !hasContent {
  84. // flush thinking content
  85. lastStreamResponse.Choices[i].Delta.ReasoningContent = nil
  86. lastStreamResponse.Choices[i].Delta.Reasoning = nil
  87. }
  88. }
  89. return helper.ObjectData(c, lastStreamResponse)
  90. }
// OaiStreamHandler consumes an OpenAI-compatible SSE stream, forwards each
// chunk to the client, and reconstructs usage information for billing.
//
// Note the one-chunk lookbehind: the current chunk is buffered and only the
// PREVIOUS one is forwarded, so the final chunk can be post-processed
// (usage extraction, optional suppression) before it is sent.
func OaiStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		logger.LogError(c, "invalid response or response body")
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)
	model := info.UpstreamModelName
	var responseId string
	var createAt int64 = 0
	var systemFingerprint string
	var containStreamUsage bool
	var responseTextBuilder strings.Builder
	var toolCount int
	var usage = &dto.Usage{}
	var streamItems []string // store stream items
	var lastStreamData string
	var secondLastStreamData string // second-to-last stream chunk, kept for audio models
	// Check whether this is an audio model; those report usage in the
	// penultimate chunk rather than the last one.
	isAudioModel := strings.Contains(strings.ToLower(model), "audio")
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		// Forward the previously buffered chunk (never the current one).
		if lastStreamData != "" {
			err := HandleStreamFormat(c, info, lastStreamData, info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
			if err != nil {
				common.SysLog("error handling stream format: " + err.Error())
			}
		}
		if len(data) > 0 {
			// For audio models, also remember the second-to-last chunk.
			if isAudioModel && lastStreamData != "" {
				secondLastStreamData = lastStreamData
			}
			lastStreamData = data
			streamItems = append(streamItems, data)
		}
		return true
	})
	// For audio models, extract usage from the second-to-last stream chunk.
	if isAudioModel && secondLastStreamData != "" {
		var streamResp struct {
			Usage *dto.Usage `json:"usage"`
		}
		err := common.Unmarshal([]byte(secondLastStreamData), &streamResp)
		if err == nil && streamResp.Usage != nil && service.ValidUsage(streamResp.Usage) {
			usage = streamResp.Usage
			containStreamUsage = true
			if common.DebugEnabled {
				logger.LogDebug(c, fmt.Sprintf("Audio model usage extracted from second last SSE: PromptTokens=%d, CompletionTokens=%d, TotalTokens=%d, InputTokens=%d, OutputTokens=%d",
					usage.PromptTokens, usage.CompletionTokens, usage.TotalTokens,
					usage.InputTokens, usage.OutputTokens))
			}
		}
	}
	// Post-process the final buffered chunk: may fill in id/createAt/model/
	// usage and decide whether the chunk should still be sent to the client.
	shouldSendLastResp := true
	if err := handleLastResponse(lastStreamData, &responseId, &createAt, &systemFingerprint, &model, &usage,
		&containStreamUsage, info, &shouldSendLastResp); err != nil {
		logger.LogError(c, fmt.Sprintf("error handling last response: %s, lastStreamData: [%s]", err.Error(), lastStreamData))
	}
	if info.RelayFormat == types.RelayFormatOpenAI {
		if shouldSendLastResp {
			_ = sendStreamData(c, info, lastStreamData, info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
		}
	}
	// Token accounting across all buffered chunks.
	if err := processTokens(info.RelayMode, streamItems, &responseTextBuilder, &toolCount); err != nil {
		logger.LogError(c, "error processing tokens: "+err.Error())
	}
	if !containStreamUsage {
		// No usage reported by the stream: estimate from reconstructed text.
		// The flat "* 7" is a per-tool-call token surcharge heuristic.
		usage = service.ResponseText2Usage(c, responseTextBuilder.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
		usage.CompletionTokens += toolCount * 7
	}
	applyUsagePostProcessing(info, usage, nil)
	HandleFinalResponse(c, info, lastStreamData, responseId, createAt, model, systemFingerprint, usage, containStreamUsage)
	return usage, nil
}
  166. func OpenaiHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  167. defer service.CloseResponseBodyGracefully(resp)
  168. var simpleResponse dto.OpenAITextResponse
  169. responseBody, err := io.ReadAll(resp.Body)
  170. if err != nil {
  171. return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
  172. }
  173. if common.DebugEnabled {
  174. println("upstream response body:", string(responseBody))
  175. }
  176. // Unmarshal to simpleResponse
  177. if info.ChannelType == constant.ChannelTypeOpenRouter && info.ChannelOtherSettings.IsOpenRouterEnterprise() {
  178. // 尝试解析为 openrouter enterprise
  179. var enterpriseResponse openrouter.OpenRouterEnterpriseResponse
  180. err = common.Unmarshal(responseBody, &enterpriseResponse)
  181. if err != nil {
  182. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  183. }
  184. if enterpriseResponse.Success {
  185. responseBody = enterpriseResponse.Data
  186. } else {
  187. logger.LogError(c, fmt.Sprintf("openrouter enterprise response success=false, data: %s", enterpriseResponse.Data))
  188. return nil, types.NewOpenAIError(fmt.Errorf("openrouter response success=false"), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  189. }
  190. }
  191. err = common.Unmarshal(responseBody, &simpleResponse)
  192. if err != nil {
  193. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  194. }
  195. if oaiError := simpleResponse.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
  196. return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
  197. }
  198. forceFormat := false
  199. if info.ChannelSetting.ForceFormat {
  200. forceFormat = true
  201. }
  202. usageModified := false
  203. if simpleResponse.Usage.PromptTokens == 0 {
  204. completionTokens := simpleResponse.Usage.CompletionTokens
  205. if completionTokens == 0 {
  206. for _, choice := range simpleResponse.Choices {
  207. ctkm := service.CountTextToken(choice.Message.StringContent()+choice.Message.ReasoningContent+choice.Message.Reasoning, info.UpstreamModelName)
  208. completionTokens += ctkm
  209. }
  210. }
  211. simpleResponse.Usage = dto.Usage{
  212. PromptTokens: info.GetEstimatePromptTokens(),
  213. CompletionTokens: completionTokens,
  214. TotalTokens: info.GetEstimatePromptTokens() + completionTokens,
  215. }
  216. usageModified = true
  217. }
  218. applyUsagePostProcessing(info, &simpleResponse.Usage, responseBody)
  219. switch info.RelayFormat {
  220. case types.RelayFormatOpenAI:
  221. if usageModified {
  222. var bodyMap map[string]interface{}
  223. err = common.Unmarshal(responseBody, &bodyMap)
  224. if err != nil {
  225. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  226. }
  227. bodyMap["usage"] = simpleResponse.Usage
  228. responseBody, _ = common.Marshal(bodyMap)
  229. }
  230. if forceFormat {
  231. responseBody, err = common.Marshal(simpleResponse)
  232. if err != nil {
  233. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  234. }
  235. } else {
  236. break
  237. }
  238. case types.RelayFormatClaude:
  239. claudeResp := service.ResponseOpenAI2Claude(&simpleResponse, info)
  240. claudeRespStr, err := common.Marshal(claudeResp)
  241. if err != nil {
  242. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  243. }
  244. responseBody = claudeRespStr
  245. case types.RelayFormatGemini:
  246. geminiResp := service.ResponseOpenAI2Gemini(&simpleResponse, info)
  247. geminiRespStr, err := common.Marshal(geminiResp)
  248. if err != nil {
  249. return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
  250. }
  251. responseBody = geminiRespStr
  252. }
  253. service.IOCopyBytesGracefully(c, resp, responseBody)
  254. return &simpleResponse.Usage, nil
  255. }
  256. func streamTTSResponse(c *gin.Context, resp *http.Response) {
  257. c.Writer.WriteHeaderNow()
  258. flusher, ok := c.Writer.(http.Flusher)
  259. if !ok {
  260. logger.LogWarn(c, "streaming not supported")
  261. _, err := io.Copy(c.Writer, resp.Body)
  262. if err != nil {
  263. logger.LogWarn(c, err.Error())
  264. }
  265. return
  266. }
  267. buffer := make([]byte, 4096)
  268. for {
  269. n, err := resp.Body.Read(buffer)
  270. //logger.LogInfo(c, fmt.Sprintf("streamTTSResponse read %d bytes", n))
  271. if n > 0 {
  272. if _, writeErr := c.Writer.Write(buffer[:n]); writeErr != nil {
  273. logger.LogError(c, writeErr.Error())
  274. break
  275. }
  276. flusher.Flush()
  277. }
  278. if err != nil {
  279. if err != io.EOF {
  280. logger.LogError(c, err.Error())
  281. }
  282. break
  283. }
  284. }
  285. }
  286. func OpenaiRealtimeHandler(c *gin.Context, info *relaycommon.RelayInfo) (*types.NewAPIError, *dto.RealtimeUsage) {
  287. if info == nil || info.ClientWs == nil || info.TargetWs == nil {
  288. return types.NewError(fmt.Errorf("invalid websocket connection"), types.ErrorCodeBadResponse), nil
  289. }
  290. info.IsStream = true
  291. clientConn := info.ClientWs
  292. targetConn := info.TargetWs
  293. clientClosed := make(chan struct{})
  294. targetClosed := make(chan struct{})
  295. sendChan := make(chan []byte, 100)
  296. receiveChan := make(chan []byte, 100)
  297. errChan := make(chan error, 2)
  298. usage := &dto.RealtimeUsage{}
  299. localUsage := &dto.RealtimeUsage{}
  300. sumUsage := &dto.RealtimeUsage{}
  301. gopool.Go(func() {
  302. defer func() {
  303. if r := recover(); r != nil {
  304. errChan <- fmt.Errorf("panic in client reader: %v", r)
  305. }
  306. }()
  307. for {
  308. select {
  309. case <-c.Done():
  310. return
  311. default:
  312. _, message, err := clientConn.ReadMessage()
  313. if err != nil {
  314. if !websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {
  315. errChan <- fmt.Errorf("error reading from client: %v", err)
  316. }
  317. close(clientClosed)
  318. return
  319. }
  320. realtimeEvent := &dto.RealtimeEvent{}
  321. err = common.Unmarshal(message, realtimeEvent)
  322. if err != nil {
  323. errChan <- fmt.Errorf("error unmarshalling message: %v", err)
  324. return
  325. }
  326. if realtimeEvent.Type == dto.RealtimeEventTypeSessionUpdate {
  327. if realtimeEvent.Session != nil {
  328. if realtimeEvent.Session.Tools != nil {
  329. info.RealtimeTools = realtimeEvent.Session.Tools
  330. }
  331. }
  332. }
  333. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  334. if err != nil {
  335. errChan <- fmt.Errorf("error counting text token: %v", err)
  336. return
  337. }
  338. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  339. localUsage.TotalTokens += textToken + audioToken
  340. localUsage.InputTokens += textToken + audioToken
  341. localUsage.InputTokenDetails.TextTokens += textToken
  342. localUsage.InputTokenDetails.AudioTokens += audioToken
  343. err = helper.WssString(c, targetConn, string(message))
  344. if err != nil {
  345. errChan <- fmt.Errorf("error writing to target: %v", err)
  346. return
  347. }
  348. select {
  349. case sendChan <- message:
  350. default:
  351. }
  352. }
  353. }
  354. })
  355. gopool.Go(func() {
  356. defer func() {
  357. if r := recover(); r != nil {
  358. errChan <- fmt.Errorf("panic in target reader: %v", r)
  359. }
  360. }()
  361. for {
  362. select {
  363. case <-c.Done():
  364. return
  365. default:
  366. _, message, err := targetConn.ReadMessage()
  367. if err != nil {
  368. if !websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseGoingAway) {
  369. errChan <- fmt.Errorf("error reading from target: %v", err)
  370. }
  371. close(targetClosed)
  372. return
  373. }
  374. info.SetFirstResponseTime()
  375. realtimeEvent := &dto.RealtimeEvent{}
  376. err = common.Unmarshal(message, realtimeEvent)
  377. if err != nil {
  378. errChan <- fmt.Errorf("error unmarshalling message: %v", err)
  379. return
  380. }
  381. if realtimeEvent.Type == dto.RealtimeEventTypeResponseDone {
  382. realtimeUsage := realtimeEvent.Response.Usage
  383. if realtimeUsage != nil {
  384. usage.TotalTokens += realtimeUsage.TotalTokens
  385. usage.InputTokens += realtimeUsage.InputTokens
  386. usage.OutputTokens += realtimeUsage.OutputTokens
  387. usage.InputTokenDetails.AudioTokens += realtimeUsage.InputTokenDetails.AudioTokens
  388. usage.InputTokenDetails.CachedTokens += realtimeUsage.InputTokenDetails.CachedTokens
  389. usage.InputTokenDetails.TextTokens += realtimeUsage.InputTokenDetails.TextTokens
  390. usage.OutputTokenDetails.AudioTokens += realtimeUsage.OutputTokenDetails.AudioTokens
  391. usage.OutputTokenDetails.TextTokens += realtimeUsage.OutputTokenDetails.TextTokens
  392. err := preConsumeUsage(c, info, usage, sumUsage)
  393. if err != nil {
  394. errChan <- fmt.Errorf("error consume usage: %v", err)
  395. return
  396. }
  397. // 本次计费完成,清除
  398. usage = &dto.RealtimeUsage{}
  399. localUsage = &dto.RealtimeUsage{}
  400. } else {
  401. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  402. if err != nil {
  403. errChan <- fmt.Errorf("error counting text token: %v", err)
  404. return
  405. }
  406. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  407. localUsage.TotalTokens += textToken + audioToken
  408. info.IsFirstRequest = false
  409. localUsage.InputTokens += textToken + audioToken
  410. localUsage.InputTokenDetails.TextTokens += textToken
  411. localUsage.InputTokenDetails.AudioTokens += audioToken
  412. err = preConsumeUsage(c, info, localUsage, sumUsage)
  413. if err != nil {
  414. errChan <- fmt.Errorf("error consume usage: %v", err)
  415. return
  416. }
  417. // 本次计费完成,清除
  418. localUsage = &dto.RealtimeUsage{}
  419. // print now usage
  420. }
  421. logger.LogInfo(c, fmt.Sprintf("realtime streaming sumUsage: %v", sumUsage))
  422. logger.LogInfo(c, fmt.Sprintf("realtime streaming localUsage: %v", localUsage))
  423. logger.LogInfo(c, fmt.Sprintf("realtime streaming localUsage: %v", localUsage))
  424. } else if realtimeEvent.Type == dto.RealtimeEventTypeSessionUpdated || realtimeEvent.Type == dto.RealtimeEventTypeSessionCreated {
  425. realtimeSession := realtimeEvent.Session
  426. if realtimeSession != nil {
  427. // update audio format
  428. info.InputAudioFormat = common.GetStringIfEmpty(realtimeSession.InputAudioFormat, info.InputAudioFormat)
  429. info.OutputAudioFormat = common.GetStringIfEmpty(realtimeSession.OutputAudioFormat, info.OutputAudioFormat)
  430. }
  431. } else {
  432. textToken, audioToken, err := service.CountTokenRealtime(info, *realtimeEvent, info.UpstreamModelName)
  433. if err != nil {
  434. errChan <- fmt.Errorf("error counting text token: %v", err)
  435. return
  436. }
  437. logger.LogInfo(c, fmt.Sprintf("type: %s, textToken: %d, audioToken: %d", realtimeEvent.Type, textToken, audioToken))
  438. localUsage.TotalTokens += textToken + audioToken
  439. localUsage.OutputTokens += textToken + audioToken
  440. localUsage.OutputTokenDetails.TextTokens += textToken
  441. localUsage.OutputTokenDetails.AudioTokens += audioToken
  442. }
  443. err = helper.WssString(c, clientConn, string(message))
  444. if err != nil {
  445. errChan <- fmt.Errorf("error writing to client: %v", err)
  446. return
  447. }
  448. select {
  449. case receiveChan <- message:
  450. default:
  451. }
  452. }
  453. }
  454. })
  455. select {
  456. case <-clientClosed:
  457. case <-targetClosed:
  458. case err := <-errChan:
  459. //return service.OpenAIErrorWrapper(err, "realtime_error", http.StatusInternalServerError), nil
  460. logger.LogError(c, "realtime error: "+err.Error())
  461. case <-c.Done():
  462. }
  463. if usage.TotalTokens != 0 {
  464. _ = preConsumeUsage(c, info, usage, sumUsage)
  465. }
  466. if localUsage.TotalTokens != 0 {
  467. _ = preConsumeUsage(c, info, localUsage, sumUsage)
  468. }
  469. // check usage total tokens, if 0, use local usage
  470. return nil, sumUsage
  471. }
  472. func preConsumeUsage(ctx *gin.Context, info *relaycommon.RelayInfo, usage *dto.RealtimeUsage, totalUsage *dto.RealtimeUsage) error {
  473. if usage == nil || totalUsage == nil {
  474. return fmt.Errorf("invalid usage pointer")
  475. }
  476. totalUsage.TotalTokens += usage.TotalTokens
  477. totalUsage.InputTokens += usage.InputTokens
  478. totalUsage.OutputTokens += usage.OutputTokens
  479. totalUsage.InputTokenDetails.CachedTokens += usage.InputTokenDetails.CachedTokens
  480. totalUsage.InputTokenDetails.TextTokens += usage.InputTokenDetails.TextTokens
  481. totalUsage.InputTokenDetails.AudioTokens += usage.InputTokenDetails.AudioTokens
  482. totalUsage.OutputTokenDetails.TextTokens += usage.OutputTokenDetails.TextTokens
  483. totalUsage.OutputTokenDetails.AudioTokens += usage.OutputTokenDetails.AudioTokens
  484. // clear usage
  485. err := service.PreWssConsumeQuota(ctx, info, usage)
  486. return err
  487. }
  488. func OpenaiHandlerWithUsage(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
  489. defer service.CloseResponseBodyGracefully(resp)
  490. responseBody, err := io.ReadAll(resp.Body)
  491. if err != nil {
  492. return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
  493. }
  494. var usageResp dto.SimpleResponse
  495. err = common.Unmarshal(responseBody, &usageResp)
  496. if err != nil {
  497. return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
  498. }
  499. // 写入新的 response body
  500. service.IOCopyBytesGracefully(c, resp, responseBody)
  501. // Once we've written to the client, we should not return errors anymore
  502. // because the upstream has already consumed resources and returned content
  503. // We should still perform billing even if parsing fails
  504. // format
  505. if usageResp.InputTokens > 0 {
  506. usageResp.PromptTokens += usageResp.InputTokens
  507. }
  508. if usageResp.OutputTokens > 0 {
  509. usageResp.CompletionTokens += usageResp.OutputTokens
  510. }
  511. if usageResp.InputTokensDetails != nil {
  512. usageResp.PromptTokensDetails.ImageTokens += usageResp.InputTokensDetails.ImageTokens
  513. usageResp.PromptTokensDetails.TextTokens += usageResp.InputTokensDetails.TextTokens
  514. }
  515. applyUsagePostProcessing(info, &usageResp.Usage, responseBody)
  516. return &usageResp.Usage, nil
  517. }
  518. func applyUsagePostProcessing(info *relaycommon.RelayInfo, usage *dto.Usage, responseBody []byte) {
  519. if info == nil || usage == nil {
  520. return
  521. }
  522. switch info.ChannelType {
  523. case constant.ChannelTypeDeepSeek:
  524. if usage.PromptTokensDetails.CachedTokens == 0 && usage.PromptCacheHitTokens != 0 {
  525. usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
  526. }
  527. case constant.ChannelTypeZhipu_v4, constant.ChannelTypeMoonshot:
  528. if usage.PromptTokensDetails.CachedTokens == 0 {
  529. if usage.InputTokensDetails != nil && usage.InputTokensDetails.CachedTokens > 0 {
  530. usage.PromptTokensDetails.CachedTokens = usage.InputTokensDetails.CachedTokens
  531. } else if cachedTokens, ok := extractCachedTokensFromBody(responseBody); ok {
  532. usage.PromptTokensDetails.CachedTokens = cachedTokens
  533. } else if usage.PromptCacheHitTokens > 0 {
  534. usage.PromptTokensDetails.CachedTokens = usage.PromptCacheHitTokens
  535. }
  536. }
  537. }
  538. }
  539. func extractCachedTokensFromBody(body []byte) (int, bool) {
  540. if len(body) == 0 {
  541. return 0, false
  542. }
  543. var payload struct {
  544. Usage struct {
  545. PromptTokensDetails struct {
  546. CachedTokens *int `json:"cached_tokens"`
  547. } `json:"prompt_tokens_details"`
  548. CachedTokens *int `json:"cached_tokens"`
  549. PromptCacheHitTokens *int `json:"prompt_cache_hit_tokens"`
  550. } `json:"usage"`
  551. }
  552. if err := common.Unmarshal(body, &payload); err != nil {
  553. return 0, false
  554. }
  555. if payload.Usage.PromptTokensDetails.CachedTokens != nil {
  556. return *payload.Usage.PromptTokensDetails.CachedTokens, true
  557. }
  558. if payload.Usage.CachedTokens != nil {
  559. return *payload.Usage.CachedTokens, true
  560. }
  561. if payload.Usage.PromptCacheHitTokens != nil {
  562. return *payload.Usage.PromptCacheHitTokens, true
  563. }
  564. return 0, false
  565. }