chat_via_responses.go

package openai

import (
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)

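// responsesStreamIndexKey builds a lookup key from a stream item ID and an
// optional part index, producing "" when the item ID is empty.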
func responsesStreamIndexKey(itemID string, idx *int) string {
	if itemID == "" {
		return ""
	}
	if idx == nil {
		return itemID
	}
	return fmt.Sprintf("%s:%d", itemID, *idx)
}

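// stringDeltaFromPrefix returns the portion of next that extends prev when prev
// is a prefix of next; otherwise it returns next unchanged.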
func stringDeltaFromPrefix(prev string, next string) string {
	if next == "" {
		return ""
	}
	if prev != "" && strings.HasPrefix(next, prev) {
		return next[len(prev):]
	}
	return next
}

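// OaiResponsesToChatHandler converts a non-streaming Responses API response into a
// Chat Completions response (re-encoded for Claude or Gemini when the relay format
// requires it), fills in estimated usage when the upstream omits it, and writes the
// result to the client.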
func OaiResponsesToChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)
	var responsesResp dto.OpenAIResponsesResponse
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
	}
	if err := common.Unmarshal(body, &responsesResp); err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if oaiError := responsesResp.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
		return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
	}
	chatId := helper.GetResponseID(c)
	chatResp, usage, err := service.ResponsesResponseToChatCompletionsResponse(&responsesResp, chatId)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if usage == nil || usage.TotalTokens == 0 {
		text := service.ExtractOutputTextFromResponses(&responsesResp)
		usage = service.ResponseText2Usage(c, text, info.UpstreamModelName, info.GetEstimatePromptTokens())
		chatResp.Usage = *usage
	}
	var responseBody []byte
	switch info.RelayFormat {
	case types.RelayFormatClaude:
		claudeResp := service.ResponseOpenAI2Claude(chatResp, info)
		responseBody, err = common.Marshal(claudeResp)
	case types.RelayFormatGemini:
		geminiResp := service.ResponseOpenAI2Gemini(chatResp, info)
		responseBody, err = common.Marshal(geminiResp)
	default:
		responseBody, err = common.Marshal(chatResp)
	}
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
	}
	service.IOCopyBytesGracefully(c, resp, responseBody)
	return usage, nil
}

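// OaiResponsesToChatStreamHandler translates a Responses API event stream into Chat
// Completions stream chunks: output text, reasoning summaries, and function tool
// calls are forwarded incrementally, and usage is taken from the final
// response.completed event or estimated from the streamed text.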
func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)
	responseId := helper.GetResponseID(c)
	createAt := time.Now().Unix()
	model := info.UpstreamModelName
	var (
		usage       = &dto.Usage{}
		outputText  strings.Builder
		usageText   strings.Builder
		sentStart   bool
		sentStop    bool
		sawToolCall bool
		streamErr   *types.NewAPIError
	)
	toolCallIndexByID := make(map[string]int)
	toolCallNameByID := make(map[string]string)
	toolCallArgsByID := make(map[string]string)
	toolCallNameSent := make(map[string]bool)
	toolCallCanonicalIDByItemID := make(map[string]string)
	hasSentReasoningSummary := false
	needsReasoningSummarySeparator := false
	//reasoningSummaryTextByKey := make(map[string]string)
	if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo == nil {
		info.ClaudeConvertInfo = &relaycommon.ClaudeConvertInfo{LastMessagesType: relaycommon.LastMessageTypeNone}
	}
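	// sendChatChunk writes one chunk to the client: directly as OpenAI SSE data, or
	// converted through HandleStreamFormat for other relay formats. On failure it
	// records streamErr and returns false.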
	sendChatChunk := func(chunk *dto.ChatCompletionsStreamResponse) bool {
		if chunk == nil {
			return true
		}
		if info.RelayFormat == types.RelayFormatOpenAI {
			if err := helper.ObjectData(c, chunk); err != nil {
				streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
				return false
			}
			return true
		}
		chunkData, err := common.Marshal(chunk)
		if err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
			return false
		}
		if err := HandleStreamFormat(c, info, string(chunkData), false, false); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		return true
	}
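	// sendStartIfNeeded emits the initial empty chunk exactly once before any content.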
	sendStartIfNeeded := func() bool {
		if sentStart {
			return true
		}
		if !sendChatChunk(helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)) {
			return false
		}
		sentStart = true
		return true
	}
	//sendReasoningDelta := func(delta string) bool {
	//	if delta == "" {
	//		return true
	//	}
	//	if !sendStartIfNeeded() {
	//		return false
	//	}
	//
	//	usageText.WriteString(delta)
	//	chunk := &dto.ChatCompletionsStreamResponse{
	//		Id:      responseId,
	//		Object:  "chat.completion.chunk",
	//		Created: createAt,
	//		Model:   model,
	//		Choices: []dto.ChatCompletionsStreamResponseChoice{
	//			{
	//				Index: 0,
	//				Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
	//					ReasoningContent: &delta,
	//				},
	//			},
	//		},
	//	}
	//	if err := helper.ObjectData(c, chunk); err != nil {
	//		streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
	//		return false
	//	}
	//	return true
	//}
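	// sendReasoningSummaryDelta forwards a reasoning-summary delta as ReasoningContent,
	// inserting a blank-line separator between consecutive summary sections.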
	sendReasoningSummaryDelta := func(delta string) bool {
		if delta == "" {
			return true
		}
		if needsReasoningSummarySeparator {
			if strings.HasPrefix(delta, "\n\n") {
				needsReasoningSummarySeparator = false
			} else if strings.HasPrefix(delta, "\n") {
				delta = "\n" + delta
				needsReasoningSummarySeparator = false
			} else {
				delta = "\n\n" + delta
				needsReasoningSummarySeparator = false
			}
		}
		if !sendStartIfNeeded() {
			return false
		}
		usageText.WriteString(delta)
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ReasoningContent: &delta,
					},
				},
			},
		}
		if !sendChatChunk(chunk) {
			return false
		}
		hasSentReasoningSummary = true
		return true
	}
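	// sendToolCallDelta streams an incremental tool call: the function name is emitted
	// once per call ID, argument fragments are appended, and each call keeps a stable
	// index. Tool deltas are skipped once assistant text has started, matching the
	// non-stream behavior.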
	sendToolCallDelta := func(callID string, name string, argsDelta string) bool {
		if callID == "" {
			return true
		}
		if outputText.Len() > 0 {
			// Prefer streaming assistant text over tool calls to match non-stream behavior.
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}
		idx, ok := toolCallIndexByID[callID]
		if !ok {
			idx = len(toolCallIndexByID)
			toolCallIndexByID[callID] = idx
		}
		if name != "" {
			toolCallNameByID[callID] = name
		}
		if toolCallNameByID[callID] != "" {
			name = toolCallNameByID[callID]
		}
		tool := dto.ToolCallResponse{
			ID:   callID,
			Type: "function",
			Function: dto.FunctionResponse{
				Arguments: argsDelta,
			},
		}
		tool.SetIndex(idx)
		if name != "" && !toolCallNameSent[callID] {
			tool.Function.Name = name
			toolCallNameSent[callID] = true
		}
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ToolCalls: []dto.ToolCallResponse{tool},
					},
				},
			},
		}
		if !sendChatChunk(chunk) {
			return false
		}
		sawToolCall = true
		// Include tool call data in the local builder for fallback token estimation.
		if tool.Function.Name != "" {
			usageText.WriteString(tool.Function.Name)
		}
		if argsDelta != "" {
			usageText.WriteString(argsDelta)
		}
		return true
	}
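	// Consume the upstream SSE stream, translating each Responses event into chat chunks.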
	helper.StreamScannerHandler(c, resp, info, func(data string, sr *helper.StreamResult) {
		if streamErr != nil {
			sr.Stop(streamErr)
			return
		}
		var streamResp dto.ResponsesStreamResponse
		if err := common.UnmarshalJsonStr(data, &streamResp); err != nil {
			logger.LogError(c, "failed to unmarshal responses stream event: "+err.Error())
			sr.Error(err)
			return
		}
		switch streamResp.Type {
		case "response.created":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
			}
		//case "response.reasoning_text.delta":
		//	if !sendReasoningDelta(streamResp.Delta) {
		//		sr.Stop(streamErr)
		//		return
		//	}
		//case "response.reasoning_text.done":
		case "response.reasoning_summary_text.delta":
			if !sendReasoningSummaryDelta(streamResp.Delta) {
				sr.Stop(streamErr)
				return
			}
		case "response.reasoning_summary_text.done":
			if hasSentReasoningSummary {
				needsReasoningSummarySeparator = true
			}
		//case "response.reasoning_summary_part.added", "response.reasoning_summary_part.done":
		//	key := responsesStreamIndexKey(strings.TrimSpace(streamResp.ItemID), streamResp.SummaryIndex)
		//	if key == "" || streamResp.Part == nil {
		//		break
		//	}
		//	// Only handle summary text parts, ignore other part types.
		//	if streamResp.Part.Type != "" && streamResp.Part.Type != "summary_text" {
		//		break
		//	}
		//	prev := reasoningSummaryTextByKey[key]
		//	next := streamResp.Part.Text
		//	delta := stringDeltaFromPrefix(prev, next)
		//	reasoningSummaryTextByKey[key] = next
		//	if !sendReasoningSummaryDelta(delta) {
		//		sr.Stop(streamErr)
		//		return
		//	}
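		// Assistant text deltas are forwarded as content chunks and accumulated for
		// fallback token counting.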
		case "response.output_text.delta":
			if !sendStartIfNeeded() {
				sr.Stop(streamErr)
				return
			}
			if streamResp.Delta != "" {
				outputText.WriteString(streamResp.Delta)
				usageText.WriteString(streamResp.Delta)
				delta := streamResp.Delta
				chunk := &dto.ChatCompletionsStreamResponse{
					Id:      responseId,
					Object:  "chat.completion.chunk",
					Created: createAt,
					Model:   model,
					Choices: []dto.ChatCompletionsStreamResponseChoice{
						{
							Index: 0,
							Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
								Content: &delta,
							},
						},
					},
				}
				if !sendChatChunk(chunk) {
					sr.Stop(streamErr)
					return
				}
			}
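		// Whole function_call output items: record the canonical call ID and send only
		// the argument suffix that has not been streamed yet.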
		case "response.output_item.added", "response.output_item.done":
			if streamResp.Item == nil {
				break
			}
			if streamResp.Item.Type != "function_call" {
				break
			}
			itemID := strings.TrimSpace(streamResp.Item.ID)
			callID := strings.TrimSpace(streamResp.Item.CallId)
			if callID == "" {
				callID = itemID
			}
			if itemID != "" && callID != "" {
				toolCallCanonicalIDByItemID[itemID] = callID
			}
			name := strings.TrimSpace(streamResp.Item.Name)
			if name != "" {
				toolCallNameByID[callID] = name
			}
			newArgs := streamResp.Item.ArgumentsString()
			prevArgs := toolCallArgsByID[callID]
			argsDelta := ""
			if newArgs != "" {
				if strings.HasPrefix(newArgs, prevArgs) {
					argsDelta = newArgs[len(prevArgs):]
				} else {
					argsDelta = newArgs
				}
				toolCallArgsByID[callID] = newArgs
			}
			if !sendToolCallDelta(callID, name, argsDelta) {
				sr.Stop(streamErr)
				return
			}
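		// Incremental argument fragments arrive keyed by item ID and are mapped back to
		// the canonical call ID before being forwarded.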
		case "response.function_call_arguments.delta":
			itemID := strings.TrimSpace(streamResp.ItemID)
			callID := toolCallCanonicalIDByItemID[itemID]
			if callID == "" {
				callID = itemID
			}
			if callID == "" {
				break
			}
			toolCallArgsByID[callID] += streamResp.Delta
			if !sendToolCallDelta(callID, "", streamResp.Delta) {
				sr.Stop(streamErr)
				return
			}
		case "response.function_call_arguments.done":
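			// Nothing to do: argument fragments were already forwarded by the delta handler.
		// Terminal event: prefer the upstream-reported usage and emit the stop chunk.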
		case "response.completed":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
				if streamResp.Response.Usage != nil {
					if streamResp.Response.Usage.InputTokens != 0 {
						usage.PromptTokens = streamResp.Response.Usage.InputTokens
						usage.InputTokens = streamResp.Response.Usage.InputTokens
					}
					if streamResp.Response.Usage.OutputTokens != 0 {
						usage.CompletionTokens = streamResp.Response.Usage.OutputTokens
						usage.OutputTokens = streamResp.Response.Usage.OutputTokens
					}
					if streamResp.Response.Usage.TotalTokens != 0 {
						usage.TotalTokens = streamResp.Response.Usage.TotalTokens
					} else {
						usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
					}
					if streamResp.Response.Usage.InputTokensDetails != nil {
						usage.PromptTokensDetails.CachedTokens = streamResp.Response.Usage.InputTokensDetails.CachedTokens
						usage.PromptTokensDetails.ImageTokens = streamResp.Response.Usage.InputTokensDetails.ImageTokens
						usage.PromptTokensDetails.AudioTokens = streamResp.Response.Usage.InputTokensDetails.AudioTokens
					}
					if streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens != 0 {
						usage.CompletionTokenDetails.ReasoningTokens = streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens
					}
				}
			}
			if !sendStartIfNeeded() {
				sr.Stop(streamErr)
				return
			}
			if !sentStop {
				if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo != nil {
					info.ClaudeConvertInfo.Usage = usage
				}
				finishReason := "stop"
				if sawToolCall && outputText.Len() == 0 {
					finishReason = "tool_calls"
				}
				stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
				if !sendChatChunk(stop) {
					sr.Stop(streamErr)
					return
				}
				sentStop = true
			}
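		// Upstream failure events: surface the provider error when present, otherwise
		// report a generic stream error.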
		case "response.error", "response.failed":
			if streamResp.Response != nil {
				if oaiErr := streamResp.Response.GetOpenAIError(); oaiErr != nil && oaiErr.Type != "" {
					streamErr = types.WithOpenAIError(*oaiErr, http.StatusInternalServerError)
					sr.Stop(streamErr)
					return
				}
			}
			streamErr = types.NewOpenAIError(fmt.Errorf("responses stream error: %s", streamResp.Type), types.ErrorCodeBadResponse, http.StatusInternalServerError)
			sr.Stop(streamErr)
			return
		default:
		}
	})
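	// Stream finished: fall back to local token estimation if the upstream never
	// reported usage, and make sure start/stop/usage chunks were emitted.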
	if streamErr != nil {
		return nil, streamErr
	}
	if usage.TotalTokens == 0 {
		usage = service.ResponseText2Usage(c, usageText.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
	}
	if !sentStart {
		if !sendChatChunk(helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)) {
			return nil, streamErr
		}
	}
	if !sentStop {
		if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo != nil {
			info.ClaudeConvertInfo.Usage = usage
		}
		finishReason := "stop"
		if sawToolCall && outputText.Len() == 0 {
			finishReason = "tool_calls"
		}
		stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
		if !sendChatChunk(stop) {
			return nil, streamErr
		}
	}
	if info.RelayFormat == types.RelayFormatOpenAI && info.ShouldIncludeUsage && usage != nil {
		if err := helper.ObjectData(c, helper.GenerateFinalUsageResponse(responseId, createAt, model, *usage)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if info.RelayFormat == types.RelayFormatOpenAI {
		helper.Done(c)
	}
	return usage, nil
}