chat_via_responses.go

package openai

import (
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)
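
// responsesStreamIndexKey builds a per-item state key from an item ID and an
// optional index (e.g. a reasoning summary index), formatted as "<id>:<idx>".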
func responsesStreamIndexKey(itemID string, idx *int) string {
	if itemID == "" {
		return ""
	}
	if idx == nil {
		return itemID
	}
	return fmt.Sprintf("%s:%d", itemID, *idx)
}
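
// stringDeltaFromPrefix returns the portion of next that extends prev; when
// next does not start with prev, the whole next string is returned.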
func stringDeltaFromPrefix(prev string, next string) string {
	if next == "" {
		return ""
	}
	if prev != "" && strings.HasPrefix(next, prev) {
		return next[len(prev):]
	}
	return next
}
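
// OaiResponsesToChatHandler converts a non-streaming Responses API response
// into a Chat Completions response (or a Claude/Gemini response, depending on
// the relay format) and writes it to the client.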
func OaiResponsesToChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)

	var responsesResp dto.OpenAIResponsesResponse
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
	}
	if err := common.Unmarshal(body, &responsesResp); err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if oaiError := responsesResp.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
		return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
	}

	chatId := helper.GetResponseID(c)
	chatResp, usage, err := service.ResponsesResponseToChatCompletionsResponse(&responsesResp, chatId)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
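	// The upstream may omit usage; fall back to estimating token counts from
	// the extracted output text.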
	if usage == nil || usage.TotalTokens == 0 {
		text := service.ExtractOutputTextFromResponses(&responsesResp)
		usage = service.ResponseText2Usage(c, text, info.UpstreamModelName, info.GetEstimatePromptTokens())
		chatResp.Usage = *usage
	}

	var responseBody []byte
	switch info.RelayFormat {
	case types.RelayFormatClaude:
		claudeResp := service.ResponseOpenAI2Claude(chatResp, info)
		responseBody, err = common.Marshal(claudeResp)
	case types.RelayFormatGemini:
		geminiResp := service.ResponseOpenAI2Gemini(chatResp, info)
		responseBody, err = common.Marshal(geminiResp)
	default:
		responseBody, err = common.Marshal(chatResp)
	}
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
	}
	service.IOCopyBytesGracefully(c, resp, responseBody)
	return usage, nil
}
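
// OaiResponsesToChatStreamHandler consumes a streaming Responses API response
// and re-emits it as Chat Completions stream chunks (or Claude/Gemini events,
// depending on the relay format), accumulating usage along the way.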
func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)

	responseId := helper.GetResponseID(c)
	createAt := time.Now().Unix()
	model := info.UpstreamModelName
	var (
		usage       = &dto.Usage{}
		outputText  strings.Builder
		usageText   strings.Builder
		sentStart   bool
		sentStop    bool
		sawToolCall bool
		streamErr   *types.NewAPIError
	)
	toolCallIndexByID := make(map[string]int)
	toolCallNameByID := make(map[string]string)
	toolCallArgsByID := make(map[string]string)
	toolCallNameSent := make(map[string]bool)
	toolCallCanonicalIDByItemID := make(map[string]string)
	//reasoningSummaryTextByKey := make(map[string]string)
	if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo == nil {
		info.ClaudeConvertInfo = &relaycommon.ClaudeConvertInfo{LastMessagesType: relaycommon.LastMessageTypeNone}
	}
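
	// sendChatChunk writes one chat.completion.chunk to the client, either as
	// OpenAI SSE directly or re-encoded for the requested relay format.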
	sendChatChunk := func(chunk *dto.ChatCompletionsStreamResponse) bool {
		if chunk == nil {
			return true
		}
		if info.RelayFormat == types.RelayFormatOpenAI {
			if err := helper.ObjectData(c, chunk); err != nil {
				streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
				return false
			}
			return true
		}
		chunkData, err := common.Marshal(chunk)
		if err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
			return false
		}
		if err := HandleStreamFormat(c, info, string(chunkData), false, false); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		return true
	}
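
	// sendStartIfNeeded emits the synthetic start chunk exactly once, before
	// any content, reasoning, or tool call deltas.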
	sendStartIfNeeded := func() bool {
		if sentStart {
			return true
		}
		if !sendChatChunk(helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)) {
			return false
		}
		sentStart = true
		return true
	}
	//sendReasoningDelta := func(delta string) bool {
	//	if delta == "" {
	//		return true
	//	}
	//	if !sendStartIfNeeded() {
	//		return false
	//	}
	//
	//	usageText.WriteString(delta)
	//	chunk := &dto.ChatCompletionsStreamResponse{
	//		Id:      responseId,
	//		Object:  "chat.completion.chunk",
	//		Created: createAt,
	//		Model:   model,
	//		Choices: []dto.ChatCompletionsStreamResponseChoice{
	//			{
	//				Index: 0,
	//				Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
	//					ReasoningContent: &delta,
	//				},
	//			},
	//		},
	//	}
	//	if err := helper.ObjectData(c, chunk); err != nil {
	//		streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
	//		return false
	//	}
	//	return true
	//}
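
	// sendReasoningSummaryDelta forwards reasoning summary text as a
	// reasoning_content delta and records it for fallback usage estimation.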
	sendReasoningSummaryDelta := func(delta string) bool {
		if delta == "" {
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}
		usageText.WriteString(delta)
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ReasoningContent: &delta,
					},
				},
			},
		}
		if !sendChatChunk(chunk) {
			return false
		}
		return true
	}
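
	// sendToolCallDelta streams function tool call deltas, assigning a stable
	// index per call ID and sending the function name only once per call.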
	sendToolCallDelta := func(callID string, name string, argsDelta string) bool {
		if callID == "" {
			return true
		}
		if outputText.Len() > 0 {
			// Prefer streaming assistant text over tool calls to match non-stream behavior.
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}
		idx, ok := toolCallIndexByID[callID]
		if !ok {
			idx = len(toolCallIndexByID)
			toolCallIndexByID[callID] = idx
		}
		if name != "" {
			toolCallNameByID[callID] = name
		}
		if toolCallNameByID[callID] != "" {
			name = toolCallNameByID[callID]
		}
		tool := dto.ToolCallResponse{
			ID:   callID,
			Type: "function",
			Function: dto.FunctionResponse{
				Arguments: argsDelta,
			},
		}
		tool.SetIndex(idx)
		if name != "" && !toolCallNameSent[callID] {
			tool.Function.Name = name
			toolCallNameSent[callID] = true
		}
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ToolCalls: []dto.ToolCallResponse{tool},
					},
				},
			},
		}
		if !sendChatChunk(chunk) {
			return false
		}
		sawToolCall = true
		// Include tool call data in the local builder for fallback token estimation.
		if tool.Function.Name != "" {
			usageText.WriteString(tool.Function.Name)
		}
		if argsDelta != "" {
			usageText.WriteString(argsDelta)
		}
		return true
	}
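
	// Consume the upstream SSE stream, translating each Responses event into
	// the corresponding Chat Completions chunk.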
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		if streamErr != nil {
			return false
		}
		var streamResp dto.ResponsesStreamResponse
		if err := common.UnmarshalJsonStr(data, &streamResp); err != nil {
			logger.LogError(c, "failed to unmarshal responses stream event: "+err.Error())
			return true
		}
		switch streamResp.Type {
		case "response.created":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
			}
		//case "response.reasoning_text.delta":
		//	if !sendReasoningDelta(streamResp.Delta) {
		//		return false
		//	}
		//case "response.reasoning_text.done":
		case "response.reasoning_summary_text.delta":
			if !sendReasoningSummaryDelta(streamResp.Delta) {
				return false
			}
		case "response.reasoning_summary_text.done":
		//case "response.reasoning_summary_part.added", "response.reasoning_summary_part.done":
		//	key := responsesStreamIndexKey(strings.TrimSpace(streamResp.ItemID), streamResp.SummaryIndex)
		//	if key == "" || streamResp.Part == nil {
		//		break
		//	}
		//	// Only handle summary text parts, ignore other part types.
		//	if streamResp.Part.Type != "" && streamResp.Part.Type != "summary_text" {
		//		break
		//	}
		//	prev := reasoningSummaryTextByKey[key]
		//	next := streamResp.Part.Text
		//	delta := stringDeltaFromPrefix(prev, next)
		//	reasoningSummaryTextByKey[key] = next
		//	if !sendReasoningSummaryDelta(delta) {
		//		return false
		//	}
		case "response.output_text.delta":
			if !sendStartIfNeeded() {
				return false
			}
			if streamResp.Delta != "" {
				outputText.WriteString(streamResp.Delta)
				usageText.WriteString(streamResp.Delta)
				delta := streamResp.Delta
				chunk := &dto.ChatCompletionsStreamResponse{
					Id:      responseId,
					Object:  "chat.completion.chunk",
					Created: createAt,
					Model:   model,
					Choices: []dto.ChatCompletionsStreamResponseChoice{
						{
							Index: 0,
							Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
								Content: &delta,
							},
						},
					},
				}
				if !sendChatChunk(chunk) {
					return false
				}
			}
		case "response.output_item.added", "response.output_item.done":
			if streamResp.Item == nil {
				break
			}
			if streamResp.Item.Type != "function_call" {
				break
			}
			itemID := strings.TrimSpace(streamResp.Item.ID)
			callID := strings.TrimSpace(streamResp.Item.CallId)
			if callID == "" {
				callID = itemID
			}
			if itemID != "" && callID != "" {
				toolCallCanonicalIDByItemID[itemID] = callID
			}
			name := strings.TrimSpace(streamResp.Item.Name)
			if name != "" {
				toolCallNameByID[callID] = name
			}
			newArgs := streamResp.Item.Arguments
			prevArgs := toolCallArgsByID[callID]
			argsDelta := ""
			if newArgs != "" {
				if strings.HasPrefix(newArgs, prevArgs) {
					argsDelta = newArgs[len(prevArgs):]
				} else {
					argsDelta = newArgs
				}
				toolCallArgsByID[callID] = newArgs
			}
			if !sendToolCallDelta(callID, name, argsDelta) {
				return false
			}
		case "response.function_call_arguments.delta":
			itemID := strings.TrimSpace(streamResp.ItemID)
			callID := toolCallCanonicalIDByItemID[itemID]
			if callID == "" {
				callID = itemID
			}
			if callID == "" {
				break
			}
			toolCallArgsByID[callID] += streamResp.Delta
			if !sendToolCallDelta(callID, "", streamResp.Delta) {
				return false
			}
		case "response.function_call_arguments.done":
		case "response.completed":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
				if streamResp.Response.Usage != nil {
					if streamResp.Response.Usage.InputTokens != 0 {
						usage.PromptTokens = streamResp.Response.Usage.InputTokens
						usage.InputTokens = streamResp.Response.Usage.InputTokens
					}
					if streamResp.Response.Usage.OutputTokens != 0 {
						usage.CompletionTokens = streamResp.Response.Usage.OutputTokens
						usage.OutputTokens = streamResp.Response.Usage.OutputTokens
					}
					if streamResp.Response.Usage.TotalTokens != 0 {
						usage.TotalTokens = streamResp.Response.Usage.TotalTokens
					} else {
						usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
					}
					if streamResp.Response.Usage.InputTokensDetails != nil {
						usage.PromptTokensDetails.CachedTokens = streamResp.Response.Usage.InputTokensDetails.CachedTokens
						usage.PromptTokensDetails.ImageTokens = streamResp.Response.Usage.InputTokensDetails.ImageTokens
						usage.PromptTokensDetails.AudioTokens = streamResp.Response.Usage.InputTokensDetails.AudioTokens
					}
					if streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens != 0 {
						usage.CompletionTokenDetails.ReasoningTokens = streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens
					}
				}
			}
			if !sendStartIfNeeded() {
				return false
			}
			if !sentStop {
				if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo != nil {
					info.ClaudeConvertInfo.Usage = usage
				}
				finishReason := "stop"
				if sawToolCall && outputText.Len() == 0 {
					finishReason = "tool_calls"
				}
				stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
				if !sendChatChunk(stop) {
					return false
				}
				sentStop = true
			}
		case "response.error", "response.failed":
			if streamResp.Response != nil {
				if oaiErr := streamResp.Response.GetOpenAIError(); oaiErr != nil && oaiErr.Type != "" {
					streamErr = types.WithOpenAIError(*oaiErr, http.StatusInternalServerError)
					return false
				}
			}
			streamErr = types.NewOpenAIError(fmt.Errorf("responses stream error: %s", streamResp.Type), types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		default:
		}
		return true
	})
	if streamErr != nil {
		return nil, streamErr
	}
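	// If the upstream never reported usage or never completed, estimate usage
	// from the accumulated text and synthesize the missing start/stop chunks.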
	if usage.TotalTokens == 0 {
		usage = service.ResponseText2Usage(c, usageText.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
	}
	if !sentStart {
		if !sendChatChunk(helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)) {
			return nil, streamErr
		}
	}
	if !sentStop {
		if info.RelayFormat == types.RelayFormatClaude && info.ClaudeConvertInfo != nil {
			info.ClaudeConvertInfo.Usage = usage
		}
		finishReason := "stop"
		if sawToolCall && outputText.Len() == 0 {
			finishReason = "tool_calls"
		}
		stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
		if !sendChatChunk(stop) {
			return nil, streamErr
		}
	}
	if info.RelayFormat == types.RelayFormatOpenAI && info.ShouldIncludeUsage && usage != nil {
		if err := helper.ObjectData(c, helper.GenerateFinalUsageResponse(responseId, createAt, model, *usage)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if info.RelayFormat == types.RelayFormatOpenAI {
		helper.Done(c)
	}
	return usage, nil
}