chat_via_responses.go

package openai

import (
	"fmt"
	"io"
	"net/http"
	"strings"
	"time"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/helper"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/types"
	"github.com/gin-gonic/gin"
)
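
// responsesStreamIndexKey builds a stable lookup key for per-item streamed
// state: the item ID alone, or "itemID:idx" when an optional index (e.g. a
// summary index) is present, so with two := 2 the call
// responsesStreamIndexKey("item_1", &two) yields "item_1:2".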
func responsesStreamIndexKey(itemID string, idx *int) string {
	if itemID == "" {
		return ""
	}
	if idx == nil {
		return itemID
	}
	return fmt.Sprintf("%s:%d", itemID, *idx)
}
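
// stringDeltaFromPrefix returns the suffix of next that extends prev, e.g.
// stringDeltaFromPrefix("Hel", "Hello") == "lo". When next does not extend
// prev (or prev is empty), next is returned whole, so no text is dropped if
// the upstream re-sends a full snapshot instead of an incremental delta.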
func stringDeltaFromPrefix(prev string, next string) string {
	if next == "" {
		return ""
	}
	if prev != "" && strings.HasPrefix(next, prev) {
		return next[len(prev):]
	}
	return next
}
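
// OaiResponsesToChatHandler converts a non-streaming Responses API reply into
// a Chat Completions response body. Usage comes from the upstream payload
// when available; otherwise it is estimated from the extracted output text.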
func OaiResponsesToChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)

	var responsesResp dto.OpenAIResponsesResponse
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
	}
	if err := common.Unmarshal(body, &responsesResp); err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if oaiError := responsesResp.GetOpenAIError(); oaiError != nil && oaiError.Type != "" {
		return nil, types.WithOpenAIError(*oaiError, resp.StatusCode)
	}

	chatId := helper.GetResponseID(c)
	chatResp, usage, err := service.ResponsesResponseToChatCompletionsResponse(&responsesResp, chatId)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
	}
	if usage == nil || usage.TotalTokens == 0 {
		// Upstream reported no usage; fall back to estimating tokens from the output text.
		text := service.ExtractOutputTextFromResponses(&responsesResp)
		usage = service.ResponseText2Usage(c, text, info.UpstreamModelName, info.GetEstimatePromptTokens())
		chatResp.Usage = *usage
	}

	chatBody, err := common.Marshal(chatResp)
	if err != nil {
		return nil, types.NewOpenAIError(err, types.ErrorCodeJsonMarshalFailed, http.StatusInternalServerError)
	}
	service.IOCopyBytesGracefully(c, resp, chatBody)
	return usage, nil
}
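
// OaiResponsesToChatStreamHandler bridges a streaming Responses API reply to
// Chat Completions stream chunks: reasoning summary deltas are forwarded as
// reasoning_content, output text deltas as content, and function_call items
// as tool_calls deltas with a stable index per canonical call ID.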
func OaiResponsesToChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	if resp == nil || resp.Body == nil {
		return nil, types.NewOpenAIError(fmt.Errorf("invalid response"), types.ErrorCodeBadResponse, http.StatusInternalServerError)
	}
	defer service.CloseResponseBodyGracefully(resp)

	responseId := helper.GetResponseID(c)
	createAt := time.Now().Unix()
	model := info.UpstreamModelName

	var (
		usage       = &dto.Usage{}
		outputText  strings.Builder
		usageText   strings.Builder
		sentStart   bool
		sentStop    bool
		sawToolCall bool
		streamErr   *types.NewAPIError
	)

	toolCallIndexByID := make(map[string]int)
	toolCallNameByID := make(map[string]string)
	toolCallArgsByID := make(map[string]string)
	toolCallNameSent := make(map[string]bool)
	toolCallCanonicalIDByItemID := make(map[string]string)
	//reasoningSummaryTextByKey := make(map[string]string)
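
	// sendStartIfNeeded lazily emits the initial empty response chunk (via
	// helper.GenerateStartEmptyResponse) at most once, before any delta.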
	sendStartIfNeeded := func() bool {
		if sentStart {
			return true
		}
		if err := helper.ObjectData(c, helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		sentStart = true
		return true
	}
	//sendReasoningDelta := func(delta string) bool {
	//	if delta == "" {
	//		return true
	//	}
	//	if !sendStartIfNeeded() {
	//		return false
	//	}
	//
	//	usageText.WriteString(delta)
	//	chunk := &dto.ChatCompletionsStreamResponse{
	//		Id:      responseId,
	//		Object:  "chat.completion.chunk",
	//		Created: createAt,
	//		Model:   model,
	//		Choices: []dto.ChatCompletionsStreamResponseChoice{
	//			{
	//				Index: 0,
	//				Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
	//					ReasoningContent: &delta,
	//				},
	//			},
	//		},
	//	}
	//	if err := helper.ObjectData(c, chunk); err != nil {
	//		streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
	//		return false
	//	}
	//	return true
	//}
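
	// sendReasoningSummaryDelta forwards a reasoning summary fragment as a
	// reasoning_content delta and records it for fallback token estimation.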
	sendReasoningSummaryDelta := func(delta string) bool {
		if delta == "" {
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}
		usageText.WriteString(delta)
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ReasoningContent: &delta,
					},
				},
			},
		}
		if err := helper.ObjectData(c, chunk); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		return true
	}
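
	// sendToolCallDelta emits one tool_calls chunk for callID. The function
	// name is sent at most once per call; subsequent chunks carry only
	// argument deltas. Suppressed entirely once assistant text has streamed.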
	sendToolCallDelta := func(callID string, name string, argsDelta string) bool {
		if callID == "" {
			return true
		}
		if outputText.Len() > 0 {
			// Prefer streaming assistant text over tool calls to match non-stream behavior.
			return true
		}
		if !sendStartIfNeeded() {
			return false
		}
		idx, ok := toolCallIndexByID[callID]
		if !ok {
			idx = len(toolCallIndexByID)
			toolCallIndexByID[callID] = idx
		}
		if name != "" {
			toolCallNameByID[callID] = name
		}
		if toolCallNameByID[callID] != "" {
			name = toolCallNameByID[callID]
		}
		tool := dto.ToolCallResponse{
			ID:   callID,
			Type: "function",
			Function: dto.FunctionResponse{
				Arguments: argsDelta,
			},
		}
		tool.SetIndex(idx)
		if name != "" && !toolCallNameSent[callID] {
			tool.Function.Name = name
			toolCallNameSent[callID] = true
		}
		chunk := &dto.ChatCompletionsStreamResponse{
			Id:      responseId,
			Object:  "chat.completion.chunk",
			Created: createAt,
			Model:   model,
			Choices: []dto.ChatCompletionsStreamResponseChoice{
				{
					Index: 0,
					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
						ToolCalls: []dto.ToolCallResponse{tool},
					},
				},
			},
		}
		if err := helper.ObjectData(c, chunk); err != nil {
			streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		}
		sawToolCall = true
		// Include tool call data in the local builder for fallback token estimation.
		if tool.Function.Name != "" {
			usageText.WriteString(tool.Function.Name)
		}
		if argsDelta != "" {
			usageText.WriteString(argsDelta)
		}
		return true
	}
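
	// Consume the upstream SSE stream event by event. Malformed events are
	// logged and skipped; fatal events set streamErr and abort the scan.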
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		if streamErr != nil {
			return false
		}
		var streamResp dto.ResponsesStreamResponse
		if err := common.UnmarshalJsonStr(data, &streamResp); err != nil {
			logger.LogError(c, "failed to unmarshal responses stream event: "+err.Error())
			return true
		}
		switch streamResp.Type {
		case "response.created":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
			}
		//case "response.reasoning_text.delta":
		//	if !sendReasoningDelta(streamResp.Delta) {
		//		return false
		//	}
		//case "response.reasoning_text.done":
		case "response.reasoning_summary_text.delta":
			if !sendReasoningSummaryDelta(streamResp.Delta) {
				return false
			}
		case "response.reasoning_summary_text.done":
		//case "response.reasoning_summary_part.added", "response.reasoning_summary_part.done":
		//	key := responsesStreamIndexKey(strings.TrimSpace(streamResp.ItemID), streamResp.SummaryIndex)
		//	if key == "" || streamResp.Part == nil {
		//		break
		//	}
		//	// Only handle summary text parts, ignore other part types.
		//	if streamResp.Part.Type != "" && streamResp.Part.Type != "summary_text" {
		//		break
		//	}
		//	prev := reasoningSummaryTextByKey[key]
		//	next := streamResp.Part.Text
		//	delta := stringDeltaFromPrefix(prev, next)
		//	reasoningSummaryTextByKey[key] = next
		//	if !sendReasoningSummaryDelta(delta) {
		//		return false
		//	}
		case "response.output_text.delta":
			if !sendStartIfNeeded() {
				return false
			}
			if streamResp.Delta != "" {
				outputText.WriteString(streamResp.Delta)
				usageText.WriteString(streamResp.Delta)
				delta := streamResp.Delta
				chunk := &dto.ChatCompletionsStreamResponse{
					Id:      responseId,
					Object:  "chat.completion.chunk",
					Created: createAt,
					Model:   model,
					Choices: []dto.ChatCompletionsStreamResponseChoice{
						{
							Index: 0,
							Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
								Content: &delta,
							},
						},
					},
				}
				if err := helper.ObjectData(c, chunk); err != nil {
					streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
					return false
				}
			}
		case "response.output_item.added", "response.output_item.done":
			if streamResp.Item == nil {
				break
			}
			if streamResp.Item.Type != "function_call" {
				break
			}
			itemID := strings.TrimSpace(streamResp.Item.ID)
			callID := strings.TrimSpace(streamResp.Item.CallId)
			if callID == "" {
				callID = itemID
			}
			if itemID != "" && callID != "" {
				toolCallCanonicalIDByItemID[itemID] = callID
			}
			name := strings.TrimSpace(streamResp.Item.Name)
			if name != "" {
				toolCallNameByID[callID] = name
			}
			// Items can arrive as full snapshots; only forward the portion not yet sent.
			newArgs := streamResp.Item.Arguments
			prevArgs := toolCallArgsByID[callID]
			argsDelta := ""
			if newArgs != "" {
				if strings.HasPrefix(newArgs, prevArgs) {
					argsDelta = newArgs[len(prevArgs):]
				} else {
					argsDelta = newArgs
				}
				toolCallArgsByID[callID] = newArgs
			}
			if !sendToolCallDelta(callID, name, argsDelta) {
				return false
			}
		case "response.function_call_arguments.delta":
			// Resolve the upstream item ID to the canonical call ID before forwarding.
			itemID := strings.TrimSpace(streamResp.ItemID)
			callID := toolCallCanonicalIDByItemID[itemID]
			if callID == "" {
				callID = itemID
			}
			if callID == "" {
				break
			}
			toolCallArgsByID[callID] += streamResp.Delta
			if !sendToolCallDelta(callID, "", streamResp.Delta) {
				return false
			}
		case "response.function_call_arguments.done":
		case "response.completed":
			if streamResp.Response != nil {
				if streamResp.Response.Model != "" {
					model = streamResp.Response.Model
				}
				if streamResp.Response.CreatedAt != 0 {
					createAt = int64(streamResp.Response.CreatedAt)
				}
				// Prefer the authoritative usage from the terminal event over local estimates.
				if streamResp.Response.Usage != nil {
					if streamResp.Response.Usage.InputTokens != 0 {
						usage.PromptTokens = streamResp.Response.Usage.InputTokens
						usage.InputTokens = streamResp.Response.Usage.InputTokens
					}
					if streamResp.Response.Usage.OutputTokens != 0 {
						usage.CompletionTokens = streamResp.Response.Usage.OutputTokens
						usage.OutputTokens = streamResp.Response.Usage.OutputTokens
					}
					if streamResp.Response.Usage.TotalTokens != 0 {
						usage.TotalTokens = streamResp.Response.Usage.TotalTokens
					} else {
						usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
					}
					if streamResp.Response.Usage.InputTokensDetails != nil {
						usage.PromptTokensDetails.CachedTokens = streamResp.Response.Usage.InputTokensDetails.CachedTokens
						usage.PromptTokensDetails.ImageTokens = streamResp.Response.Usage.InputTokensDetails.ImageTokens
						usage.PromptTokensDetails.AudioTokens = streamResp.Response.Usage.InputTokensDetails.AudioTokens
					}
					if streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens != 0 {
						usage.CompletionTokenDetails.ReasoningTokens = streamResp.Response.Usage.CompletionTokenDetails.ReasoningTokens
					}
				}
			}
			if !sendStartIfNeeded() {
				return false
			}
			if !sentStop {
				finishReason := "stop"
				if sawToolCall && outputText.Len() == 0 {
					finishReason = "tool_calls"
				}
				stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
				if err := helper.ObjectData(c, stop); err != nil {
					streamErr = types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
					return false
				}
				sentStop = true
			}
		case "response.error", "response.failed":
			if streamResp.Response != nil {
				if oaiErr := streamResp.Response.GetOpenAIError(); oaiErr != nil && oaiErr.Type != "" {
					streamErr = types.WithOpenAIError(*oaiErr, http.StatusInternalServerError)
					return false
				}
			}
			streamErr = types.NewOpenAIError(fmt.Errorf("responses stream error: %s", streamResp.Type), types.ErrorCodeBadResponse, http.StatusInternalServerError)
			return false
		default:
		}
		return true
	})
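
	// Post-stream finalization: guarantee the client receives a start chunk, a
	// stop chunk with the appropriate finish_reason, and (when requested) a
	// final usage chunk, even if the upstream never emitted response.completed.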
	if streamErr != nil {
		return nil, streamErr
	}
	if usage.TotalTokens == 0 {
		usage = service.ResponseText2Usage(c, usageText.String(), info.UpstreamModelName, info.GetEstimatePromptTokens())
	}
	if !sentStart {
		if err := helper.ObjectData(c, helper.GenerateStartEmptyResponse(responseId, createAt, model, nil)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if !sentStop {
		finishReason := "stop"
		if sawToolCall && outputText.Len() == 0 {
			finishReason = "tool_calls"
		}
		stop := helper.GenerateStopResponse(responseId, createAt, model, finishReason)
		if err := helper.ObjectData(c, stop); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	if info.ShouldIncludeUsage && usage != nil {
		if err := helper.ObjectData(c, helper.GenerateFinalUsageResponse(responseId, createAt, model, *usage)); err != nil {
			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponse, http.StatusInternalServerError)
		}
	}
	helper.Done(c)
	return usage, nil
}