// relay-openai.go
  1. package controller
  2. import (
  3. "bufio"
  4. "bytes"
  5. "encoding/json"
  6. "github.com/gin-gonic/gin"
  7. "io"
  8. "net/http"
  9. "one-api/common"
  10. "strings"
  11. )
  12. func openaiStreamHandler(c *gin.Context, resp *http.Response, relayMode int) (*OpenAIErrorWithStatusCode, string) {
  13. var responseTextBuilder strings.Builder
  14. scanner := bufio.NewScanner(resp.Body)
  15. scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
  16. if atEOF && len(data) == 0 {
  17. return 0, nil, nil
  18. }
  19. if i := strings.Index(string(data), "\n"); i >= 0 {
  20. return i + 1, data[0:i], nil
  21. }
  22. if atEOF {
  23. return len(data), data, nil
  24. }
  25. return 0, nil, nil
  26. })
  27. dataChan := make(chan string)
  28. stopChan := make(chan bool)
  29. go func() {
  30. var streamItems []string
  31. for scanner.Scan() {
  32. data := scanner.Text()
  33. if len(data) < 6 { // ignore blank line or wrong format
  34. continue
  35. }
  36. if data[:6] != "data: " && data[:6] != "[DONE]" {
  37. continue
  38. }
  39. dataChan <- data
  40. data = data[6:]
  41. if !strings.HasPrefix(data, "[DONE]") {
  42. streamItems = append(streamItems, data)
  43. }
  44. }
  45. streamResp := "[" + strings.Join(streamItems, ",") + "]"
  46. switch relayMode {
  47. case RelayModeChatCompletions:
  48. var streamResponses []ChatCompletionsStreamResponseSimple
  49. err := json.Unmarshal(common.StringToByteSlice(streamResp), &streamResponses)
  50. if err != nil {
  51. common.SysError("error unmarshalling stream response: " + err.Error())
  52. return // just ignore the error
  53. }
  54. for _, streamResponse := range streamResponses {
  55. for _, choice := range streamResponse.Choices {
  56. responseTextBuilder.WriteString(choice.Delta.Content)
  57. }
  58. }
  59. case RelayModeCompletions:
  60. var streamResponses []CompletionsStreamResponse
  61. err := json.Unmarshal(common.StringToByteSlice(streamResp), &streamResponses)
  62. if err != nil {
  63. common.SysError("error unmarshalling stream response: " + err.Error())
  64. return // just ignore the error
  65. }
  66. for _, streamResponse := range streamResponses {
  67. for _, choice := range streamResponse.Choices {
  68. responseTextBuilder.WriteString(choice.Text)
  69. }
  70. }
  71. }
  72. stopChan <- true
  73. }()
  74. setEventStreamHeaders(c)
  75. c.Stream(func(w io.Writer) bool {
  76. select {
  77. case data := <-dataChan:
  78. if strings.HasPrefix(data, "data: [DONE]") {
  79. data = data[:12]
  80. }
  81. // some implementations may add \r at the end of data
  82. data = strings.TrimSuffix(data, "\r")
  83. c.Render(-1, common.CustomEvent{Data: data})
  84. return true
  85. case <-stopChan:
  86. return false
  87. }
  88. })
  89. err := resp.Body.Close()
  90. if err != nil {
  91. return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), ""
  92. }
  93. return nil, responseTextBuilder.String()
  94. }
  95. func openaiHandler(c *gin.Context, resp *http.Response, promptTokens int, model string) (*OpenAIErrorWithStatusCode, *Usage) {
  96. var textResponse TextResponse
  97. responseBody, err := io.ReadAll(resp.Body)
  98. if err != nil {
  99. return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
  100. }
  101. err = resp.Body.Close()
  102. if err != nil {
  103. return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
  104. }
  105. err = json.Unmarshal(responseBody, &textResponse)
  106. if err != nil {
  107. return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
  108. }
  109. if textResponse.Error.Type != "" {
  110. return &OpenAIErrorWithStatusCode{
  111. OpenAIError: textResponse.Error,
  112. StatusCode: resp.StatusCode,
  113. }, nil
  114. }
  115. // Reset response body
  116. resp.Body = io.NopCloser(bytes.NewBuffer(responseBody))
  117. // We shouldn't set the header before we parse the response body, because the parse part may fail.
  118. // And then we will have to send an error response, but in this case, the header has already been set.
  119. // So the httpClient will be confused by the response.
  120. // For example, Postman will report error, and we cannot check the response at all.
  121. for k, v := range resp.Header {
  122. c.Writer.Header().Set(k, v[0])
  123. }
  124. c.Writer.WriteHeader(resp.StatusCode)
  125. _, err = io.Copy(c.Writer, resp.Body)
  126. if err != nil {
  127. return errorWrapper(err, "copy_response_body_failed", http.StatusInternalServerError), nil
  128. }
  129. err = resp.Body.Close()
  130. if err != nil {
  131. return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
  132. }
  133. if textResponse.Usage.TotalTokens == 0 {
  134. completionTokens := 0
  135. for _, choice := range textResponse.Choices {
  136. completionTokens += countTokenText(string(choice.Message.Content), model)
  137. }
  138. textResponse.Usage = Usage{
  139. PromptTokens: promptTokens,
  140. CompletionTokens: completionTokens,
  141. TotalTokens: promptTokens + completionTokens,
  142. }
  143. }
  144. return nil, &textResponse.Usage
  145. }