@@ -9,6 +9,7 @@ import (
 	"one-api/common"
 	"one-api/constant"
 	"one-api/dto"
+	"one-api/relay/channel/openai"
 	relaycommon "one-api/relay/common"
 	"one-api/relay/helper"
 	"one-api/service"
@@ -736,7 +737,7 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.C
 	choice := dto.ChatCompletionsStreamResponseChoice{
 		Index: int(candidate.Index),
 		Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
-			Role: "assistant",
+			//Role: "assistant",
 		},
 	}
 	var texts []string
@@ -798,6 +799,27 @@ func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.C
 	return &response, isStop, hasImage
 }
 
+func handleStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
+	streamData, err := common.Marshal(resp)
+	if err != nil {
+		return fmt.Errorf("failed to marshal stream response: %w", err)
+	}
+	err = openai.HandleStreamFormat(c, info, string(streamData), info.ChannelSetting.ForceFormat, info.ChannelSetting.ThinkingToContent)
+	if err != nil {
+		return fmt.Errorf("failed to handle stream format: %w", err)
+	}
+	return nil
+}
+
+func handleFinalStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.ChatCompletionsStreamResponse) error {
+	streamData, err := common.Marshal(resp)
+	if err != nil {
+		return fmt.Errorf("failed to marshal stream response: %w", err)
+	}
+	openai.HandleFinalResponse(c, info, string(streamData), resp.Id, resp.Created, resp.Model, resp.GetSystemFingerprint(), resp.Usage, info.ShouldIncludeUsage)
+	return nil
+}
+
 func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
 	// responseText := ""
 	id := helper.GetResponseID(c)
@@ -805,6 +827,8 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 	var usage = &dto.Usage{}
 	var imageCount int
+	respCount := 0
+
 	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
 		var geminiResponse GeminiChatResponse
 		err := common.UnmarshalJsonStr(data, &geminiResponse)
@@ -833,18 +857,31 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 				}
 			}
 		}
-		err = helper.ObjectData(c, response)
+
+		if respCount == 0 {
+			// send first response
+			err = handleStream(c, info, helper.GenerateStartEmptyResponse(id, createAt, info.UpstreamModelName, nil))
+			if err != nil {
+				common.LogError(c, err.Error())
+			}
+		}
+
+		err = handleStream(c, info, response)
 		if err != nil {
 			common.LogError(c, err.Error())
 		}
 		if isStop {
-			response := helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
-			helper.ObjectData(c, response)
+			_ = handleStream(c, info, helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop))
 		}
+		respCount++
 		return true
 	})
 
-	var response *dto.ChatCompletionsStreamResponse
+	if respCount == 0 {
+		// empty completion: return an error
+		// and do not bill for the request
+		return nil, types.NewOpenAIError(errors.New("no response received from Gemini API"), types.ErrorCodeEmptyResponse, http.StatusInternalServerError)
+	}
+
 	if imageCount != 0 {
 		if usage.CompletionTokens == 0 {
@@ -855,14 +892,14 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 		usage.PromptTokensDetails.TextTokens = usage.PromptTokens
 		usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
 	}
-	if info.ShouldIncludeUsage {
-		response = helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
-		err := helper.ObjectData(c, response)
-		if err != nil {
-			common.SysError("send final response failed: " + err.Error())
-		}
+	response := helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
+	err := handleFinalStream(c, info, response)
+	if err != nil {
+		common.SysError("send final response failed: " + err.Error())
 	}
-	helper.Done(c)
+	//if info.RelayFormat == relaycommon.RelayFormatOpenAI {
+	//	helper.Done(c)
+	//}
 	//resp.Body.Close()
 	return usage, nil
 }