Просмотр исходного кода

refactor: 更新请求转换逻辑,优化工具调用解析

somnifex 5 месяцев назад
Родитель
Commit
7d6ba52d85

+ 2 - 1
relay/channel/ollama/adaptor.go

@@ -32,7 +32,8 @@ func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayIn
 	openaiRequest.(*dto.GeneralOpenAIRequest).StreamOptions = &dto.StreamOptions{
 		IncludeUsage: true,
 	}
-	return requestOpenAI2Ollama(c, openaiRequest.(*dto.GeneralOpenAIRequest))
+	// map to ollama chat request (Claude -> OpenAI -> Ollama chat)
+	return openAIChatToOllamaChat(c, openaiRequest.(*dto.GeneralOpenAIRequest))
 }
 
 func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {

+ 0 - 1
relay/channel/ollama/dto.go

@@ -2,7 +2,6 @@ package ollama
 
 import (
 	"encoding/json"
-	"one-api/dto"
 )
 
 // OllamaChatMessage represents a single chat message

+ 14 - 12
relay/channel/ollama/relay-ollama.go

@@ -101,18 +101,21 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 		// history tool call result message
 		if m.Role == "tool" && m.Name != nil { cm.ToolName = *m.Name }
 		// tool calls from assistant previous message
-		if len(m.ToolCalls)>0 {
-			calls := make([]OllamaToolCall,0,len(m.ToolCalls))
-			for _, tc := range m.ToolCalls {
-				var args interface{}
-				if tc.Function.Arguments != "" { _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) }
-				oc := OllamaToolCall{}
-				oc.Function.Name = tc.Function.Name
-				if args==nil { args = map[string]any{} }
-				oc.Function.Arguments = args
-				calls = append(calls, oc)
+		if m.ToolCalls != nil && len(m.ToolCalls) > 0 {
+			parsed := m.ParseToolCalls()
+			if len(parsed) > 0 {
+				calls := make([]OllamaToolCall,0,len(parsed))
+				for _, tc := range parsed {
+					var args interface{}
+					if tc.Function.Arguments != "" { _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) }
+					if args==nil { args = map[string]any{} }
+					oc := OllamaToolCall{}
+					oc.Function.Name = tc.Function.Name
+					oc.Function.Arguments = args
+					calls = append(calls, oc)
+				}
+				cm.ToolCalls = calls
 			}
-			cm.ToolCalls = calls
 		}
 		chatReq.Messages = append(chatReq.Messages, cm)
 	}
@@ -165,7 +168,6 @@ func requestOpenAI2Embeddings(r dto.EmbeddingRequest) *OllamaEmbeddingRequest {
 	opts := map[string]any{}
 	if r.Temperature != nil { opts["temperature"] = r.Temperature }
 	if r.TopP != 0 { opts["top_p"] = r.TopP }
-	if r.TopK != 0 { opts["top_k"] = r.TopK }
 	if r.FrequencyPenalty != 0 { opts["frequency_penalty"] = r.FrequencyPenalty }
 	if r.PresencePenalty != 0 { opts["presence_penalty"] = r.PresencePenalty }
 	if r.Seed != 0 { opts["seed"] = int(r.Seed) }

+ 0 - 1
relay/channel/ollama/stream.go

@@ -87,7 +87,6 @@ func ollamaStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http
             // delta content
             var content string
             if chunk.Message != nil { content = chunk.Message.Content } else { content = chunk.Response }
-            if content != "" { aggregatedText.WriteString(content) }
             delta := dto.ChatCompletionsStreamResponse{
                 Id:      responseId,
                 Object:  "chat.completion.chunk",