Просмотр исходного кода

feat: Enhance Ollama channel support with additional request parameters #771

1808837298@qq.com 1 год назад
Родитель
Commit
a4b2b9c935
3 измененных файлов с 21 добавлено и 15 удалено
  1. 17 14
      relay/channel/ollama/dto.go
  2. 3 0
      relay/channel/ollama/relay-ollama.go
  3. 1 1
      relay/common/relay_info.go

+ 17 - 14
relay/channel/ollama/dto.go

@@ -3,18 +3,21 @@ package ollama
 import "one-api/dto"
 
 type OllamaRequest struct {
-	Model            string         `json:"model,omitempty"`
-	Messages         []dto.Message  `json:"messages,omitempty"`
-	Stream           bool           `json:"stream,omitempty"`
-	Temperature      *float64       `json:"temperature,omitempty"`
-	Seed             float64        `json:"seed,omitempty"`
-	Topp             float64        `json:"top_p,omitempty"`
-	TopK             int            `json:"top_k,omitempty"`
-	Stop             any            `json:"stop,omitempty"`
-	Tools            []dto.ToolCall `json:"tools,omitempty"`
-	ResponseFormat   any            `json:"response_format,omitempty"`
-	FrequencyPenalty float64        `json:"frequency_penalty,omitempty"`
-	PresencePenalty  float64        `json:"presence_penalty,omitempty"`
+	Model            string             `json:"model,omitempty"`
+	Messages         []dto.Message      `json:"messages,omitempty"`
+	Stream           bool               `json:"stream,omitempty"`
+	Temperature      *float64           `json:"temperature,omitempty"`
+	Seed             float64            `json:"seed,omitempty"`
+	Topp             float64            `json:"top_p,omitempty"`
+	TopK             int                `json:"top_k,omitempty"`
+	Stop             any                `json:"stop,omitempty"`
+	Tools            []dto.ToolCall     `json:"tools,omitempty"`
+	ResponseFormat   any                `json:"response_format,omitempty"`
+	FrequencyPenalty float64            `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64            `json:"presence_penalty,omitempty"`
+	Suffix           any                `json:"suffix,omitempty"`
+	StreamOptions    *dto.StreamOptions `json:"stream_options,omitempty"`
+	Prompt           any                `json:"prompt,omitempty"`
 }
 
 type Options struct {
@@ -35,7 +38,7 @@ type OllamaEmbeddingRequest struct {
 }
 
 type OllamaEmbeddingResponse struct {
-	Error     string    `json:"error,omitempty"`
-	Model     string    `json:"model"`
+	Error     string      `json:"error,omitempty"`
+	Model     string      `json:"model"`
 	Embedding [][]float64 `json:"embeddings,omitempty"`
 }

+ 3 - 0
relay/channel/ollama/relay-ollama.go

@@ -39,6 +39,9 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) *OllamaRequest {
 		ResponseFormat:   request.ResponseFormat,
 		FrequencyPenalty: request.FrequencyPenalty,
 		PresencePenalty:  request.PresencePenalty,
+		Prompt:           request.Prompt,
+		StreamOptions:    request.StreamOptions,
+		Suffix:           request.Suffix,
 	}
 }
 

+ 1 - 1
relay/common/relay_info.go

@@ -113,7 +113,7 @@ func GenRelayInfo(c *gin.Context) *RelayInfo {
 	if info.ChannelType == common.ChannelTypeOpenAI || info.ChannelType == common.ChannelTypeAnthropic ||
 		info.ChannelType == common.ChannelTypeAws || info.ChannelType == common.ChannelTypeGemini ||
 		info.ChannelType == common.ChannelCloudflare || info.ChannelType == common.ChannelTypeAzure ||
-	        info.ChannelType == common.ChannelTypeVolcEngine {
+		info.ChannelType == common.ChannelTypeVolcEngine || info.ChannelType == common.ChannelTypeOllama {
 		info.SupportStreamOptions = true
 	}
 	return info