Просмотр исходного кода

Merge remote-tracking branch 'origin/alpha' into alpha

t0ng7u 7 месяцев назад
Родитель
Commit
722b187f83
5 измененных файлов с 35 добавлено и 20 удалено
  1. 1 1
      common/page_info.go
  2. 2 0
      dto/openai_request.go
  3. 5 1
      relay/channel/ollama/dto.go
  4. 6 2
      relay/channel/ollama/relay-ollama.go
  5. 21 16
      types/error.go

+ 1 - 1
common/page_info.go

@@ -41,7 +41,7 @@ func (p *PageInfo) SetItems(items any) {
 func GetPageQuery(c *gin.Context) *PageInfo {
 	pageInfo := &PageInfo{}
 	// 手动获取并处理每个参数
-	if page, err := strconv.Atoi(c.Query("page")); err == nil {
+	if page, err := strconv.Atoi(c.Query("p")); err == nil {
 		pageInfo.Page = page
 	}
 	if pageSize, err := strconv.Atoi(c.Query("page_size")); err == nil {

+ 2 - 0
dto/openai_request.go

@@ -62,6 +62,8 @@ type GeneralOpenAIRequest struct {
 	Reasoning json.RawMessage `json:"reasoning,omitempty"`
 	// Ali Qwen Params
 	VlHighResolutionImages json.RawMessage `json:"vl_high_resolution_images,omitempty"`
+	// 用匿名参数接收额外参数,例如ollama的think参数在此接收
+	Extra map[string]json.RawMessage `json:"-"`
 }
 
 func (r *GeneralOpenAIRequest) ToMap() map[string]any {

+ 5 - 1
relay/channel/ollama/dto.go

@@ -1,6 +1,9 @@
 package ollama
 
-import "one-api/dto"
+import (
+	"encoding/json"
+	"one-api/dto"
+)
 
 type OllamaRequest struct {
 	Model            string                `json:"model,omitempty"`
@@ -19,6 +22,7 @@ type OllamaRequest struct {
 	Suffix           any                   `json:"suffix,omitempty"`
 	StreamOptions    *dto.StreamOptions    `json:"stream_options,omitempty"`
 	Prompt           any                   `json:"prompt,omitempty"`
+	Think            json.RawMessage       `json:"think,omitempty"`
 }
 
 type Options struct {

+ 6 - 2
relay/channel/ollama/relay-ollama.go

@@ -50,7 +50,7 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) (*OllamaRequest, err
 	} else {
 		Stop, _ = request.Stop.([]string)
 	}
-	return &OllamaRequest{
+	ollamaRequest := &OllamaRequest{
 		Model:            request.Model,
 		Messages:         messages,
 		Stream:           request.Stream,
@@ -67,7 +67,11 @@ func requestOpenAI2Ollama(request dto.GeneralOpenAIRequest) (*OllamaRequest, err
 		Prompt:           request.Prompt,
 		StreamOptions:    request.StreamOptions,
 		Suffix:           request.Suffix,
-	}, nil
+	}
+	if think, ok := request.Extra["think"]; ok {
+		ollamaRequest.Think = think
+	}
+	return ollamaRequest, nil
 }
 
 func requestOpenAI2Embeddings(request dto.EmbeddingRequest) *OllamaEmbeddingRequest {

+ 21 - 16
types/error.go

@@ -105,23 +105,25 @@ func (e *NewAPIError) SetMessage(message string) {
 func (e *NewAPIError) ToOpenAIError() OpenAIError {
 	switch e.ErrorType {
 	case ErrorTypeOpenAIError:
-		return e.RelayError.(OpenAIError)
-	case ErrorTypeClaudeError:
-		claudeError := e.RelayError.(ClaudeError)
-		return OpenAIError{
-			Message: e.Error(),
-			Type:    claudeError.Type,
-			Param:   "",
-			Code:    e.errorCode,
+		if openAIError, ok := e.RelayError.(OpenAIError); ok {
+			return openAIError
 		}
-	default:
-		return OpenAIError{
-			Message: e.Error(),
-			Type:    string(e.ErrorType),
-			Param:   "",
-			Code:    e.errorCode,
+	case ErrorTypeClaudeError:
+		if claudeError, ok := e.RelayError.(ClaudeError); ok {
+			return OpenAIError{
+				Message: e.Error(),
+				Type:    claudeError.Type,
+				Param:   "",
+				Code:    e.errorCode,
+			}
 		}
 	}
+	return OpenAIError{
+		Message: e.Error(),
+		Type:    string(e.ErrorType),
+		Param:   "",
+		Code:    e.errorCode,
+	}
 }
 
 func (e *NewAPIError) ToClaudeError() ClaudeError {
@@ -162,8 +164,11 @@ func NewOpenAIError(err error, errorCode ErrorCode, statusCode int) *NewAPIError
 
 func NewErrorWithStatusCode(err error, errorCode ErrorCode, statusCode int) *NewAPIError {
 	return &NewAPIError{
-		Err:        err,
-		RelayError: nil,
+		Err: err,
+		RelayError: OpenAIError{
+			Message: err.Error(),
+			Type:    string(errorCode),
+		},
 		ErrorType:  ErrorTypeNewAPIError,
 		StatusCode: statusCode,
 		errorCode:  errorCode,