Bladeren bron

fix: Invalid type for 'input[x].summary': expected an array of objects, but got null instead

CaIon 6 maanden geleden
bovenliggende
commit
e23f01f8d5
5 gewijzigde bestanden met toevoegingen van 73 en 28 verwijderingen
  1. 22 0
      common/json.go
  2. 40 25
      dto/openai_request.go
  3. 8 2
      relay/channel/openai/adaptor.go
  4. 2 0
      relay/channel/openai/relay_responses.go
  5. 1 1
      relay/common/relay_info.go

+ 22 - 0
common/json.go

@@ -20,3 +20,25 @@ func DecodeJson(reader *bytes.Reader, v any) error {
 func Marshal(v any) ([]byte, error) {
 	return json.Marshal(v)
 }
+
// GetJsonType reports the JSON value kind of data by inspecting its first
// non-whitespace byte. It returns one of "object", "array", "string",
// "boolean", "null", or "number", and "unknown" for empty/whitespace-only
// input. It is a cheap heuristic: it does not validate that the payload is
// actually well-formed JSON (e.g. any token starting with 't'/'f' is
// classified "boolean").
func GetJsonType(data json.RawMessage) string {
	data = bytes.TrimSpace(data)
	if len(data) == 0 {
		return "unknown"
	}
	// data is already trimmed above; index it directly instead of
	// re-trimming (the original called bytes.TrimSpace a second time).
	switch data[0] {
	case '{':
		return "object"
	case '[':
		return "array"
	case '"':
		return "string"
	case 't', 'f':
		return "boolean"
	case 'n':
		return "null"
	default:
		// Digits, '-', and anything unrecognized fall through here.
		return "number"
	}
}

+ 40 - 25
dto/openai_request.go

@@ -760,27 +760,27 @@ type WebSearchOptions struct {
 
 // https://platform.openai.com/docs/api-reference/responses/create
 type OpenAIResponsesRequest struct {
-	Model              string           `json:"model"`
-	Input              any              `json:"input,omitempty"`
-	Include            json.RawMessage  `json:"include,omitempty"`
-	Instructions       json.RawMessage  `json:"instructions,omitempty"`
-	MaxOutputTokens    uint             `json:"max_output_tokens,omitempty"`
-	Metadata           json.RawMessage  `json:"metadata,omitempty"`
-	ParallelToolCalls  bool             `json:"parallel_tool_calls,omitempty"`
-	PreviousResponseID string           `json:"previous_response_id,omitempty"`
-	Reasoning          *Reasoning       `json:"reasoning,omitempty"`
-	ServiceTier        string           `json:"service_tier,omitempty"`
-	Store              bool             `json:"store,omitempty"`
-	Stream             bool             `json:"stream,omitempty"`
-	Temperature        float64          `json:"temperature,omitempty"`
-	Text               json.RawMessage  `json:"text,omitempty"`
-	ToolChoice         json.RawMessage  `json:"tool_choice,omitempty"`
-	Tools              []map[string]any `json:"tools,omitempty"` // 需要处理的参数很少,MCP 参数太多不确定,所以用 map
-	TopP               float64          `json:"top_p,omitempty"`
-	Truncation         string           `json:"truncation,omitempty"`
-	User               string           `json:"user,omitempty"`
-	MaxToolCalls       uint             `json:"max_tool_calls,omitempty"`
-	Prompt             json.RawMessage  `json:"prompt,omitempty"`
+	Model              string          `json:"model"`
+	Input              json.RawMessage `json:"input,omitempty"`
+	Include            json.RawMessage `json:"include,omitempty"`
+	Instructions       json.RawMessage `json:"instructions,omitempty"`
+	MaxOutputTokens    uint            `json:"max_output_tokens,omitempty"`
+	Metadata           json.RawMessage `json:"metadata,omitempty"`
+	ParallelToolCalls  bool            `json:"parallel_tool_calls,omitempty"`
+	PreviousResponseID string          `json:"previous_response_id,omitempty"`
+	Reasoning          *Reasoning      `json:"reasoning,omitempty"`
+	ServiceTier        string          `json:"service_tier,omitempty"`
+	Store              bool            `json:"store,omitempty"`
+	Stream             bool            `json:"stream,omitempty"`
+	Temperature        float64         `json:"temperature,omitempty"`
+	Text               json.RawMessage `json:"text,omitempty"`
+	ToolChoice         json.RawMessage `json:"tool_choice,omitempty"`
+	Tools              json.RawMessage `json:"tools,omitempty"` // kept as raw JSON (MCP tool schemas vary too much to type strictly); decode on demand via GetToolsMap
+	TopP               float64         `json:"top_p,omitempty"`
+	Truncation         string          `json:"truncation,omitempty"`
+	User               string          `json:"user,omitempty"`
+	MaxToolCalls       uint            `json:"max_tool_calls,omitempty"`
+	Prompt             json.RawMessage `json:"prompt,omitempty"`
 }
 
 func (r *OpenAIResponsesRequest) GetTokenCountMeta() *types.TokenCountMeta {
@@ -832,8 +832,7 @@ func (r *OpenAIResponsesRequest) GetTokenCountMeta() *types.TokenCountMeta {
 	}
 
 	if len(r.Tools) > 0 {
-		toolStr, _ := common.Marshal(r.Tools)
-		texts = append(texts, string(toolStr))
+		texts = append(texts, string(r.Tools))
 	}
 
 	return &types.TokenCountMeta{
@@ -853,6 +852,14 @@ func (r *OpenAIResponsesRequest) SetModelName(modelName string) {
 	}
 }
 
+func (r *OpenAIResponsesRequest) GetToolsMap() []map[string]any {
+	var toolsMap []map[string]any
+	if len(r.Tools) > 0 {
+		_ = common.Unmarshal(r.Tools, &toolsMap)
+	}
+	return toolsMap
+}
+
 type Reasoning struct {
 	Effort  string `json:"effort,omitempty"`
 	Summary string `json:"summary,omitempty"`
@@ -879,13 +886,21 @@ func (r *OpenAIResponsesRequest) ParseInput() []MediaInput {
 	var inputs []MediaInput
 
 	// Try string first
-	if str, ok := r.Input.(string); ok {
+	// if str, ok := common.GetJsonType(r.Input); ok {
+	// 	inputs = append(inputs, MediaInput{Type: "input_text", Text: str})
+	// 	return inputs
+	// }
+	if common.GetJsonType(r.Input) == "string" {
+		var str string
+		_ = common.Unmarshal(r.Input, &str)
 		inputs = append(inputs, MediaInput{Type: "input_text", Text: str})
 		return inputs
 	}
 
 	// Try array of parts
-	if array, ok := r.Input.([]any); ok {
+	if common.GetJsonType(r.Input) == "array" {
+		var array []any
+		_ = common.Unmarshal(r.Input, &array)
 		for _, itemAny := range array {
 			// Already parsed MediaInput
 			if media, ok := itemAny.(MediaInput); ok {

+ 8 - 2
relay/channel/openai/adaptor.go

@@ -537,8 +537,14 @@ func detectImageMimeType(filename string) string {
 func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
 	//  转换模型推理力度后缀
 	effort, originModel := parseReasoningEffortFromModelSuffix(request.Model)
-	if effort != "" && request.Reasoning != nil {
-		request.Reasoning.Effort = effort
+	if effort != "" {
+		if request.Reasoning == nil {
+			request.Reasoning = &dto.Reasoning{
+				Effort: effort,
+			}
+		} else {
+			request.Reasoning.Effort = effort
+		}
 		request.Model = originModel
 	}
 	return request, nil

+ 2 - 0
relay/channel/openai/relay_responses.go

@@ -92,6 +92,8 @@ func OaiResponsesStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp
 					}
 				}
 			}
+		} else {
+			logger.LogError(c, "failed to unmarshal stream response: "+err.Error())
 		}
 		return true
 	})

+ 1 - 1
relay/common/relay_info.go

@@ -313,7 +313,7 @@ func GenRelayInfoResponses(c *gin.Context, request *dto.OpenAIResponsesRequest)
 		BuiltInTools: make(map[string]*BuildInToolInfo),
 	}
 	if len(request.Tools) > 0 {
-		for _, tool := range request.Tools {
+		for _, tool := range request.GetToolsMap() {
 			toolType := common.Interface2String(tool["type"])
 			info.ResponsesUsageInfo.BuiltInTools[toolType] = &BuildInToolInfo{
 				ToolName:  toolType,