Browse Source

fix: claude max_tokens

CaIon 1 year ago
parent
commit
53e8790024

+ 9 - 10
relay/channel/claude/dto.go

@@ -24,16 +24,15 @@ type ClaudeMessage struct {
 }
 
 type ClaudeRequest struct {
-	Model             string          `json:"model"`
-	Prompt            string          `json:"prompt,omitempty"`
-	System            string          `json:"system,omitempty"`
-	Messages          []ClaudeMessage `json:"messages,omitempty"`
-	MaxTokensToSample uint            `json:"max_tokens_to_sample,omitempty"`
-	MaxTokens         uint            `json:"max_tokens,omitempty"`
-	StopSequences     []string        `json:"stop_sequences,omitempty"`
-	Temperature       float64         `json:"temperature,omitempty"`
-	TopP              float64         `json:"top_p,omitempty"`
-	TopK              int             `json:"top_k,omitempty"`
+	Model         string          `json:"model"`
+	Prompt        string          `json:"prompt,omitempty"`
+	System        string          `json:"system,omitempty"`
+	Messages      []ClaudeMessage `json:"messages,omitempty"`
+	MaxTokens     uint            `json:"max_tokens,omitempty"`
+	StopSequences []string        `json:"stop_sequences,omitempty"`
+	Temperature   float64         `json:"temperature,omitempty"`
+	TopP          float64         `json:"top_p,omitempty"`
+	TopK          int             `json:"top_k,omitempty"`
 	//ClaudeMetadata    `json:"metadata,omitempty"`
 	Stream bool `json:"stream,omitempty"`
 }

+ 10 - 10
relay/channel/claude/relay-claude.go

@@ -28,17 +28,17 @@ func stopReasonClaude2OpenAI(reason string) string {
 
 func RequestOpenAI2ClaudeComplete(textRequest dto.GeneralOpenAIRequest) *ClaudeRequest {
 	claudeRequest := ClaudeRequest{
-		Model:             textRequest.Model,
-		Prompt:            "",
-		MaxTokensToSample: textRequest.MaxTokens,
-		StopSequences:     nil,
-		Temperature:       textRequest.Temperature,
-		TopP:              textRequest.TopP,
-		TopK:              textRequest.TopK,
-		Stream:            textRequest.Stream,
+		Model:         textRequest.Model,
+		Prompt:        "",
+		MaxTokens:     textRequest.MaxTokens,
+		StopSequences: nil,
+		Temperature:   textRequest.Temperature,
+		TopP:          textRequest.TopP,
+		TopK:          textRequest.TopK,
+		Stream:        textRequest.Stream,
 	}
-	if claudeRequest.MaxTokensToSample == 0 {
-		claudeRequest.MaxTokensToSample = 1000000
+	if claudeRequest.MaxTokens == 0 {
+		claudeRequest.MaxTokens = 4096
 	}
 	prompt := ""
 	for _, message := range textRequest.Messages {

+ 1 - 1
web/src/constants/channel.constants.js

@@ -26,7 +26,7 @@ export const CHANNEL_OPTIONS = [
     key: 33,
     text: 'AWS Claude',
     value: 33,
-    color: 'black',
+    color: 'indigo',
     label: 'AWS Claude',
   },
   {