Просмотр исходного кода

Merge branch 'Calcium-Ion:main' into main

Yan Tau 1 год назад
Родитель
Commit
b5bb708072

+ 1 - 1
README.md

@@ -66,7 +66,7 @@
 - `GET_MEDIA_TOKEN_NOT_STREAM`:是否在非流(`stream=false`)情况下统计图片token,默认为 `true`。
 - `UPDATE_TASK`:是否更新异步任务(Midjourney、Suno),默认为 `true`,关闭后将不会更新任务进度。
 - `GEMINI_MODEL_MAP`:Gemini模型指定版本(v1/v1beta),使用“模型:版本”指定,","分隔,例如:-e GEMINI_MODEL_MAP="gemini-1.5-pro-latest:v1beta,gemini-1.5-pro-001:v1beta",为空则使用默认配置
-
+- `COHERE_SAFETY_SETTING`:Cohere模型[安全设置](https://docs.cohere.com/docs/safety-modes#overview),可选值为 `NONE`, `CONTEXTUAL`,`STRICT`,默认为 `NONE`。
 ## 部署
 ### 部署要求
 - 本地数据库(默认):SQLite(Docker 部署默认使用 SQLite,必须挂载 `/data` 目录到宿主机)

+ 3 - 0
common/constants.go

@@ -112,6 +112,9 @@ var RelayTimeout = GetEnvOrDefault("RELAY_TIMEOUT", 0) // unit is second
 
 var GeminiSafetySetting = GetEnvOrDefaultString("GEMINI_SAFETY_SETTING", "BLOCK_NONE")
 
+// https://docs.cohere.com/docs/safety-modes Type; NONE/CONTEXTUAL/STRICT
+var CohereSafetySetting = GetEnvOrDefaultString("COHERE_SAFETY_SETTING", "NONE")
+
 const (
 	RequestIdKey = "X-Oneapi-Request-Id"
 )

+ 1 - 0
dto/text_response.go

@@ -34,6 +34,7 @@ type OpenAITextResponseChoice struct {
 
 type OpenAITextResponse struct {
 	Id      string                     `json:"id"`
+	Model   string                     `json:"model"`
 	Object  string                     `json:"object"`
 	Created int64                      `json:"created"`
 	Choices []OpenAITextResponseChoice `json:"choices"`

+ 1 - 0
relay/channel/claude/relay-claude.go

@@ -395,6 +395,7 @@ func ResponseClaude2OpenAI(reqMode int, claudeResponse *ClaudeResponse) *dto.Ope
 	if len(tools) > 0 {
 		choice.Message.ToolCalls = tools
 	}
+	fullTextResponse.Model = claudeResponse.Model
 	choices = append(choices, choice)
 	fullTextResponse.Choices = choices
 	return &fullTextResponse

+ 1 - 0
relay/channel/cohere/dto.go

@@ -8,6 +8,7 @@ type CohereRequest struct {
 	Message     string        `json:"message"`
 	Stream      bool          `json:"stream"`
 	MaxTokens   int           `json:"max_tokens"`
+	SafetyMode  string        `json:"safety_mode,omitempty"`
 }
 
 type ChatHistory struct {

+ 4 - 0
relay/channel/cohere/relay-cohere.go

@@ -23,6 +23,9 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
 		Stream:      textRequest.Stream,
 		MaxTokens:   textRequest.GetMaxTokens(),
 	}
+	if common.CohereSafetySetting != "NONE" {
+		cohereReq.SafetyMode = common.CohereSafetySetting
+	}
 	if cohereReq.MaxTokens == 0 {
 		cohereReq.MaxTokens = 4000
 	}
@@ -44,6 +47,7 @@ func requestOpenAI2Cohere(textRequest dto.GeneralOpenAIRequest) *CohereRequest {
 			})
 		}
 	}
+
 	return &cohereReq
 }
 

+ 1 - 1
service/token_counter.go

@@ -52,7 +52,7 @@ func InitTokenEncoders() {
 }
 
 func getModelDefaultTokenEncoder(model string) *tiktoken.Tiktoken {
-	if strings.HasPrefix(model, "gpt-4o") {
+	if strings.HasPrefix(model, "gpt-4o") || strings.HasPrefix(model, "chatgpt-4o") {
 		return cl200kTokenEncoder
 	}
 	return defaultTokenEncoder

+ 8 - 8
web/src/components/TokensTable.js

@@ -225,14 +225,14 @@ const TokensTable = () => {
                     onOpenLink('next-mj', record.key);
                   },
                 },
-                {
-                  node: 'item',
-                  key: 'lobe',
-                  name: 'Lobe Chat',
-                  onClick: () => {
-                    onOpenLink('lobe', record.key);
-                  },
-                },
+                // {
+                //   node: 'item',
+                //   key: 'lobe',
+                //   name: 'Lobe Chat',
+                //   onClick: () => {
+                //     onOpenLink('lobe', record.key);
+                //   },
+                // },
                 {
                   node: 'item',
                   key: 'ama',