Просмотр исходного кода

feat: support zhipu's ChatGLM (close #289)

JustSong 2 лет назад
Родитель
Commit
26c6719ea3
9 измененных файлов с 403 добавлено и 49 удалено
  1. 1 2
      README.md
  2. 2 0
      common/constants.go
  3. 6 2
      common/model-ratio.go
  4. 27 18
      controller/model.go
  5. 59 6
      controller/relay-text.go
  6. 290 0
      controller/relay-zhipu.go
  7. 1 2
      go.mod
  8. 2 5
      go.sum
  9. 15 14
      web/src/constants/channel.constants.js

+ 1 - 2
README.md

@@ -64,6 +64,7 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用
    + [x] [Anthropic Claude 系列模型](https://anthropic.com)
    + [x] [Google PaLM2 系列模型](https://developers.generativeai.google)
    + [x] [百度文心一言系列模型](https://cloud.baidu.com/doc/WENXINWORKSHOP/index.html)
+   + [x] [智谱 ChatGLM 系列模型](https://bigmodel.cn)
    + [x] [API Distribute](https://api.gptjk.top/register?aff=QGxj)
    + [x] [OpenAI-SB](https://openai-sb.com)
    + [x] [API2D](https://api2d.com/r/197971)
@@ -95,8 +96,6 @@ _✨ All in one 的 OpenAI 接口,整合各种 API 访问方式,开箱即用
     + 邮箱登录注册以及通过邮箱进行密码重置。
     + [GitHub 开放授权](https://github.com/settings/applications/new)。
     + 微信公众号授权(需要额外部署 [WeChat Server](https://github.com/songquanpeng/wechat-server))。
-21. 支持 [ChatGLM](https://github.com/THUDM/ChatGLM2-6B)。
-22. 未来其他大模型开放 API 后,将第一时间支持,并将其封装成同样的 API 访问方式。
 
 ## 部署
 ### 基于 Docker 进行部署

+ 2 - 0
common/constants.go

@@ -153,6 +153,7 @@ const (
 	ChannelTypeAIGC2D    = 13
 	ChannelTypeAnthropic = 14
 	ChannelTypeBaidu     = 15
+	ChannelTypeZhipu     = 16
 )
 
 var ChannelBaseURLs = []string{
@@ -172,4 +173,5 @@ var ChannelBaseURLs = []string{
 	"https://api.aigc2d.com",        // 13
 	"https://api.anthropic.com",     // 14
 	"https://aip.baidubce.com",      // 15
+	"https://open.bigmodel.cn",      // 16
 }

+ 6 - 2
common/model-ratio.go

@@ -8,6 +8,7 @@ import "encoding/json"
 // https://openai.com/pricing
 // TODO: when a new api is enabled, check the pricing here
 // 1 === $0.002 / 1K tokens
+// 1 === ¥0.014 / 1k tokens
 var ModelRatio = map[string]float64{
 	"gpt-4":                   15,
 	"gpt-4-0314":              15,
@@ -39,9 +40,12 @@ var ModelRatio = map[string]float64{
 	"dall-e":                  8,
 	"claude-instant-1":        0.75,
 	"claude-2":                30,
-	"ERNIE-Bot":               1,    // 0.012元/千tokens
-	"ERNIE-Bot-turbo":         0.67, // 0.008元/千tokens
+	"ERNIE-Bot":               0.8572, // ¥0.012 / 1k tokens
+	"ERNIE-Bot-turbo":         0.5715, // ¥0.008 / 1k tokens
 	"PaLM-2":                  1,
+	"chatglm_pro":             0.7143, // ¥0.01 / 1k tokens
+	"chatglm_std":             0.3572, // ¥0.005 / 1k tokens
+	"chatglm_lite":            0.1429, // ¥0.002 / 1k tokens
 }
 
 func ModelRatio2JSONString() string {

+ 27 - 18
controller/model.go

@@ -252,24 +252,6 @@ func init() {
 			Root:       "code-davinci-edit-001",
 			Parent:     nil,
 		},
-		{
-			Id:         "ChatGLM",
-			Object:     "model",
-			Created:    1677649963,
-			OwnedBy:    "thudm",
-			Permission: permission,
-			Root:       "ChatGLM",
-			Parent:     nil,
-		},
-		{
-			Id:         "ChatGLM2",
-			Object:     "model",
-			Created:    1677649963,
-			OwnedBy:    "thudm",
-			Permission: permission,
-			Root:       "ChatGLM2",
-			Parent:     nil,
-		},
 		{
 			Id:         "claude-instant-1",
 			Object:     "model",
@@ -315,6 +297,33 @@ func init() {
 			Root:       "PaLM-2",
 			Parent:     nil,
 		},
+		{
+			Id:         "chatglm_pro",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "zhipu",
+			Permission: permission,
+			Root:       "chatglm_pro",
+			Parent:     nil,
+		},
+		{
+			Id:         "chatglm_std",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "zhipu",
+			Permission: permission,
+			Root:       "chatglm_std",
+			Parent:     nil,
+		},
+		{
+			Id:         "chatglm_lite",
+			Object:     "model",
+			Created:    1677649963,
+			OwnedBy:    "zhipu",
+			Permission: permission,
+			Root:       "chatglm_lite",
+			Parent:     nil,
+		},
 	}
 	openAIModelsMap = make(map[string]OpenAIModels)
 	for _, model := range openAIModels {

+ 59 - 6
controller/relay-text.go

@@ -19,6 +19,7 @@ const (
 	APITypeClaude
 	APITypePaLM
 	APITypeBaidu
+	APITypeZhipu
 )
 
 func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
@@ -84,6 +85,8 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		apiType = APITypeBaidu
 	} else if strings.HasPrefix(textRequest.Model, "PaLM") {
 		apiType = APITypePaLM
+	} else if strings.HasPrefix(textRequest.Model, "chatglm_") {
+		apiType = APITypeZhipu
 	}
 	baseURL := common.ChannelBaseURLs[channelType]
 	requestURL := c.Request.URL.String()
@@ -134,6 +137,12 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 		apiKey := c.Request.Header.Get("Authorization")
 		apiKey = strings.TrimPrefix(apiKey, "Bearer ")
 		fullRequestURL += "?key=" + apiKey
+	case APITypeZhipu:
+		method := "invoke"
+		if textRequest.Stream {
+			method = "sse-invoke"
+		}
+		fullRequestURL = fmt.Sprintf("https://open.bigmodel.cn/api/paas/v3/model-api/%s/%s", textRequest.Model, method)
 	}
 	var promptTokens int
 	var completionTokens int
@@ -200,6 +209,13 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
 		}
 		requestBody = bytes.NewBuffer(jsonStr)
+	case APITypeZhipu:
+		zhipuRequest := requestOpenAI2Zhipu(textRequest)
+		jsonStr, err := json.Marshal(zhipuRequest)
+		if err != nil {
+			return errorWrapper(err, "marshal_text_request_failed", http.StatusInternalServerError)
+		}
+		requestBody = bytes.NewBuffer(jsonStr)
 	}
 	req, err := http.NewRequest(c.Request.Method, fullRequestURL, requestBody)
 	if err != nil {
@@ -221,6 +237,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			anthropicVersion = "2023-06-01"
 		}
 		req.Header.Set("anthropic-version", anthropicVersion)
+	case APITypeZhipu:
+		token := getZhipuToken(apiKey)
+		req.Header.Set("Authorization", token)
 	}
 	req.Header.Set("Content-Type", c.Request.Header.Get("Content-Type"))
 	req.Header.Set("Accept", c.Request.Header.Get("Accept"))
@@ -252,11 +271,15 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if strings.HasPrefix(textRequest.Model, "gpt-4") {
 				completionRatio = 2
 			}
-			if isStream && apiType != APITypeBaidu {
+			if isStream && apiType != APITypeBaidu && apiType != APITypeZhipu {
 				completionTokens = countTokenText(streamResponseText, textRequest.Model)
 			} else {
 				promptTokens = textResponse.Usage.PromptTokens
 				completionTokens = textResponse.Usage.CompletionTokens
+				if apiType == APITypeZhipu {
+					// zhipu's API does not return prompt tokens & completion tokens
+					promptTokens = textResponse.Usage.TotalTokens
+				}
 			}
 			quota = promptTokens + int(float64(completionTokens)*completionRatio)
 			quota = int(float64(quota) * ratio)
@@ -302,7 +325,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if err != nil {
 				return err
 			}
-			textResponse.Usage = *usage
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
 			return nil
 		}
 	case APITypeClaude:
@@ -318,7 +343,9 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if err != nil {
 				return err
 			}
-			textResponse.Usage = *usage
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
 			return nil
 		}
 	case APITypeBaidu:
@@ -327,14 +354,18 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if err != nil {
 				return err
 			}
-			textResponse.Usage = *usage
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
 			return nil
 		} else {
 			err, usage := baiduHandler(c, resp)
 			if err != nil {
 				return err
 			}
-			textResponse.Usage = *usage
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
 			return nil
 		}
 	case APITypePaLM:
@@ -350,7 +381,29 @@ func relayTextHelper(c *gin.Context, relayMode int) *OpenAIErrorWithStatusCode {
 			if err != nil {
 				return err
 			}
-			textResponse.Usage = *usage
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
+			return nil
+		}
+	case APITypeZhipu:
+		if isStream {
+			err, usage := zhipuStreamHandler(c, resp)
+			if err != nil {
+				return err
+			}
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
+			return nil
+		} else {
+			err, usage := zhipuHandler(c, resp)
+			if err != nil {
+				return err
+			}
+			if usage != nil {
+				textResponse.Usage = *usage
+			}
 			return nil
 		}
 	default:

+ 290 - 0
controller/relay-zhipu.go

@@ -0,0 +1,290 @@
+package controller
+
+import (
+	"bufio"
+	"encoding/json"
+	"github.com/gin-gonic/gin"
+	"github.com/golang-jwt/jwt"
+	"io"
+	"net/http"
+	"one-api/common"
+	"strings"
+	"sync"
+	"time"
+)
+
+// https://open.bigmodel.cn/doc/api#chatglm_std
+// chatglm_std, chatglm_lite
+// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
+// https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke
+
// ZhipuMessage is one chat turn in Zhipu's prompt format (role + content).
type ZhipuMessage struct {
	Role    string `json:"role"`
	Content string `json:"content"`
}

// ZhipuRequest is the request body for Zhipu's model-api invoke /
// sse-invoke endpoints.
type ZhipuRequest struct {
	Prompt      []ZhipuMessage `json:"prompt"`
	Temperature float64        `json:"temperature,omitempty"`
	TopP        float64        `json:"top_p,omitempty"`
	RequestId   string         `json:"request_id,omitempty"`
	Incremental bool           `json:"incremental,omitempty"`
}

// ZhipuResponseData is the payload of a successful non-streaming reply.
// Usage is embedded so token counts are read directly off the struct.
type ZhipuResponseData struct {
	TaskId     string         `json:"task_id"`
	RequestId  string         `json:"request_id"`
	TaskStatus string         `json:"task_status"`
	Choices    []ZhipuMessage `json:"choices"`
	Usage      `json:"usage"`
}

// ZhipuResponse is the non-streaming envelope: Success/Code/Msg wrap Data.
type ZhipuResponse struct {
	Code    int               `json:"code"`
	Msg     string            `json:"msg"`
	Success bool              `json:"success"`
	Data    ZhipuResponseData `json:"data"`
}

// ZhipuStreamMetaResponse is the final "meta:" event of an SSE stream; it
// carries the usage totals for the whole generation.
type ZhipuStreamMetaResponse struct {
	RequestId  string `json:"request_id"`
	TaskId     string `json:"task_id"`
	TaskStatus string `json:"task_status"`
	Usage      `json:"usage"`
}

// zhipuTokenData caches one signed JWT together with its local expiry time.
type zhipuTokenData struct {
	Token      string
	ExpiryTime time.Time
}

// zhipuTokens maps a raw "<id>.<secret>" API key to its cached zhipuTokenData.
var zhipuTokens sync.Map

// expSeconds is the JWT lifetime in seconds (24 hours).
var expSeconds int64 = 24 * 3600
+
+func getZhipuToken(apikey string) string {
+	data, ok := zhipuTokens.Load(apikey)
+	if ok {
+		tokenData := data.(zhipuTokenData)
+		if time.Now().Before(tokenData.ExpiryTime) {
+			return tokenData.Token
+		}
+	}
+
+	split := strings.Split(apikey, ".")
+	if len(split) != 2 {
+		common.SysError("invalid zhipu key: " + apikey)
+		return ""
+	}
+
+	id := split[0]
+	secret := split[1]
+
+	expMillis := time.Now().Add(time.Duration(expSeconds)*time.Second).UnixNano() / 1e6
+	expiryTime := time.Now().Add(time.Duration(expSeconds) * time.Second)
+
+	timestamp := time.Now().UnixNano() / 1e6
+
+	payload := jwt.MapClaims{
+		"api_key":   id,
+		"exp":       expMillis,
+		"timestamp": timestamp,
+	}
+
+	token := jwt.NewWithClaims(jwt.SigningMethodHS256, payload)
+
+	token.Header["alg"] = "HS256"
+	token.Header["sign_type"] = "SIGN"
+
+	tokenString, err := token.SignedString([]byte(secret))
+	if err != nil {
+		return ""
+	}
+
+	zhipuTokens.Store(apikey, zhipuTokenData{
+		Token:      tokenString,
+		ExpiryTime: expiryTime,
+	})
+
+	return tokenString
+}
+
+func requestOpenAI2Zhipu(request GeneralOpenAIRequest) *ZhipuRequest {
+	messages := make([]ZhipuMessage, 0, len(request.Messages))
+	for _, message := range request.Messages {
+		messages = append(messages, ZhipuMessage{
+			Role:    message.Role,
+			Content: message.Content,
+		})
+	}
+	return &ZhipuRequest{
+		Prompt:      messages,
+		Temperature: request.Temperature,
+		TopP:        request.TopP,
+		Incremental: false,
+	}
+}
+
+func responseZhipu2OpenAI(response *ZhipuResponse) *OpenAITextResponse {
+	fullTextResponse := OpenAITextResponse{
+		Id:      response.Data.TaskId,
+		Object:  "chat.completion",
+		Created: common.GetTimestamp(),
+		Choices: make([]OpenAITextResponseChoice, 0, len(response.Data.Choices)),
+		Usage:   response.Data.Usage,
+	}
+	for i, choice := range response.Data.Choices {
+		openaiChoice := OpenAITextResponseChoice{
+			Index: i,
+			Message: Message{
+				Role:    choice.Role,
+				Content: strings.Trim(choice.Content, "\""),
+			},
+			FinishReason: "",
+		}
+		if i == len(response.Data.Choices)-1 {
+			openaiChoice.FinishReason = "stop"
+		}
+		fullTextResponse.Choices = append(fullTextResponse.Choices, openaiChoice)
+	}
+	return &fullTextResponse
+}
+
+func streamResponseZhipu2OpenAI(zhipuResponse string) *ChatCompletionsStreamResponse {
+	var choice ChatCompletionsStreamResponseChoice
+	choice.Delta.Content = zhipuResponse
+	choice.FinishReason = ""
+	response := ChatCompletionsStreamResponse{
+		Object:  "chat.completion.chunk",
+		Created: common.GetTimestamp(),
+		Model:   "chatglm",
+		Choices: []ChatCompletionsStreamResponseChoice{choice},
+	}
+	return &response
+}
+
+func streamMetaResponseZhipu2OpenAI(zhipuResponse *ZhipuStreamMetaResponse) (*ChatCompletionsStreamResponse, *Usage) {
+	var choice ChatCompletionsStreamResponseChoice
+	choice.Delta.Content = ""
+	choice.FinishReason = "stop"
+	response := ChatCompletionsStreamResponse{
+		Id:      zhipuResponse.RequestId,
+		Object:  "chat.completion.chunk",
+		Created: common.GetTimestamp(),
+		Model:   "chatglm",
+		Choices: []ChatCompletionsStreamResponseChoice{choice},
+	}
+	return &response, &zhipuResponse.Usage
+}
+
// zhipuStreamHandler relays Zhipu's SSE stream to the client as OpenAI-style
// chunks. It returns the Usage parsed from the final "meta:" event, or nil
// if the stream ended without one.
func zhipuStreamHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
	var usage *Usage
	scanner := bufio.NewScanner(resp.Body)
	// Custom split: one token per "\n"-terminated line (Zhipu's SSE framing).
	scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
		if atEOF && len(data) == 0 {
			return 0, nil, nil
		}
		if i := strings.Index(string(data), "\n"); i >= 0 {
			return i + 1, data[0:i], nil
		}
		if atEOF {
			return len(data), data, nil
		}
		return 0, nil, nil
	})
	// Reader goroutine fans events out by their 5-byte prefix:
	// "data:" -> content chunks, "meta:" -> the final usage event.
	dataChan := make(chan string)
	metaChan := make(chan string)
	stopChan := make(chan bool)
	go func() {
		for scanner.Scan() {
			data := scanner.Text()
			data = strings.Trim(data, "\"")
			if len(data) < 5 { // ignore blank line or wrong format
				continue
			}
			if data[:5] == "data:" {
				dataChan <- data[5:]
			} else if data[:5] == "meta:" {
				metaChan <- data[5:]
			}
		}
		stopChan <- true
	}()
	// Standard SSE response headers; X-Accel-Buffering disables proxy buffering.
	c.Writer.Header().Set("Content-Type", "text/event-stream")
	c.Writer.Header().Set("Cache-Control", "no-cache")
	c.Writer.Header().Set("Connection", "keep-alive")
	c.Writer.Header().Set("Transfer-Encoding", "chunked")
	c.Writer.Header().Set("X-Accel-Buffering", "no")
	c.Stream(func(w io.Writer) bool {
		select {
		case data := <-dataChan:
			response := streamResponseZhipu2OpenAI(data)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				// Log and keep streaming; one bad chunk should not kill the stream.
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case data := <-metaChan:
			var zhipuResponse ZhipuStreamMetaResponse
			err := json.Unmarshal([]byte(data), &zhipuResponse)
			if err != nil {
				common.SysError("error unmarshalling stream response: " + err.Error())
				return true
			}
			// The meta event carries the authoritative token usage.
			response, zhipuUsage := streamMetaResponseZhipu2OpenAI(&zhipuResponse)
			jsonResponse, err := json.Marshal(response)
			if err != nil {
				common.SysError("error marshalling stream response: " + err.Error())
				return true
			}
			usage = zhipuUsage
			c.Render(-1, common.CustomEvent{Data: "data: " + string(jsonResponse)})
			return true
		case <-stopChan:
			// Upstream closed: emit the OpenAI terminator and end the stream.
			c.Render(-1, common.CustomEvent{Data: "data: [DONE]"})
			return false
		}
	})
	err := resp.Body.Close()
	if err != nil {
		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
	}
	return nil, usage
}
+
+func zhipuHandler(c *gin.Context, resp *http.Response) (*OpenAIErrorWithStatusCode, *Usage) {
+	var zhipuResponse ZhipuResponse
+	responseBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return errorWrapper(err, "read_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = resp.Body.Close()
+	if err != nil {
+		return errorWrapper(err, "close_response_body_failed", http.StatusInternalServerError), nil
+	}
+	err = json.Unmarshal(responseBody, &zhipuResponse)
+	if err != nil {
+		return errorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	if !zhipuResponse.Success {
+		return &OpenAIErrorWithStatusCode{
+			OpenAIError: OpenAIError{
+				Message: zhipuResponse.Msg,
+				Type:    "zhipu_error",
+				Param:   "",
+				Code:    zhipuResponse.Code,
+			},
+			StatusCode: resp.StatusCode,
+		}, nil
+	}
+	fullTextResponse := responseZhipu2OpenAI(&zhipuResponse)
+	jsonResponse, err := json.Marshal(fullTextResponse)
+	if err != nil {
+		return errorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError), nil
+	}
+	c.Writer.Header().Set("Content-Type", "application/json")
+	c.Writer.WriteHeader(resp.StatusCode)
+	_, err = c.Writer.Write(jsonResponse)
+	return nil, &fullTextResponse.Usage
+}

+ 1 - 2
go.mod

@@ -11,6 +11,7 @@ require (
 	github.com/gin-gonic/gin v1.9.1
 	github.com/go-playground/validator/v10 v10.14.0
 	github.com/go-redis/redis/v8 v8.11.5
+	github.com/golang-jwt/jwt v3.2.2+incompatible
 	github.com/google/uuid v1.3.0
 	github.com/pkoukk/tiktoken-go v0.1.1
 	golang.org/x/crypto v0.9.0
@@ -20,7 +21,6 @@ require (
 )
 
 require (
-	github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff // indirect
 	github.com/bytedance/sonic v1.9.1 // indirect
 	github.com/cespare/xxhash/v2 v2.1.2 // indirect
 	github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
@@ -32,7 +32,6 @@ require (
 	github.com/go-playground/universal-translator v0.18.1 // indirect
 	github.com/go-sql-driver/mysql v1.6.0 // indirect
 	github.com/goccy/go-json v0.10.2 // indirect
-	github.com/gomodule/redigo v2.0.0+incompatible // indirect
 	github.com/gorilla/context v1.1.1 // indirect
 	github.com/gorilla/securecookie v1.1.1 // indirect
 	github.com/gorilla/sessions v1.2.1 // indirect

+ 2 - 5
go.sum

@@ -1,5 +1,3 @@
-github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff h1:RmdPFa+slIr4SCBg4st/l/vZWVe9QJKMXGO60Bxbe04=
-github.com/boj/redistore v0.0.0-20180917114910-cd5dcc76aeff/go.mod h1:+RTT1BOk5P97fT2CiHkbFQwkK3mjsFAP6zCYV2aXtjw=
 github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
 github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
 github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
@@ -54,10 +52,10 @@ github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB
 github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/golang-jwt/jwt v3.2.2+incompatible h1:IfV12K8xAKAnZqdXVzCZ+TOjboZ2keLg81eXfW3O+oY=
+github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I=
 github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/gomodule/redigo v2.0.0+incompatible h1:K/R+8tc58AaqLkqG2Ol3Qk+DR/TlNuhuh457pBFPtt0=
-github.com/gomodule/redigo v2.0.0+incompatible/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4=
 github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -67,7 +65,6 @@ github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8
 github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
 github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=
 github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
-github.com/gorilla/sessions v1.1.1/go.mod h1:8KCfur6+4Mqcc6S0FEfKuN15Vl5MgXW92AE8ovaJD0w=
 github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7FsgI=
 github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
 github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=

+ 15 - 14
web/src/constants/channel.constants.js

@@ -1,17 +1,18 @@
 export const CHANNEL_OPTIONS = [
   { key: 1, text: 'OpenAI', value: 1, color: 'green' },
-  { key: 14, text: 'Anthropic', value: 14, color: 'black' },
-  { key: 8, text: '自定义', value: 8, color: 'pink' },
-  { key: 3, text: 'Azure', value: 3, color: 'olive' },
-  { key: 11, text: 'PaLM', value: 11, color: 'orange' },
-  { key: 15, text: 'Baidu', value: 15, color: 'blue' },
-  { key: 2, text: 'API2D', value: 2, color: 'blue' },
-  { key: 4, text: 'CloseAI', value: 4, color: 'teal' },
-  { key: 5, text: 'OpenAI-SB', value: 5, color: 'brown' },
-  { key: 6, text: 'OpenAI Max', value: 6, color: 'violet' },
-  { key: 7, text: 'OhMyGPT', value: 7, color: 'purple' },
-  { key: 9, text: 'AI.LS', value: 9, color: 'yellow' },
-  { key: 10, text: 'AI Proxy', value: 10, color: 'purple' },
-  { key: 12, text: 'API2GPT', value: 12, color: 'blue' },
-  { key: 13, text: 'AIGC2D', value: 13, color: 'purple' }
+  { key: 14, text: 'Anthropic Claude', value: 14, color: 'black' },
+  { key: 3, text: 'Azure OpenAI', value: 3, color: 'olive' },
+  { key: 11, text: 'Google PaLM2', value: 11, color: 'orange' },
+  { key: 15, text: '百度文心千帆', value: 15, color: 'blue' },
+  { key: 16, text: '智谱 ChatGLM', value: 16, color: 'violet' },
+  { key: 8, text: '自定义渠道', value: 8, color: 'pink' },
+  { key: 2, text: '代理:API2D', value: 2, color: 'blue' },
+  { key: 5, text: '代理:OpenAI-SB', value: 5, color: 'brown' },
+  { key: 7, text: '代理:OhMyGPT', value: 7, color: 'purple' },
+  { key: 10, text: '代理:AI Proxy', value: 10, color: 'purple' },
+  { key: 4, text: '代理:CloseAI', value: 4, color: 'teal' },
+  { key: 6, text: '代理:OpenAI Max', value: 6, color: 'violet' },
+  { key: 9, text: '代理:AI.LS', value: 9, color: 'yellow' },
+  { key: 12, text: '代理:API2GPT', value: 12, color: 'blue' },
+  { key: 13, text: '代理:AIGC2D', value: 13, color: 'purple' }
 ];