
Update chat_service: support proxy for LLM client

StrayWarrior 1 week ago
parent
commit
2dc39aaf6e
3 changed files with 23 additions and 0 deletions
  1. chat_service.py (+11 -0)
  2. configs/dev.yaml (+6 -0)
  3. configs/prod.yaml (+6 -0)

+ 11 - 0
chat_service.py

@@ -7,6 +7,10 @@ import os
 import threading
 from typing import List, Dict, Optional
 from enum import Enum, auto
+
+import httpx
+
+import configs
 from logging_service import logger
 import cozepy
 from cozepy import Coze, TokenAuth, Message, ChatStatus, MessageType, JWTOAuthApp, JWTAuth
@@ -56,6 +60,13 @@ class OpenAICompatible:
         elif model_name in deepseek_models:
             llm_client = OpenAI(api_key=DEEPSEEK_API_TOKEN, base_url=DEEPSEEK_BASE_URL, **kwargs)
         elif model_name in openai_models:
+            socks_conf = configs.get().get('system', {}).get('outside_proxy', {}).get('socks5', {})
+            if socks_conf:
+                http_client = httpx.Client(
+                    timeout=httpx.Timeout(600, connect=5.0),
+                    proxy=f"socks5://{socks_conf['hostname']}:{socks_conf['port']}"
+                )
+                kwargs['http_client'] = http_client
             llm_client = OpenAI(api_key=OPENAI_API_TOKEN, base_url=OPENAI_BASE_URL, **kwargs)
         else:
             raise Exception("Unsupported model: %s" % model_name)
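
For context, when the `system.outside_proxy.socks5` key is present, the added branch builds the OpenAI client roughly as below. This is a minimal standalone sketch, not the repo's exact wiring: the literal API key, base URL, and proxy address stand in for OPENAI_API_TOKEN, OPENAI_BASE_URL, and the values read from the YAML configs in this commit.

```python
import httpx
from openai import OpenAI

# Stand-in for configs.get()['system']['outside_proxy']['socks5'];
# the address matches configs/dev.yaml and configs/prod.yaml below.
socks_conf = {"hostname": "192.168.31.201", "port": 1083}

# Long read timeout for slow completions, short connect timeout so a
# dead proxy fails fast. Only the OpenAI branch gets this client; the
# DeepSeek branch keeps a direct connection.
http_client = httpx.Client(
    timeout=httpx.Timeout(600, connect=5.0),
    proxy=f"socks5://{socks_conf['hostname']}:{socks_conf['port']}",
)

llm_client = OpenAI(
    api_key="sk-...",                      # OPENAI_API_TOKEN in chat_service.py
    base_url="https://api.openai.com/v1",  # OPENAI_BASE_URL in chat_service.py
    http_client=http_client,               # the kwargs['http_client'] override
)
```

Note that SOCKS support in httpx is an optional extra: without the `socksio` package (`pip install 'httpx[socks]'`), the client errors out as soon as a `socks5://` proxy is used.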

+ 6 - 0
configs/dev.yaml

@@ -48,6 +48,12 @@ chat_api:
     text_model: ep-20250414202859-6nkz5
     multimodal_model: ep-20250421193334-nz5wd
 
+system:
+  outside_proxy:
+    socks5:
+      hostname: 192.168.31.201
+      port: 1083
+
 debug_flags:
   disable_llm_api_call: False
   use_local_user_storage: True
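
Before relying on the new dev setting, a quick connectivity check can confirm the proxy is reachable. The snippet below is a hypothetical smoke test, not part of the commit; it loads the YAML directly rather than going through the repo's `configs` helper.

```python
import httpx
import yaml

with open("configs/dev.yaml") as f:
    conf = yaml.safe_load(f)

socks = conf.get("system", {}).get("outside_proxy", {}).get("socks5", {})
if socks:
    proxy_url = f"socks5://{socks['hostname']}:{socks['port']}"
    with httpx.Client(proxy=proxy_url, timeout=10.0) as client:
        # Any HTTPS endpoint works; even a 401 from the API proves the
        # proxy accepted and forwarded the request.
        resp = client.get("https://api.openai.com/v1/models")
        print(proxy_url, "->", resp.status_code)
else:
    print("no system.outside_proxy.socks5 configured; connecting directly")
```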

+ 6 - 0
configs/prod.yaml

@@ -43,6 +43,12 @@ chat_api:
     text_model: ep-20250414202859-6nkz5
     multimodal_model: ep-20250421193334-nz5wd
 
+system:
+  outside_proxy:
+    socks5:
+      hostname: 192.168.31.201
+      port: 1083
+
 agent_behavior:
   message_aggregation_sec: 20
   active_conversation_schedule_param:
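
One property worth noting about the prod (and dev) config: the lookup in chat_service.py chains `.get()` calls with empty-dict defaults, so deleting the `system:` block, or deploying a config that predates it, silently falls back to a direct connection instead of raising a KeyError. A small sketch of that fallback:

```python
conf = {}  # e.g. a config file without the new system: block

socks_conf = conf.get("system", {}).get("outside_proxy", {}).get("socks5", {})
assert socks_conf == {}  # every missing level yields {}, never a KeyError

if socks_conf:           # {} is falsy, so the proxy branch is skipped
    print("proxy configured")
else:
    print("direct connection")  # no http_client override is passed to OpenAI
```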