@@ -120,20 +120,31 @@ class OpenAICompatible:
         elif model_name in OpenAICompatible.deepseek_models:
             llm_client = OpenAI(api_key=DEEPSEEK_API_TOKEN, base_url=DEEPSEEK_BASE_URL, **kwargs)
         elif model_name in OpenAICompatible.openai_models:
-            socks_conf = configs.get().get('system', {}).get('outside_proxy', {}).get('socks5', {})
-            if socks_conf:
-                http_client = httpx.Client(
-                    timeout=httpx.Timeout(600, connect=5.0),
-                    proxy=f"socks5://{socks_conf['hostname']}:{socks_conf['port']}"
-                )
-                kwargs['http_client'] = http_client
+            kwargs['http_client'] = OpenAICompatible.create_outside_proxy_http_client()
             llm_client = OpenAI(api_key=OPENAI_API_TOKEN, base_url=OPENAI_BASE_URL, **kwargs)
         elif model_name in OpenAICompatible.openrouter_models:
+            kwargs['http_client'] = OpenAICompatible.create_outside_proxy_http_client()
             llm_client = OpenAI(api_key=OPENROUTER_API_TOKEN, base_url=OPENROUTER_BASE_URL, **kwargs)
         else:
             raise Exception("Unsupported model: %s" % model_name)
         return llm_client
 
+    @staticmethod
+    def create_outside_proxy_http_client() -> httpx.Client:
+        """
+        Create an HTTP client with outside proxy settings.
+        :return: Configured httpx.Client instance
+        """
+        socks_conf = configs.get().get('system', {}).get('outside_proxy', {}).get('socks5', {})
+        if socks_conf:
+            return httpx.Client(
+                timeout=httpx.Timeout(600, connect=5.0),
+                proxy=f"socks5://{socks_conf['hostname']}:{socks_conf['port']}"
+            )
+        # If no proxy is configured, return a standard client
+        logger.error("Outside proxy not configured, using default httpx client.")
+        return httpx.Client(timeout=httpx.Timeout(600, connect=5.0))
+
     @staticmethod
     def get_price(model_name: str) -> ModelPrice:
         """