
Add new chat service: volcengine bot

StrayWarrior 1 week ago
parent commit 3d506753fb
2 changed files with 16 additions and 9 deletions
  1. api_server.py  +14 -9
  2. chat_service.py  +2 -0

+ 14 - 9
api_server.py

@@ -115,7 +115,8 @@ def list_scenes():
         {'scene': 'greeting', 'display_name': '问候'},
         {'scene': 'chitchat', 'display_name': '闲聊'},
         {'scene': 'profile_extractor', 'display_name': '画像提取'},
-        {'scene': 'response_type_detector', 'display_name': '回复模态判断'}
+        {'scene': 'response_type_detector', 'display_name': '回复模态判断'},
+        {'scene': 'custom_debugging', 'display_name': '自定义调试场景'}
     ]
     return wrap_response(200, data=scenes)
 
@@ -126,13 +127,15 @@ def get_base_prompt():
         'greeting': prompt_templates.GENERAL_GREETING_PROMPT,
         'chitchat': prompt_templates.CHITCHAT_PROMPT_COZE,
         'profile_extractor': prompt_templates.USER_PROFILE_EXTRACT_PROMPT,
-        'response_type_detector': prompt_templates.RESPONSE_TYPE_DETECT_PROMPT
+        'response_type_detector': prompt_templates.RESPONSE_TYPE_DETECT_PROMPT,
+        'custom_debugging': '',
     }
     model_map = {
         'greeting': chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_32K,
         'chitchat': chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_32K,
         'profile_extractor': chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_1_5,
-        'response_type_detector': chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_1_5
+        'response_type_detector': chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_1_5,
+        'custom_debugging': chat_service.VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH
     }
     if scene not in prompt_map:
         return wrap_response(404, msg='scene not found')
@@ -151,18 +154,20 @@ def run_openai_chat(messages, model_name, **kwargs):
     deepseek_models = [
         chat_service.DEEPSEEK_CHAT_MODEL,
     ]
+    volcengine_bots = [
+        chat_service.VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH,
+    ]
     if model_name in volcengine_models:
         llm_client = OpenAI(api_key=chat_service.VOLCENGINE_API_TOKEN, base_url=chat_service.VOLCENGINE_BASE_URL)
-        response = llm_client.chat.completions.create(
-            messages=messages, model=model_name, **kwargs)
-        return response
+    elif model_name in volcengine_bots:
+        llm_client = OpenAI(api_key=chat_service.VOLCENGINE_API_TOKEN, base_url=chat_service.VOLCENGINE_BOT_BASE_URL)
     elif model_name in deepseek_models:
         llm_client = OpenAI(api_key=chat_service.DEEPSEEK_API_TOKEN, base_url=chat_service.DEEPSEEK_BASE_URL)
-        response = llm_client.chat.completions.create(
-            messages=messages, model=model_name, temperature=1, top_p=0.7, max_tokens=1024)
-        return response
     else:
         raise Exception('model not supported')
+    response = llm_client.chat.completions.create(
+        messages=messages, model=model_name, **kwargs)
+    return response
 
 def run_extractor_prompt(req_data):
     prompt = req_data['prompt']
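
Note on the api_server.py change: the new 'custom_debugging' scene maps to an empty base prompt and to the search-enabled DeepSeek bot, and run_openai_chat is restructured so that each provider branch only selects an OpenAI-compatible client while a single shared chat.completions.create call at the end performs the request. Below is a minimal sketch (not part of the commit) of how a volcengine bot is reached through that path; the token string is a placeholder for chat_service.VOLCENGINE_API_TOKEN, whose value does not appear in this diff, and it assumes the Ark bots endpoint is OpenAI-compatible as the code implies.

from openai import OpenAI

# Constants copied from chat_service.py as introduced by this commit.
VOLCENGINE_BOT_BASE_URL = "https://ark.cn-beijing.volces.com/api/v3/bots"
VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH = "bot-20250427173459-9h2xp"

# Placeholder credential; the real value is chat_service.VOLCENGINE_API_TOKEN.
client = OpenAI(api_key="<VOLCENGINE_API_TOKEN>", base_url=VOLCENGINE_BOT_BASE_URL)

response = client.chat.completions.create(
    model=VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH,  # the bot id is passed where a model name would go
    messages=[{"role": "user", "content": "你好"}],
)
print(response.choices[0].message.content)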

+ 2 - 0
chat_service.py

@@ -24,6 +24,8 @@ VOLCENGINE_MODEL_DOUBAO_1_5_VISION_PRO = 'ep-20250421193334-nz5wd'
 DEEPSEEK_API_TOKEN = 'sk-67daad8f424f4854bda7f1fed7ef220b'
 DEEPSEEK_BASE_URL = 'https://api.deepseek.com/'
 DEEPSEEK_CHAT_MODEL = 'deepseek-chat'
+VOLCENGINE_BOT_BASE_URL = "https://ark.cn-beijing.volces.com/api/v3/bots"
+VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH = "bot-20250427173459-9h2xp"
 
 class ChatServiceType(Enum):
     OPENAI_COMPATIBLE = auto()
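
Note on the chat_service.py change: the two new constants separate the bots endpoint from the regular model endpoint. VOLCENGINE_BOT_BASE_URL points at the Ark bots API rather than the plain /api/v3 path, and VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH is the bot id used in place of a model name. As a rough illustration only, and assuming the OpenAI-compatible client appends /chat/completions to that base URL (an assumption, not something shown in this diff), the request it sends is roughly equivalent to:

import requests

# Hypothetical raw request; the bearer token is a placeholder, not a real credential.
url = "https://ark.cn-beijing.volces.com/api/v3/bots/chat/completions"
payload = {
    "model": "bot-20250427173459-9h2xp",  # VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH
    "messages": [{"role": "user", "content": "你好"}],
}
headers = {"Authorization": "Bearer <VOLCENGINE_API_TOKEN>"}

resp = requests.post(url, json=payload, headers=headers, timeout=60)
resp.raise_for_status()
print(resp.json()["choices"][0]["message"]["content"])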