|
@@ -0,0 +1,53 @@
|
|
|
+#! /usr/bin/env python
|
|
|
+# -*- coding: utf-8 -*-
|
|
|
+# vim:fenc=utf-8
|
|
|
+
|
|
|
+from openai import OpenAI
|
|
|
+from datetime import datetime
|
|
|
+import chat_service
|
|
|
+import prompt_templates
|
|
|
+from dialogue_manager import DialogueManager
|
|
|
+from logging_service import logger
|
|
|
+from message import MessageType
|
|
|
+
|
|
|
+
|
|
|
class ResponseTypeDetector:
    """Decides whether the next customer-service reply should be sent as
    voice or text by asking an LLM to classify the outgoing message in the
    context of the dialogue history."""

    @staticmethod
    def compose_dialogue(dialogue):
        """Render a dialogue history as a newline-joined transcript.

        Each item of *dialogue* is a dict with ``role``, ``content`` and
        ``timestamp`` keys. Messages with empty content, or with a role
        other than ``user``/``assistant`` (e.g. ``system``), are skipped.

        :param dialogue: iterable of message dicts
        :return: transcript string, one ``[role] [time] content`` line per
                 rendered message; empty string when nothing is rendered
        """
        role_map = {'user': '用户', 'assistant': '客服'}
        lines = []
        for msg in dialogue:
            # Guard clause: skip empty messages and roles we do not render.
            if not msg['content'] or msg['role'] not in role_map:
                continue
            # NOTE(review): timestamp is assumed to be epoch *milliseconds*
            # (hence / 1000) and is rendered in the local timezone — confirm
            # against the message producer.
            fmt_time = datetime.fromtimestamp(msg['timestamp'] / 1000).strftime('%Y-%m-%d %H:%M:%S')
            lines.append(f"[{role_map[msg['role']]}] [{fmt_time}] {msg['content']}")
        return '\n'.join(lines)

    def __init__(self):
        # Volcengine-hosted OpenAI-compatible endpoint; credentials and the
        # model name come from the shared chat_service configuration module.
        self.llm_client = OpenAI(
            api_key=chat_service.VOLCENGINE_API_TOKEN,
            base_url=chat_service.VOLCENGINE_BASE_URL
        )
        self.model_name = chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_1_5

    def detect_type(self, dialogue_history, next_message):
        """Classify *next_message* as a voice or text reply.

        :param dialogue_history: list of message dicts (see compose_dialogue)
        :param next_message: the candidate outgoing message object
        :return: ``MessageType.VOICE`` when the model answers exactly
                 ``'语音'`` (after stripping), otherwise ``MessageType.TEXT``
        """
        composed_dialogue = self.compose_dialogue(dialogue_history)
        next_message = DialogueManager.format_dialogue_content(next_message)
        prompt = prompt_templates.RESPONSE_TYPE_DETECT_PROMPT.format(
            dialogue_history=composed_dialogue,
            message=next_message
        )
        messages = [
            {'role': 'system', 'content': '你是一个专业的智能助手'},
            {'role': 'user', 'content': prompt}
        ]
        # Low temperature / small max_tokens: this is a classification call,
        # not open-ended generation.
        response = self.llm_client.chat.completions.create(
            messages=messages,
            model=self.model_name,
            temperature=0.2,
            max_tokens=128
        )
        answer = response.choices[0].message.content.strip()
        # Replaces previously commented-out debug code: log the raw model
        # answer so misclassifications can be diagnosed from the logs.
        logger.debug('detect_type raw answer: %s', answer)
        return MessageType.VOICE if answer == '语音' else MessageType.TEXT
|