|
@@ -1,70 +1,36 @@
|
|
|
#! /usr/bin/env python
|
|
|
# -*- coding: utf-8 -*-
|
|
|
# vim:fenc=utf-8
|
|
|
-
|
|
|
+import time
|
|
|
import logging
|
|
|
import werkzeug.exceptions
|
|
|
from flask import Flask, request, jsonify
|
|
|
-from datetime import datetime
|
|
|
from argparse import ArgumentParser
|
|
|
|
|
|
-from openai import OpenAI
|
|
|
-from pqai_agent.message import MessageType
|
|
|
from pqai_agent import configs
|
|
|
-import json
|
|
|
+
|
|
|
from pqai_agent import logging_service, chat_service, prompt_templates
|
|
|
-from pqai_agent.dialogue_manager import DialogueManager
|
|
|
from pqai_agent.history_dialogue_service import HistoryDialogueService
|
|
|
-from pqai_agent.response_type_detector import ResponseTypeDetector
|
|
|
from pqai_agent.user_manager import MySQLUserManager, MySQLUserRelationManager
|
|
|
-from pqai_agent.user_profile_extractor import UserProfileExtractor
|
|
|
+from pqai_agent_server.const import AgentApiConst
|
|
|
+from pqai_agent_server.models import MySQLSessionManager
|
|
|
+from pqai_agent_server.utils import wrap_response, quit_human_intervention_status
|
|
|
+from pqai_agent_server.utils import (
|
|
|
+ run_extractor_prompt,
|
|
|
+ run_chat_prompt,
|
|
|
+ run_response_type_prompt,
|
|
|
+)
|
|
|
|
|
|
# Flask application instance serving the agent HTTP API.
app = Flask('agent_api_server')
# Module-level logger shared via the project's logging service.
logger = logging_service.logger
|
|
|
-
|
|
|
def compose_openai_chat_messages_no_time(dialogue_history, multimodal=False):
    """Convert a dialogue history into OpenAI chat-completion messages.

    Unlike the time-aware variant, no timestamp is embedded in the content.
    (Fix: the old body computed ``DialogueManager.format_timestamp(...)``
    into an unused local, which was dead work and could raise on entries
    lacking a ``timestamp`` key; that line is removed.)

    Args:
        dialogue_history: iterable of dicts with at least 'role' and
            'content', and optionally 'type' (a MessageType value).
        multimodal: when True, image-type entries become image_url content
            parts; otherwise they degrade to a text placeholder.

    Returns:
        list of message dicts suitable for the OpenAI chat API.
    """
    messages = []
    for entry in dialogue_history:
        role = entry['role']
        msg_type = entry.get('type', MessageType.TEXT)
        if msg_type in (MessageType.IMAGE_GW, MessageType.IMAGE_QW, MessageType.GIF):
            if multimodal:
                messages.append({
                    "role": role,
                    "content": [
                        {"type": "image_url", "image_url": {"url": entry["content"]}}
                    ]
                })
            else:
                # Images cannot be represented in text-only mode; keep the
                # dialogue coherent with a placeholder and log the loss.
                logger.warning("Image in non-multimodal mode")
                messages.append({
                    "role": role,
                    "content": "[图片]"
                })
        else:
            messages.append({
                "role": role,
                "content": f'{entry["content"]}'
            })
    return messages
|
|
|
-
|
|
|
def wrap_response(code, msg=None, data=None):
    """Build the standard JSON response envelope: {code, msg[, data]}.

    A 200 response with no explicit message defaults to 'success';
    'data' is attached only when truthy.
    """
    payload = {
        'code': code,
        # Successful responses fall back to a 'success' message.
        'msg': msg if (msg or code != 200) else 'success',
    }
    if data:
        payload['data'] = data
    return jsonify(payload)
|
|
|
# Shared API constants (default page ids/sizes, staff status, etc.).
const = AgentApiConst()
|
|
|
|
|
|
@app.route('/api/listStaffs', methods=['GET'])
def list_staffs():
    """Return every staff record known to the user-relation manager."""
    return wrap_response(200, data=app.user_relation_manager.list_staffs())
|
|
|
|
|
|
+
|
|
|
@app.route('/api/getStaffProfile', methods=['GET'])
|
|
|
def get_staff_profile():
|
|
|
staff_id = request.args['staff_id']
|
|
@@ -74,6 +40,7 @@ def get_staff_profile():
|
|
|
else:
|
|
|
return wrap_response(200, data=profile)
|
|
|
|
|
|
+
|
|
|
@app.route('/api/getUserProfile', methods=['GET'])
|
|
|
def get_user_profile():
|
|
|
user_id = request.args['user_id']
|
|
@@ -91,6 +58,7 @@ def get_user_profile():
|
|
|
}
|
|
|
return jsonify(resp)
|
|
|
|
|
|
+
|
|
|
@app.route('/api/listUsers', methods=['GET'])
|
|
|
def list_users():
|
|
|
user_name = request.args.get('user_name', None)
|
|
@@ -104,6 +72,7 @@ def list_users():
|
|
|
data = app.user_manager.list_users(user_name=user_name, user_union_id=user_union_id)
|
|
|
return jsonify({'code': 200, 'data': data})
|
|
|
|
|
|
+
|
|
|
@app.route('/api/getDialogueHistory', methods=['GET'])
|
|
|
def get_dialogue_history():
|
|
|
staff_id = request.args['staff_id']
|
|
@@ -112,6 +81,7 @@ def get_dialogue_history():
|
|
|
dialogue_history = app.history_dialogue_service.get_dialogue_history(staff_id, user_id, recent_minutes)
|
|
|
return jsonify({'code': 200, 'data': dialogue_history})
|
|
|
|
|
|
+
|
|
|
@app.route('/api/listModels', methods=['GET'])
|
|
|
def list_models():
|
|
|
models = [
|
|
@@ -143,6 +113,7 @@ def list_models():
|
|
|
]
|
|
|
return wrap_response(200, data=models)
|
|
|
|
|
|
+
|
|
|
@app.route('/api/listScenes', methods=['GET'])
|
|
|
def list_scenes():
|
|
|
scenes = [
|
|
@@ -154,6 +125,7 @@ def list_scenes():
|
|
|
]
|
|
|
return wrap_response(200, data=scenes)
|
|
|
|
|
|
+
|
|
|
@app.route('/api/getBasePrompt', methods=['GET'])
|
|
|
def get_base_prompt():
|
|
|
scene = request.args['scene']
|
|
@@ -179,112 +151,6 @@ def get_base_prompt():
|
|
|
}
|
|
|
return wrap_response(200, data=data)
|
|
|
|
|
|
def run_openai_chat(messages, model_name, **kwargs):
    """Dispatch a chat-completion request to the provider hosting *model_name*.

    An OpenAI-compatible client is selected by the provider family the model
    belongs to (Volcengine models, Volcengine bots, or DeepSeek).

    Args:
        messages: OpenAI-format message list.
        model_name: model identifier; must belong to a known provider.
        **kwargs: forwarded verbatim to ``chat.completions.create``
            (temperature, tools, max_tokens, ...).

    Returns:
        The raw completion response object.

    Raises:
        ValueError: if the model is not served by any known provider.
    """
    volcengine_models = [
        chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_32K,
        chat_service.VOLCENGINE_MODEL_DOUBAO_PRO_1_5,
        chat_service.VOLCENGINE_MODEL_DOUBAO_1_5_VISION_PRO,
        chat_service.VOLCENGINE_MODEL_DEEPSEEK_V3
    ]
    deepseek_models = [
        chat_service.DEEPSEEK_CHAT_MODEL,
    ]
    volcengine_bots = [
        chat_service.VOLCENGINE_BOT_DEEPSEEK_V3_SEARCH,
    ]
    if model_name in volcengine_models:
        llm_client = OpenAI(api_key=chat_service.VOLCENGINE_API_TOKEN, base_url=chat_service.VOLCENGINE_BASE_URL)
    elif model_name in volcengine_bots:
        llm_client = OpenAI(api_key=chat_service.VOLCENGINE_API_TOKEN, base_url=chat_service.VOLCENGINE_BOT_BASE_URL)
    elif model_name in deepseek_models:
        llm_client = OpenAI(api_key=chat_service.DEEPSEEK_API_TOKEN, base_url=chat_service.DEEPSEEK_BASE_URL)
    else:
        # Fix: raise ValueError (a subclass of Exception, so existing
        # handlers still work) and name the offending model for debugging.
        raise ValueError('model not supported: {}'.format(model_name))
    response = llm_client.chat.completions.create(
        messages=messages, model=model_name, **kwargs)
    logger.debug(response)
    return response
|
|
|
-
|
|
|
def run_extractor_prompt(req_data):
    """Run the user-profile extraction prompt and return non-empty fields.

    Formats the caller-supplied prompt with staff/user profile fields plus
    the composed dialogue, invokes the LLM with the extraction function
    tool, and decodes the resulting tool call into a dict.
    """
    model_name = req_data['model_name']
    # user_profile values intentionally override identically-named
    # staff_profile keys, matching the original merge order.
    context = dict(req_data['staff_profile'])
    context.update(req_data['user_profile'])
    context['dialogue_history'] = UserProfileExtractor.compose_dialogue(
        req_data['dialogue_history'])
    filled_prompt = req_data['prompt'].format(**context)
    messages = [
        {"role": "system", "content": '你是一个专业的用户画像分析助手。'},
        {"role": "user", "content": filled_prompt}
    ]
    tools = [UserProfileExtractor.get_extraction_function()]
    response = run_openai_chat(messages, model_name, tools=tools, temperature=0)
    calls = response.choices[0].message.tool_calls
    if not calls:
        return {}
    call = calls[0]
    if call.function.name != 'update_user_profile':
        logger.error("llm does not return update_user_profile")
        return {}
    parsed = json.loads(call.function.arguments)
    # Drop empty/None values so only meaningful profile fields survive.
    return {field: value for field, value in parsed.items() if value}
|
|
|
-
|
|
|
def run_chat_prompt(req_data):
    """Run a chat prompt against the selected model and return the raw response.

    Builds the system prompt from staff/user profile context plus
    time-of-day variables, appends the dialogue history in the
    scene-appropriate format, and calls the LLM.
    (Fix: replaced the ``False if X else True`` anti-idiom with direct
    boolean expressions; behavior is unchanged.)

    Args:
        req_data: dict with 'prompt', 'model_name', 'scene',
            'current_timestamp' (milliseconds) and optional 'staff_profile',
            'user_profile', 'dialogue_history'.
    """
    prompt = req_data['prompt']
    staff_profile = req_data.get('staff_profile', {})
    user_profile = req_data.get('user_profile', {})
    dialogue_history = req_data.get('dialogue_history', [])
    model_name = req_data['model_name']
    # Incoming timestamp is in milliseconds; convert to seconds.
    current_timestamp = req_data['current_timestamp'] / 1000
    prompt_context = {**staff_profile, **user_profile}
    current_hour = datetime.fromtimestamp(current_timestamp).hour
    # NOTE(review): hard-coded 0 — this debugging path has no real "last
    # interaction" to measure; confirm this matches the production prompt.
    prompt_context['last_interaction_interval'] = 0
    prompt_context['current_time_period'] = DialogueManager.get_time_context(current_hour)
    prompt_context['current_hour'] = current_hour
    prompt_context['if_first_interaction'] = not dialogue_history
    last_message = dialogue_history[-1] if dialogue_history else {'role': 'assistant'}
    # Active greeting: the assistant speaks without a pending user message.
    prompt_context['if_active_greeting'] = last_message['role'] != 'user'

    current_time_str = datetime.fromtimestamp(current_timestamp).strftime('%Y-%m-%d %H:%M:%S')
    system_prompt = {
        'role': 'system',
        'content': prompt.format(**prompt_context)
    }
    messages = [system_prompt]
    if req_data['scene'] == 'custom_debugging':
        messages.extend(compose_openai_chat_messages_no_time(dialogue_history))
        # If the rendered prompt mentions the avatar ("头像"), attach the
        # user's avatar image as an extra multimodal message.
        if '头像' in system_prompt['content']:
            messages.append({
                "role": 'user',
                "content": [
                    {"type": "image_url", "image_url": {"url": user_profile['avatar']}}
                ]
            })
    else:
        messages.extend(DialogueManager.compose_chat_messages_openai_compatible(dialogue_history, current_time_str))
    return run_openai_chat(messages, model_name, temperature=1, top_p=0.7, max_tokens=1024)
|
|
|
-
|
|
|
def run_response_type_prompt(req_data):
    """Classify the expected response type for the newest message.

    The history (minus the last entry) and the last message itself are
    substituted into the caller-supplied prompt, then sent to the model.
    """
    model_name = req_data['model_name']
    history = req_data['dialogue_history']

    composed_dialogue = ResponseTypeDetector.compose_dialogue(history[:-1])
    next_message = DialogueManager.format_dialogue_content(history[-1])
    filled = req_data['prompt'].format(
        dialogue_history=composed_dialogue,
        message=next_message
    )
    messages = [
        {'role': 'system', 'content': '你是一个专业的智能助手'},
        {'role': 'user', 'content': filled}
    ]
    return run_openai_chat(messages, model_name, temperature=0.2, max_tokens=128)
|
|
|
-
|
|
|
|
|
|
@app.route('/api/runPrompt', methods=['POST'])
|
|
|
def run_prompt():
|
|
@@ -305,6 +171,100 @@ def run_prompt():
|
|
|
logger.error(e)
|
|
|
return wrap_response(500, msg='Error: {}'.format(e))
|
|
|
|
|
|
+
|
|
|
@app.route("/api/healthCheck", methods=["GET"])
def health_check():
    # Liveness probe: always 200/"OK"; performs no dependency checks.
    return wrap_response(200, msg="OK")
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/getStaffSessionSummary", methods=["GET"])
def get_staff_session_summary():
    """Return a paged summary of sessions for one staff member.

    Query params: staff_id, status, page_id, page_size; the latter three
    fall back to API-wide defaults and must be integer-convertible.
    Responds 404 on invalid paging params or an unknown staff.
    """
    staff_id = request.args.get("staff_id")
    status = request.args.get("status", const.DEFAULT_STAFF_STATUS)
    page_id = request.args.get("page_id", const.DEFAULT_PAGE_ID)
    page_size = request.args.get("page_size", const.DEFAULT_PAGE_SIZE)

    # Validate paging/status params. Fix: only int() conversion can fail
    # here, so catch the specific exceptions instead of a blanket Exception.
    try:
        page_id = int(page_id)
        page_size = int(page_size)
        status = int(status)
    except (TypeError, ValueError) as e:
        return wrap_response(404, msg="Invalid parameter: {}".format(e))

    staff_session_summary = app.session_manager.get_staff_sessions_summary(
        staff_id, page_id, page_size, status
    )

    if not staff_session_summary:
        return wrap_response(404, msg="staff not found")
    return wrap_response(200, data=staff_session_summary)
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/getStaffSessionList", methods=["GET"])
def get_staff_session_list():
    """Return a page of the session list for one staff member.

    Query params: staff_id (required), page_id, page_size.
    Responds 404 when staff_id is missing, params are invalid,
    or the staff has no sessions.
    """
    staff_id = request.args.get("staff_id")
    if not staff_id:
        return wrap_response(404, msg="staff_id is required")

    page_size = request.args.get("page_size", const.DEFAULT_PAGE_SIZE)
    page_id = request.args.get("page_id", const.DEFAULT_PAGE_ID)
    # Fix: query args arrive as strings; convert/validate paging params up
    # front, consistent with get_staff_session_summary.
    try:
        page_id = int(page_id)
        page_size = int(page_size)
    except (TypeError, ValueError) as e:
        return wrap_response(404, msg="Invalid parameter: {}".format(e))
    staff_session_list = app.session_manager.get_staff_session_list(staff_id, page_id, page_size)
    if not staff_session_list:
        return wrap_response(404, msg="staff not found")

    return wrap_response(200, data=staff_session_list)
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/getStaffList", methods=["GET"])
def get_staff_list():
    """Return a page of all staff members.

    Query params: page_id, page_size (default to API-wide constants).
    Responds 404 on invalid paging params or an empty result.
    """
    page_size = request.args.get("page_size", const.DEFAULT_PAGE_SIZE)
    page_id = request.args.get("page_id", const.DEFAULT_PAGE_ID)
    # Fix: query args arrive as strings; normalize to int before handing
    # them to the manager, consistent with the other paged endpoints.
    try:
        page_id = int(page_id)
        page_size = int(page_size)
    except (TypeError, ValueError) as e:
        return wrap_response(404, msg="Invalid parameter: {}".format(e))
    staff_list = app.user_manager.get_staff_list(page_id, page_size)
    if not staff_list:
        return wrap_response(404, msg="staff not found")
    return wrap_response(200, data=staff_list)
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/getConversationList", methods=["GET"])
def get_conversation_list():
    """Fetch the private conversation list between a staff member and a customer."""
    staff_id = request.args.get("staff_id")
    customer_id = request.args.get("customer_id")
    if not (staff_id and customer_id):
        return wrap_response(404, msg="staff_id and customer_id are required")

    # 'page' may be None when absent — presumably meaning "first page";
    # confirm against the session manager's contract.
    page = request.args.get("page")
    conversations = app.session_manager.get_conversation_list(
        staff_id, customer_id, page, const.DEFAULT_CONVERSATION_SIZE
    )
    return wrap_response(200, data=conversations)
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/sendMessage", methods=["POST"])
def send_message():
    # Placeholder endpoint: message sending is intentionally unimplemented;
    # returns 200 with a "not implemented yet" message.
    return wrap_response(200, msg="暂不实现功能")
|
|
|
+
|
|
|
+
|
|
|
@app.route("/api/quitHumanInterventionStatus", methods=["GET"])
def quit_human_interventions_status():
    """Exit the human-intervention state for a staff/customer pair.

    Query params: staff_id, customer_id.
    """
    staff_id = request.args.get("staff_id")
    customer_id = request.args.get("customer_id")
    # Test environment: staff_id is forced to 1688854492669990.
    # NOTE(review): this override makes the request's staff_id dead and the
    # `not staff_id` branch below unreachable — confirm removal before
    # shipping to production.
    staff_id = 1688854492669990
    if not customer_id or not staff_id:
        # NOTE(review): message says "user_id" but the parameter checked is
        # customer_id — confirm intended wording.
        return wrap_response(404, msg="user_id and staff_id are required")
    response = quit_human_intervention_status(customer_id, staff_id)

    return wrap_response(200, data=response)
|
|
|
+
|
|
|
+
|
|
|
@app.errorhandler(werkzeug.exceptions.BadRequest)
|
|
|
def handle_bad_request(e):
|
|
|
logger.error(e)
|
|
@@ -323,11 +283,26 @@ if __name__ == '__main__':
|
|
|
logging_level = logging.getLevelName(args.log_level)
|
|
|
logging_service.setup_root_logger(level=logging_level, logfile_name='agent_api_server.log')
|
|
|
|
|
|
+ # set db config
|
|
|
user_db_config = config['storage']['user']
|
|
|
staff_db_config = config['storage']['staff']
|
|
|
+ agent_state_db_config = config['storage']['agent_state']
|
|
|
+ chat_history_db_config = config['storage']['chat_history']
|
|
|
+
|
|
|
+ # init user manager
|
|
|
user_manager = MySQLUserManager(user_db_config['mysql'], user_db_config['table'], staff_db_config['table'])
|
|
|
app.user_manager = user_manager
|
|
|
|
|
|
+ # init session manager
|
|
|
+ session_manager = MySQLSessionManager(
|
|
|
+ db_config=user_db_config['mysql'],
|
|
|
+ staff_table=staff_db_config['table'],
|
|
|
+ user_table=user_db_config['table'],
|
|
|
+ agent_state_table=agent_state_db_config['table'],
|
|
|
+ chat_history_table=chat_history_db_config['table']
|
|
|
+ )
|
|
|
+ app.session_manager = session_manager
|
|
|
+
|
|
|
wecom_db_config = config['storage']['user_relation']
|
|
|
user_relation_manager = MySQLUserRelationManager(
|
|
|
user_db_config['mysql'], wecom_db_config['mysql'],
|