Browse Source

增加历史对话和查询不到反馈

xueyiming 1 tuần trước cách đây
mục cha
commit
f81929dffe

+ 56 - 2
applications/utils/mysql/mapper.py

@@ -47,11 +47,12 @@ class ChatResult(BaseMySQLClient):
         ai_answer,
         ai_answer,
         ai_source,
         ai_source,
         ai_status,
         ai_status,
+        is_web=None,
     ):
     ):
         query = """
         query = """
                     INSERT INTO chat_res
                     INSERT INTO chat_res
-                        (query, dataset_ids, search_res, chat_res, score, has_answer, ai_answer, ai_source, ai_status) 
-                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);
+                        (query, dataset_ids, search_res, chat_res, score, has_answer, ai_answer, ai_source, ai_status, is_web) 
+                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
                 """
                 """
         return await self.pool.async_save(
         return await self.pool.async_save(
             query=query,
             query=query,
@@ -65,5 +66,58 @@ class ChatResult(BaseMySQLClient):
                 ai_answer,
                 ai_answer,
                 ai_source,
                 ai_source,
                 ai_status,
                 ai_status,
+                is_web,
             ),
             ),
         )
         )
+
async def select_chat_results(
    self, page_num: int, page_size: int, order_by=None, is_web: int = 1
):
    """
    Paginated query over the chat_res table.

    :param page_num: 1-based page number (values < 1 are clamped to 1)
    :param page_size: rows per page (values < 1 are clamped to 1)
    :param order_by: sort spec, e.g. {"id": "desc"} or {"created_at": "asc"}
    :param is_web: filter on the is_web column (default 1)
    :return: dict with entities, total_count, page, page_size, total_pages
    :raises ValueError: if the order_by field or direction is not a safe
        SQL identifier / ASC|DESC keyword
    """
    import re  # local import: only needed for identifier validation

    if order_by is None:
        order_by = {"id": "desc"}
    # Clamp paging inputs: page_num < 1 would produce a negative OFFSET,
    # page_size < 1 would divide by zero in the total_pages computation.
    page_num = max(1, page_num)
    page_size = max(1, page_size)
    offset = (page_num - 1) * page_size

    where_sql = "is_web = %s"
    params = [is_web]

    # ORDER BY identifiers cannot be bound as query parameters, so validate
    # them before string interpolation to prevent SQL injection.
    order_field, order_direction = next(iter(order_by.items()))
    if not re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", order_field):
        raise ValueError(f"invalid order_by field: {order_field!r}")
    direction = order_direction.upper()
    if direction not in ("ASC", "DESC"):
        raise ValueError(f"invalid order_by direction: {order_direction!r}")
    order_sql = f"ORDER BY {order_field} {direction}"

    # Total row count for the current filter.
    count_query = f"SELECT COUNT(*) as total_count FROM chat_res WHERE {where_sql};"
    count_result = await self.pool.async_fetch(
        query=count_query, params=tuple(params)
    )
    total_count = count_result[0]["total_count"] if count_result else 0

    # Fetch the requested page of rows.
    query = f"""
        SELECT search_res, query, create_time, chat_res, ai_answer FROM chat_res
        WHERE {where_sql}
        {order_sql}
        LIMIT %s OFFSET %s;
    """
    params.extend([page_size, offset])
    entities = await self.pool.async_fetch(query=query, params=tuple(params))

    # Ceiling division for the number of pages.
    total_pages = (total_count + page_size - 1) // page_size

    return {
        "entities": entities,
        "total_count": total_count,
        "page": page_num,
        "page_size": page_size,
        "total_pages": total_pages,
    }

+ 0 - 0
applications/utils/spider/__init__.py


+ 15 - 0
applications/utils/spider/study.py

@@ -0,0 +1,15 @@
+import json
+
+import requests
+
+
def study(question):
    """
    Send an unanswered question to the remote query-generation service so it
    can be learned from.

    :param question: the user question that RAG could not answer
    :return: the parsed JSON response on success; otherwise a Chinese error
             string describing the failure (kept for backward compatibility
             with existing callers that treat any return value as best-effort)
    """
    url = "http://8.219.186.16:8079/generate-queries"
    headers = {"Content-Type": "application/json"}
    payload = {"question": question}

    try:
        # A timeout is essential here: this is called inline from request
        # handlers, and without one an unreachable service would hang the
        # whole HTTP request indefinitely.
        response = requests.post(url, headers=headers, json=payload, timeout=10)
    except requests.RequestException as e:
        # Best-effort call: report the failure instead of crashing the caller.
        return f"请求失败,状态码: {e}"
    if response.status_code == 200:
        return response.json()  # 返回响应的 JSON 数据
    else:
        return f"请求失败,状态码: {response.status_code}"

+ 3 - 0
mcp_server/server.py

@@ -8,6 +8,7 @@ from mcp.server.lowlevel import Server
 from applications.resource import get_resource_manager
 from applications.resource import get_resource_manager
 from applications.utils.chat import RAGChatAgent
 from applications.utils.chat import RAGChatAgent
 from applications.utils.mysql import ChatResult
 from applications.utils.mysql import ChatResult
+from applications.utils.spider.study import study
 from routes.buleprint import query_search
 from routes.buleprint import query_search
 
 
 
 
@@ -67,6 +68,8 @@ async def rag_search(query_text: str):
     chat_result_mapper = ChatResult(resource.mysql_client)
     chat_result_mapper = ChatResult(resource.mysql_client)
     rag_chat_agent = RAGChatAgent()
     rag_chat_agent = RAGChatAgent()
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
+    if chat_result["status"] == 0:
+        study(query_text)
     llm_search_result = await rag_chat_agent.llm_search(query_text)
     llm_search_result = await rag_chat_agent.llm_search(query_text)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search_result)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search_result)
     data = {
     data = {

+ 37 - 3
routes/buleprint.py

@@ -20,6 +20,7 @@ from applications.resource import get_resource_manager
 from applications.search import HybridSearch
 from applications.search import HybridSearch
 from applications.utils.chat import RAGChatAgent
 from applications.utils.chat import RAGChatAgent
 from applications.utils.mysql import Dataset, Contents, ContentChunks, ChatResult
 from applications.utils.mysql import Dataset, Contents, ContentChunks, ChatResult
+from applications.utils.spider.study import study
 
 
 server_bp = Blueprint("api", __name__, url_prefix="/api")
 server_bp = Blueprint("api", __name__, url_prefix="/api")
 server_bp = cors(server_bp, allow_origin="*")
 server_bp = cors(server_bp, allow_origin="*")
@@ -385,7 +386,6 @@ async def chat():
     )
     )
     resource = get_resource_manager()
     resource = get_resource_manager()
     chat_result_mapper = ChatResult(resource.mysql_client)
     chat_result_mapper = ChatResult(resource.mysql_client)
-    resource = get_resource_manager()
     dataset_mapper = Dataset(resource.mysql_client)
     dataset_mapper = Dataset(resource.mysql_client)
     for result in query_results:
     for result in query_results:
         datasets = await dataset_mapper.select_dataset_by_id(result["datasetId"])
         datasets = await dataset_mapper.select_dataset_by_id(result["datasetId"])
@@ -395,20 +395,30 @@ async def chat():
 
 
     rag_chat_agent = RAGChatAgent()
     rag_chat_agent = RAGChatAgent()
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
+    if chat_result["status"] == 0:
+        study_res = study(query_text)
+        print(study_res)
     llm_search = await rag_chat_agent.llm_search(query_text)
     llm_search = await rag_chat_agent.llm_search(query_text)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search)
-    data = {"results": query_results, "chat_res": decision["result"]}
+    data = {
+        "results": query_results,
+        "chat_res": decision["result"],
+        "rag_summary": chat_result["summary"],
+        "llm_summary": llm_search["answer"],
+    }
     await chat_result_mapper.insert_chat_result(
     await chat_result_mapper.insert_chat_result(
         query_text,
         query_text,
         dataset_id_strs,
         dataset_id_strs,
-        json.dumps(data, ensure_ascii=False),
+        json.dumps(query_results, ensure_ascii=False),
         chat_result["summary"],
         chat_result["summary"],
         chat_result["relevance_score"],
         chat_result["relevance_score"],
         chat_result["status"],
         chat_result["status"],
         llm_search["answer"],
         llm_search["answer"],
         llm_search["source"],
         llm_search["source"],
         llm_search["status"],
         llm_search["status"],
+        is_web=1,
     )
     )
+    # data = {"results": query_results, "chat_res": 'chat_res', 'rag_summary': 'rag_summary', 'llm_summary': 'llm_summary'}
     return jsonify({"status_code": 200, "detail": "success", "data": data})
     return jsonify({"status_code": 200, "detail": "success", "data": data})
 
 
 
 
@@ -502,6 +512,8 @@ async def rag_search():
     chat_result_mapper = ChatResult(resource.mysql_client)
     chat_result_mapper = ChatResult(resource.mysql_client)
     rag_chat_agent = RAGChatAgent()
     rag_chat_agent = RAGChatAgent()
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
+    if chat_result["status"] == 0:
+        study(query_text)
     llm_search = await rag_chat_agent.llm_search(query_text)
     llm_search = await rag_chat_agent.llm_search(query_text)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search)
     decision = await rag_chat_agent.make_decision(chat_result, llm_search)
     data = {
     data = {
@@ -521,3 +533,25 @@ async def rag_search():
         llm_search["status"],
         llm_search["status"],
     )
     )
     return jsonify({"status_code": 200, "detail": "success", "data": data})
     return jsonify({"status_code": 200, "detail": "success", "data": data})
+
+
@server_bp.route("/chat/history", methods=["GET"])
async def chat_history():
    """
    Return paginated chat history (web records only) from chat_res.

    Query params: page (1-based, default 1), pageSize (default 10).
    """
    # Fall back to defaults on missing or non-numeric query params instead of
    # letting int() raise ValueError and surface as an HTTP 500.
    try:
        page_num = int(request.args.get("page", 1))
    except (TypeError, ValueError):
        page_num = 1
    try:
        page_size = int(request.args.get("pageSize", 10))
    except (TypeError, ValueError):
        page_size = 10
    # Guard against non-positive paging values from the client.
    page_num = max(1, page_num)
    page_size = max(1, page_size)
    resource = get_resource_manager()
    chat_result_mapper = ChatResult(resource.mysql_client)
    result = await chat_result_mapper.select_chat_results(page_num, page_size)
    return jsonify(
        {
            "status_code": 200,
            "detail": "success",
            "data": {
                "entities": result["entities"],
                "total_count": result["total_count"],
                "page": result["page"],
                "page_size": result["page_size"],
                "total_pages": result["total_pages"],
            },
        }
    )