|
@@ -394,20 +394,20 @@ async def chat():
|
|
|
result["datasetName"] = dataset_name
|
|
|
|
|
|
rag_chat_agent = RAGChatAgent()
|
|
|
- chat_res = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
|
|
|
- deepseek_search = await rag_chat_agent.search_with_deepseek(query_text)
|
|
|
- select = await rag_chat_agent.select_with_deepseek(chat_res, deepseek_search)
|
|
|
- data = {"results": query_results, "chat_res": select["result"]}
|
|
|
+ chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
|
|
|
+ llm_search = await rag_chat_agent.llm_search(query_text)
|
|
|
+ decision = await rag_chat_agent.make_decision(chat_result, llm_search)
|
|
|
+ data = {"results": query_results, "chat_res": decision["result"]}
|
|
|
await chat_result_mapper.insert_chat_result(
|
|
|
query_text,
|
|
|
dataset_id_strs,
|
|
|
json.dumps(data, ensure_ascii=False),
|
|
|
- chat_res["summary"],
|
|
|
- chat_res["relevance_score"],
|
|
|
- chat_res["status"],
|
|
|
- deepseek_search["answer"],
|
|
|
- deepseek_search["source"],
|
|
|
- deepseek_search["status"],
|
|
|
+ chat_result["summary"],
|
|
|
+ chat_result["relevance_score"],
|
|
|
+ chat_result["status"],
|
|
|
+ llm_search["answer"],
|
|
|
+ llm_search["source"],
|
|
|
+ llm_search["status"],
|
|
|
)
|
|
|
return jsonify({"status_code": 200, "detail": "success", "data": data})
|
|
|
|
|
@@ -482,3 +482,41 @@ async def delete_task():
|
|
|
)
|
|
|
await build_graph_task.deal(doc_id)
|
|
|
return jsonify({"status_code": 200, "detail": "success", "data": {}})
|
|
|
+
|
|
|
@server_bp.route("/rag/search", methods=["POST"])
async def rag_search():
    """Answer a query via RAG + LLM search, persist the exchange, and return the decision.

    Request JSON body:
        queryText (str): the user query (required).
        datasetIds (str, optional): comma-separated dataset ids to search;
            defaults to "11,12" (previous hard-coded value, kept for
            backward compatibility).
        searchType (str, optional): retrieval strategy passed to
            ``query_search``; defaults to "hybrid".

    Returns:
        JSON envelope ``{"status_code", "detail", "data"}`` where ``data``
        carries the agent decision's result, status, and relevance score.
    """
    body = await request.get_json()
    query_text = body.get("queryText")
    if not query_text:
        # Fail fast instead of forwarding None into query_search / the agent.
        return jsonify(
            {"status_code": 400, "detail": "queryText is required", "data": {}}
        )

    # Previously hard-coded constants; now overridable per request while
    # preserving the old behavior when the fields are absent.
    dataset_id_strs = body.get("datasetIds", "11,12")
    dataset_ids = dataset_id_strs.split(",")
    search_type = body.get("searchType", "hybrid")

    query_results = await query_search(
        query_text=query_text,
        filters={"dataset_id": dataset_ids},
        search_type=search_type,
    )

    resource = get_resource_manager()
    chat_result_mapper = ChatResult(resource.mysql_client)
    rag_chat_agent = RAGChatAgent()

    # Summarize retrieved chunks, run an independent LLM search, then let
    # the agent pick which answer to surface (mirrors the /chat endpoint).
    chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
    llm_search = await rag_chat_agent.llm_search(query_text)
    decision = await rag_chat_agent.make_decision(chat_result, llm_search)

    data = {
        "result": decision["result"],
        "status": decision["status"],
        "relevance_score": decision["relevance_score"],
    }

    # Persist both candidate answers and their scores for later auditing.
    await chat_result_mapper.insert_chat_result(
        query_text,
        dataset_id_strs,
        json.dumps(query_results, ensure_ascii=False),
        chat_result["summary"],
        chat_result["relevance_score"],
        chat_result["status"],
        llm_search["answer"],
        llm_search["source"],
        llm_search["status"],
    )
    return jsonify({"status_code": 200, "detail": "success", "data": data})
|