@@ -394,20 +394,20 @@ async def chat():
         result["datasetName"] = dataset_name
 
     rag_chat_agent = RAGChatAgent()
-    chat_res = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
-    deepseek_search = await rag_chat_agent.search_with_deepseek(query_text)
-    select = await rag_chat_agent.select_with_deepseek(chat_res, deepseek_search)
-    data = {"results": query_results, "chat_res": select["result"]}
+    chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
+    llm_search = await rag_chat_agent.llm_search(query_text)
+    decision = await rag_chat_agent.make_decision(chat_result, llm_search)
+    data = {"results": query_results, "chat_res": decision["result"]}
     await chat_result_mapper.insert_chat_result(
         query_text,
         dataset_id_strs,
         json.dumps(data, ensure_ascii=False),
-        chat_res["summary"],
-        chat_res["relevance_score"],
-        chat_res["status"],
-        deepseek_search["answer"],
-        deepseek_search["source"],
-        deepseek_search["status"],
+        chat_result["summary"],
+        chat_result["relevance_score"],
+        chat_result["status"],
+        llm_search["answer"],
+        llm_search["source"],
+        llm_search["status"],
     )
     return jsonify({"status_code": 200, "detail": "success", "data": data})
@@ -500,23 +500,23 @@ async def rag_search():
     resource = get_resource_manager()
     chat_result_mapper = ChatResult(resource.mysql_client)
     rag_chat_agent = RAGChatAgent()
-    chat_res = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
-    deepseek_search = await rag_chat_agent.search_with_deepseek(query_text)
-    select = await rag_chat_agent.select_with_deepseek(chat_res, deepseek_search)
+    chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
+    llm_search = await rag_chat_agent.llm_search(query_text)
+    decision = await rag_chat_agent.make_decision(chat_result, llm_search)
     data = {
-        "result": select["result"],
-        "status": select["status"],
-        "relevance_score": select["relevance_score"],
+        "result": decision["result"],
+        "status": decision["status"],
+        "relevance_score": decision["relevance_score"],
     }
     await chat_result_mapper.insert_chat_result(
         query_text,
         dataset_id_strs,
         json.dumps(query_results, ensure_ascii=False),
-        chat_res["summary"],
-        chat_res["relevance_score"],
-        chat_res["status"],
-        deepseek_search["answer"],
-        deepseek_search["source"],
-        deepseek_search["status"],
+        chat_result["summary"],
+        chat_result["relevance_score"],
+        chat_result["status"],
+        llm_search["answer"],
+        llm_search["source"],
+        llm_search["status"],
    )
    return jsonify({"status_code": 200, "detail": "success", "data": data})
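
For context, a minimal sketch of what the renamed RAGChatAgent interface might look like after this change, assuming chat_with_deepseek is kept and only search_with_deepseek / select_with_deepseek are renamed to llm_search / make_decision. The method bodies are placeholders that only model the return keys the route code reads (summary/relevance_score/status, answer/source/status, result/status/relevance_score); the 0.5 relevance threshold is a made-up illustration, not the project's actual decision logic.

# Sketch only: stand-in agent with the renamed methods; the real methods call an LLM.
import asyncio


class RAGChatAgent:
    async def chat_with_deepseek(self, query_text: str, query_results: list) -> dict:
        # Placeholder: summarize the retrieved chunks for the query.
        return {"summary": "stub summary", "relevance_score": 0.0, "status": 1}

    async def llm_search(self, query_text: str) -> dict:
        # Placeholder: answer the query directly, without retrieved context.
        return {"answer": "stub answer", "source": "llm", "status": 1}

    async def make_decision(self, chat_result: dict, llm_search: dict) -> dict:
        # Placeholder: pick between the RAG summary and the direct LLM answer
        # (hypothetical 0.5 threshold, for illustration only).
        chosen = (
            chat_result["summary"]
            if chat_result["relevance_score"] >= 0.5
            else llm_search["answer"]
        )
        return {
            "result": chosen,
            "status": chat_result["status"],
            "relevance_score": chat_result["relevance_score"],
        }


async def main() -> None:
    agent = RAGChatAgent()
    chat_result = await agent.chat_with_deepseek("example query", [])
    llm_search = await agent.llm_search("example query")
    decision = await agent.make_decision(chat_result, llm_search)
    print(decision)


if __name__ == "__main__":
    asyncio.run(main())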