
Format code

xueyiming, 1 day ago
Commit 0ab4b297fb
3 files changed, 47 insertions(+), 31 deletions(-)
  1. applications/utils/search/qwen.py (+24, -16)
  2. mcp_server/server.py (+4, -2)
  3. routes/blueprint.py (+19, -13)
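
These are formatting-only changes: long def signatures are split one parameter per line, trailing commas are added to multi-line calls, single quotes become double quotes, and spaces are added around operators. The style matches Black's defaults, so here is a minimal sketch of reproducing the pass, assuming Black is the formatter (the commit does not name a tool):

import black

# The original one-line chat() signature exceeds Black's 88-character limit...
src = (
    'def chat(self, model="qwen3-max", '
    'system_prompt="You are a helpful assistant.", user_prompt=""):\n'
    "    pass\n"
)

# ...so format_str() splits it one parameter per line with a trailing comma,
# matching the "+" lines in the diff below.
print(black.format_str(src, mode=black.Mode()))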

applications/utils/search/qwen.py (+24, -16)

@@ -5,7 +5,12 @@ class QwenClient:
     def __init__(self):
         self.api_key = "sk-1022fe8e15ff4e0e9abc20541b281165"
 
-    def chat(self, model="qwen3-max", system_prompt="You are a helpful assistant.", user_prompt=""):
+    def chat(
+        self,
+        model="qwen3-max",
+        system_prompt="You are a helpful assistant.",
+        user_prompt="",
+    ):
         """
         普通聊天,不使用搜索功能
 
@@ -27,7 +32,7 @@ class QwenClient:
                 api_key=self.api_key,
                 model=model,
                 messages=messages,
-                result_format="message"
+                result_format="message",
             )
 
             if response.status_code != 200:
@@ -38,7 +43,13 @@ class QwenClient:
         except Exception as e:
             raise Exception(f"QwenClient chat失败: {str(e)}")
 
-    def search_and_chat(self, model="qwen3-max", system_prompt="You are a helpful assistant.", user_prompt="", search_strategy="max"):
+    def search_and_chat(
+        self,
+        model="qwen3-max",
+        system_prompt="You are a helpful assistant.",
+        user_prompt="",
+        search_strategy="max",
+    ):
         """
         搜索并聊天
 
@@ -65,9 +76,9 @@ class QwenClient:
                 search_options={
                     "forced_search": True,
                     "enable_source": True,
-                    "search_strategy": search_strategy
+                    "search_strategy": search_strategy,
                 },
-                result_format="message"
+                result_format="message",
             )
 
             if response.status_code != 200:
@@ -76,13 +87,10 @@ class QwenClient:
             content = response["output"]["choices"][0]["message"]["content"]
             search_results = []
 
-            if hasattr(response.output, 'search_info') and response.output.search_info:
+            if hasattr(response.output, "search_info") and response.output.search_info:
                 search_results = response.output.search_info.get("search_results", [])
 
-            return {
-                "content": content,
-                "search_results": search_results
-            }
+            return {"content": content, "search_results": search_results}
 
         except Exception as e:
             raise Exception(f"QwenClient search_and_chat失败: {str(e)}")
@@ -93,7 +101,6 @@ if __name__ == "__main__":
 
     # 测试
     try:
-
         # result = client.chat(user_prompt="hello")
         # print(result)
 
@@ -124,17 +131,18 @@ if __name__ == "__main__":
   "页面操作路径": "完整的页面操作路径"
 }"""
 
-
         # user_prompt = "请搜索 白瓜AI 官网"
 
-        result = client.search_and_chat(user_prompt=user_prompt, search_strategy="agent")
+        result = client.search_and_chat(
+            user_prompt=user_prompt, search_strategy="agent"
+        )
 
-        print("="*20 + "搜索结果" + "="*20)
+        print("=" * 20 + "搜索结果" + "=" * 20)
         for web in result["search_results"]:
             print(f"[{web['index']}]: [{web['title']}]({web['url']})")
 
-        print("="*20 + "回复内容" + "="*20)
+        print("=" * 20 + "回复内容" + "=" * 20)
         print(result["content"])
 
     except Exception as e:
-        print(f"错误: {e}")
+        print(f"错误: {e}")
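
The reformatting leaves behaviour unchanged: search_and_chat still returns a dict with "content" and "search_results". A minimal usage sketch mirroring the __main__ block above (it assumes the dashscope dependency is installed and the hard-coded API key is still valid):

from applications.utils.search.qwen import QwenClient

client = QwenClient()
# "agent" is the search_strategy used everywhere in this commit.
result = client.search_and_chat(user_prompt="example query", search_strategy="agent")

# Same iteration as in __main__: each result carries index, title and url.
for web in result["search_results"]:
    print(f"[{web['index']}]: [{web['title']}]({web['url']})")
print(result["content"])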

mcp_server/server.py (+4, -2)

@@ -19,7 +19,7 @@ def create_mcp_server() -> Server:
 
     @app.call_tool()
     async def call_tool(
-            name: str, arguments: Dict[str, Any]
+        name: str, arguments: Dict[str, Any]
     ) -> List[types.TextContent]:
         """处理工具调用"""
         # ctx = app.request_context
@@ -79,7 +79,9 @@ async def process_question(question, query_text, rag_chat_agent):
             study_task_id = study(question)["task_id"]
 
         qwen_client = QwenClient()
-        llm_search = qwen_client.search_and_chat(user_prompt=question, search_strategy="agent")
+        llm_search = qwen_client.search_and_chat(
+            user_prompt=question, search_strategy="agent"
+        )
 
         # 执行决策逻辑
         decision = await rag_chat_agent.make_decision(question, chat_result, llm_search)
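
One thing the reformatting makes easier to see in mcp_server/server.py: search_and_chat is a synchronous, blocking call issued inside the async process_question. If that ever becomes a concern for the server's event loop, a hedged sketch of off-loading it to a worker thread (assuming Python 3.9+ for asyncio.to_thread; the helper name is hypothetical, not part of the repo):

import asyncio

from applications.utils.search.qwen import QwenClient

async def search_without_blocking(question: str) -> dict:
    # Run the synchronous dashscope call in a thread so the event loop
    # serving other tool calls keeps running.
    qwen_client = QwenClient()
    return await asyncio.to_thread(
        qwen_client.search_and_chat,
        user_prompt=question,
        search_strategy="agent",
    )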

routes/blueprint.py (+19, -13)

@@ -303,16 +303,16 @@ async def content_list():
 
 
 async def query_search(
-        query_text,
-        filters=None,
-        search_type="",
-        anns_field="vector_text",
-        search_params=BASE_MILVUS_SEARCH_PARAMS,
-        _source=False,
-        es_size=10000,
-        sort_by=None,
-        milvus_size=20,
-        limit=10,
+    query_text,
+    filters=None,
+    search_type="",
+    anns_field="vector_text",
+    search_params=BASE_MILVUS_SEARCH_PARAMS,
+    _source=False,
+    es_size=10000,
+    sort_by=None,
+    milvus_size=20,
+    limit=10,
 ):
     if filters is None:
         filters = {}
@@ -416,7 +416,9 @@ async def chat():
     rag_chat_agent = RAGChatAgent()
     qwen_client = QwenClient()
     chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
-    llm_search = qwen_client.search_and_chat(user_prompt=query_text, search_strategy="agent")
+    llm_search = qwen_client.search_and_chat(
+        user_prompt=query_text, search_strategy="agent"
+    )
     decision = await rag_chat_agent.make_decision(query_text, chat_result, llm_search)
     data = {
         "results": query_results,
@@ -561,8 +563,12 @@ async def process_question(question, query_text, rag_chat_agent):
             study_task_id = study(question)["task_id"]
 
         qwen_client = QwenClient()
-        llm_search = qwen_client.search_and_chat(user_prompt=query, search_strategy="agent")
-        decision = await rag_chat_agent.make_decision(query_text, chat_result, llm_search)
+        llm_search = qwen_client.search_and_chat(
+            user_prompt=query, search_strategy="agent"
+        )
+        decision = await rag_chat_agent.make_decision(
+            query_text, chat_result, llm_search
+        )
 
         # 构建返回的数据
         data = {