# server.py — MCP RAG server
import asyncio
import json
from typing import Any, Dict, List

import mcp.types as types
from mcp.server.lowlevel import Server

from applications.resource import get_resource_manager
from applications.utils.chat import RAGChatAgent
from applications.utils.mysql import ChatResult
from routes.buleprint import query_search
  10. def create_mcp_server() -> Server:
  11. """创建并配置MCP服务器"""
  12. app = Server("mcp-rag-server")
  13. @app.call_tool()
  14. async def call_tool(
  15. name: str, arguments: Dict[str, Any]
  16. ) -> List[types.TextContent]:
  17. """处理工具调用"""
  18. # ctx = app.request_context
  19. if name == "rag-search":
  20. data = await rag_search(arguments["query_text"])
  21. result = json.dumps(data, ensure_ascii=False, indent=2)
  22. else:
  23. raise ValueError(f"Unknown tool: {name}")
  24. return [types.TextContent(type="text", text=result)]
  25. @app.list_tools()
  26. async def list_tools() -> List[types.Tool]:
  27. return [
  28. types.Tool(
  29. name="rag-search",
  30. title="RAG搜索",
  31. description="搜索内容并生成总结",
  32. inputSchema={
  33. "type": "object",
  34. "properties": {
  35. "query_text": {
  36. "type": "string",
  37. "description": "用户输入的查询文本",
  38. }
  39. },
  40. "required": ["query_text"], # 只强制 query_text 必填
  41. "additionalProperties": False,
  42. },
  43. ),
  44. ]
  45. return app
  46. async def rag_search(query_text: str):
  47. dataset_id_strs = "11,12"
  48. dataset_ids = dataset_id_strs.split(",")
  49. search_type = "hybrid"
  50. query_results = await query_search(
  51. query_text=query_text,
  52. filters={"dataset_id": dataset_ids},
  53. search_type=search_type,
  54. )
  55. resource = get_resource_manager()
  56. chat_result_mapper = ChatResult(resource.mysql_client)
  57. rag_chat_agent = RAGChatAgent()
  58. chat_result = await rag_chat_agent.chat_with_deepseek(query_text, query_results)
  59. llm_search_result = await rag_chat_agent.llm_search(query_text)
  60. decision = await rag_chat_agent.make_decision(chat_result, llm_search_result)
  61. data = {
  62. "result": decision["result"],
  63. "status": decision["status"],
  64. "relevance_score": decision["relevance_score"],
  65. }
  66. await chat_result_mapper.insert_chat_result(
  67. query_text,
  68. dataset_id_strs,
  69. json.dumps(query_results, ensure_ascii=False),
  70. chat_result["summary"],
  71. chat_result["relevance_score"],
  72. chat_result["status"],
  73. llm_search_result["answer"],
  74. llm_search_result["source"],
  75. llm_search_result["status"],
  76. )
  77. return data