agent.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Agent service rebuilt on FastAPI + LangGraph.

Provides workflow management and state control for content
identification and structured storage.
"""
import json
import sys
import os
import time
from typing import Any, Dict, List, Optional, TypedDict, Annotated
from contextlib import asynccontextmanager

# Make project-local modules importable regardless of the working directory.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi.responses import JSONResponse
from pydantic import BaseModel, Field
import uvicorn

# LangGraph is optional: when it is missing the service falls back to a
# plain sequential processing loop.
try:
    from langgraph.graph import StateGraph, END
    HAS_LANGGRAPH = True
except ImportError:
    HAS_LANGGRAPH = False
    print("警告: LangGraph 未安装,将使用传统模式")

from utils.logging_config import get_logger
from agent_tools import QueryDataTool, IdentifyTool, StructureTool

# Module-level logger for this service.
logger = get_logger('Agent')
# State definition: shared state threaded through the LangGraph workflow nodes.
class AgentState(TypedDict):
    """Workflow state for a single parse request."""
    request_id: str                            # ID of the request being processed
    items: List[Dict[str, Any]]                # crawl records fetched for the request
    details: List[Dict[str, Any]]              # per-item processing outcome records
    processed: int                             # total number of items fetched
    success: int                               # number of items stored successfully
    current_index: int                         # index of the next item to process
    current_item: Optional[Dict[str, Any]]     # item currently being handled
    identify_result: Optional[Dict[str, Any]]  # output of the last IdentifyTool.run
    error: Optional[str]                       # fatal error message, if any
    status: str                                # coarse workflow phase marker
# Request model for the /parse endpoints.
class TriggerRequest(BaseModel):
    """Incoming payload identifying the processing task."""
    requestId: str = Field(..., description="请求ID")
# Response model for the synchronous /parse endpoint.
class TriggerResponse(BaseModel):
    """Summary of a completed parse run."""
    requestId: str                 # echoes the request ID
    processed: int                 # number of items fetched for processing
    success: int                   # number of items stored successfully
    details: List[Dict[str, Any]]  # per-item outcome records
# Global IdentifyTool instance; created once at startup in lifespan().
identify_tool = None


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan hook: initialize shared tools at startup."""
    # Startup: build the identification tool once and share it across
    # requests (handlers read the module-level `identify_tool`).
    global identify_tool
    identify_tool = IdentifyTool()
    logger.info("Agent 服务启动完成")
    yield
    # Shutdown: nothing to release beyond logging.
    logger.info("Agent 服务正在关闭")
# Create the FastAPI application; `lifespan` wires up shared tool setup.
app = FastAPI(
    title="Knowledge Agent API",
    description="基于 LangGraph 的智能内容识别和结构化处理服务",
    version="2.0.0",
    lifespan=lifespan
)
  70. # =========================
  71. # LangGraph 工作流定义
  72. # =========================
  73. def create_langgraph_workflow():
  74. """创建 LangGraph 工作流"""
  75. if not HAS_LANGGRAPH:
  76. return None
  77. # 工作流节点定义
  78. def fetch_data(state: AgentState) -> AgentState:
  79. """获取待处理数据"""
  80. try:
  81. request_id = state["request_id"]
  82. logger.info(f"开始获取数据: requestId={request_id}")
  83. items = QueryDataTool.fetch_crawl_data_list(request_id)
  84. state["items"] = items
  85. state["processed"] = len(items)
  86. state["status"] = "data_fetched"
  87. logger.info(f"数据获取完成: requestId={request_id}, 数量={len(items)}")
  88. return state
  89. except Exception as e:
  90. logger.error(f"获取数据失败: {e}")
  91. state["error"] = str(e)
  92. state["status"] = "error"
  93. return state
  94. def process_item(state: AgentState) -> AgentState:
  95. """处理单个数据项"""
  96. try:
  97. items = state["items"]
  98. current_index = state.get("current_index", 0)
  99. if current_index >= len(items):
  100. state["status"] = "completed"
  101. return state
  102. item = items[current_index]
  103. state["current_item"] = item
  104. state["current_index"] = current_index + 1
  105. # 处理当前项
  106. crawl_data = item.get('crawl_data') or {}
  107. # Step 1: 识别
  108. identify_result = identify_tool.run(
  109. crawl_data if isinstance(crawl_data, dict) else {}
  110. )
  111. state["identify_result"] = identify_result
  112. # Step 2: 结构化并入库
  113. affected = StructureTool.store_parsing_result(
  114. state["request_id"],
  115. item.get('raw') or {},
  116. identify_result
  117. )
  118. ok = affected is not None and affected > 0
  119. if ok:
  120. state["success"] += 1
  121. # 记录处理详情
  122. detail = {
  123. "index": current_index + 1,
  124. "dbInserted": ok,
  125. "identifyError": identify_result.get('error'),
  126. "status": "success" if ok else "failed"
  127. }
  128. state["details"].append(detail)
  129. state["status"] = "item_processed"
  130. logger.info(f"处理进度: {current_index + 1}/{len(items)} - {'成功' if ok else '失败'}")
  131. return state
  132. except Exception as e:
  133. logger.error(f"处理第 {current_index + 1} 项时出错: {e}")
  134. detail = {
  135. "index": current_index + 1,
  136. "dbInserted": False,
  137. "identifyError": str(e),
  138. "status": "error"
  139. }
  140. state["details"].append(detail)
  141. state["status"] = "item_error"
  142. return state
  143. def should_continue(state: AgentState) -> str:
  144. """判断是否继续处理"""
  145. if state.get("error"):
  146. return "end"
  147. current_index = state.get("current_index", 0)
  148. items = state.get("items", [])
  149. if current_index >= len(items):
  150. return "end"
  151. return "continue"
  152. # 构建工作流图
  153. workflow = StateGraph(AgentState)
  154. # 添加节点
  155. workflow.add_node("fetch_data", fetch_data)
  156. workflow.add_node("process_item", process_item)
  157. # 设置入口点
  158. workflow.set_entry_point("fetch_data")
  159. # 添加边
  160. workflow.add_edge("fetch_data", "process_item")
  161. workflow.add_conditional_edges(
  162. "process_item",
  163. should_continue,
  164. {
  165. "continue": "process_item",
  166. "end": END
  167. }
  168. )
  169. # 编译工作流
  170. return workflow.compile()
  171. # 全局工作流实例
  172. WORKFLOW = create_langgraph_workflow() if HAS_LANGGRAPH else None
  173. # =========================
  174. # FastAPI 接口定义
  175. # =========================
  176. @app.get("/")
  177. async def root():
  178. """根路径,返回服务信息"""
  179. return {
  180. "service": "Knowledge Agent API",
  181. "version": "2.0.0",
  182. "status": "running",
  183. "langgraph_enabled": HAS_LANGGRAPH,
  184. "endpoints": {
  185. "parse": "/parse",
  186. "parse/async": "/parse/async",
  187. "health": "/health",
  188. "docs": "/docs"
  189. }
  190. }
  191. @app.get("/health")
  192. async def health_check():
  193. """健康检查接口"""
  194. return {
  195. "status": "healthy",
  196. "timestamp": time.time(),
  197. "langgraph_enabled": HAS_LANGGRAPH
  198. }
  199. @app.post("/parse", response_model=TriggerResponse)
  200. async def parse_processing(request: TriggerRequest, background_tasks: BackgroundTasks):
  201. """
  202. 解析内容处理
  203. - **requestId**: 请求ID,用于标识处理任务
  204. """
  205. try:
  206. logger.info(f"收到解析请求: requestId={request.requestId}")
  207. if WORKFLOW and HAS_LANGGRAPH:
  208. # 使用 LangGraph 工作流
  209. logger.info("使用 LangGraph 工作流处理")
  210. # 初始化状态
  211. initial_state = AgentState(
  212. request_id=request.requestId,
  213. items=[],
  214. details=[],
  215. processed=0,
  216. success=0,
  217. current_index=0,
  218. current_item=None,
  219. identify_result=None,
  220. error=None,
  221. status="started"
  222. )
  223. # 执行工作流
  224. final_state = WORKFLOW.invoke(
  225. initial_state,
  226. config={"configurable": {"thread_id": f"thread_{request.requestId}"}}
  227. )
  228. # 构建响应
  229. result = TriggerResponse(
  230. requestId=request.requestId,
  231. processed=final_state.get("processed", 0),
  232. success=final_state.get("success", 0),
  233. details=final_state.get("details", [])
  234. )
  235. else:
  236. # 回退到传统模式
  237. logger.info("使用传统模式处理")
  238. # 获取待处理数据
  239. items = QueryDataTool.fetch_crawl_data_list(request.requestId)
  240. if not items:
  241. return TriggerResponse(
  242. requestId=request.requestId,
  243. processed=0,
  244. success=0,
  245. details=[]
  246. )
  247. # 处理数据
  248. success_count = 0
  249. details: List[Dict[str, Any]] = []
  250. for idx, item in enumerate(items, start=1):
  251. try:
  252. crawl_data = item.get('crawl_data') or {}
  253. # Step 1: 识别
  254. identify_result = identify_tool.run(
  255. crawl_data if isinstance(crawl_data, dict) else {}
  256. )
  257. # Step 2: 结构化并入库
  258. affected = StructureTool.store_parsing_result(
  259. request.requestId,
  260. item.get('raw') or {},
  261. identify_result
  262. )
  263. ok = affected is not None and affected > 0
  264. if ok:
  265. success_count += 1
  266. details.append({
  267. "index": idx,
  268. "dbInserted": ok,
  269. "identifyError": identify_result.get('error'),
  270. "status": "success" if ok else "failed"
  271. })
  272. except Exception as e:
  273. logger.error(f"处理第 {idx} 项时出错: {e}")
  274. details.append({
  275. "index": idx,
  276. "dbInserted": False,
  277. "identifyError": str(e),
  278. "status": "error"
  279. })
  280. result = TriggerResponse(
  281. requestId=request.requestId,
  282. processed=len(items),
  283. success=success_count,
  284. details=details
  285. )
  286. logger.info(f"处理完成: requestId={request.requestId}, processed={result.processed}, success={result.success}")
  287. return result
  288. except Exception as e:
  289. logger.error(f"处理请求失败: {e}")
  290. raise HTTPException(status_code=500, detail=f"处理失败: {str(e)}")
  291. @app.post("/parse/async")
  292. async def parse_processing_async(request: TriggerRequest, background_tasks: BackgroundTasks):
  293. """
  294. 异步解析内容处理(后台任务)
  295. - **requestId**: 请求ID,用于标识处理任务
  296. """
  297. try:
  298. logger.info(f"收到异步解析请求: requestId={request.requestId}")
  299. # 添加后台任务
  300. background_tasks.add_task(process_request_background, request.requestId)
  301. return {
  302. "requestId": request.requestId,
  303. "status": "processing",
  304. "message": "任务已提交到后台处理",
  305. "langgraph_enabled": HAS_LANGGRAPH
  306. }
  307. except Exception as e:
  308. logger.error(f"提交异步任务失败: {e}")
  309. raise HTTPException(status_code=500, detail=f"提交任务失败: {str(e)}")
  310. async def process_request_background(request_id: str):
  311. """后台处理请求"""
  312. try:
  313. logger.info(f"开始后台处理: requestId={request_id}")
  314. if WORKFLOW and HAS_LANGGRAPH:
  315. # 使用 LangGraph 工作流
  316. initial_state = AgentState(
  317. request_id=request_id,
  318. items=[],
  319. details=[],
  320. processed=0,
  321. success=0,
  322. current_index=0,
  323. current_item=None,
  324. identify_result=None,
  325. error=None,
  326. status="started"
  327. )
  328. final_state = WORKFLOW.invoke(
  329. initial_state,
  330. config={"configurable": {"thread_id": f"thread_{request_id}"}}
  331. )
  332. logger.info(f"LangGraph 后台处理完成: requestId={request_id}, processed={final_state.get('processed', 0)}, success={final_state.get('success', 0)}")
  333. else:
  334. # 传统模式
  335. items = QueryDataTool.fetch_crawl_data_list(request_id)
  336. if not items:
  337. logger.info(f"后台处理完成: requestId={request_id}, 无数据需要处理")
  338. return
  339. success_count = 0
  340. for idx, item in enumerate(items, start=1):
  341. try:
  342. crawl_data = item.get('crawl_data') or {}
  343. identify_result = identify_tool.run(
  344. crawl_data if isinstance(crawl_data, dict) else {}
  345. )
  346. affected = StructureTool.store_parsing_result(
  347. request_id,
  348. item.get('raw') or {},
  349. identify_result
  350. )
  351. if affected is not None and affected > 0:
  352. success_count += 1
  353. logger.info(f"后台处理进度: {idx}/{len(items)} - {'成功' if affected else '失败'}")
  354. except Exception as e:
  355. logger.error(f"后台处理第 {idx} 项时出错: {e}")
  356. logger.info(f"传统模式后台处理完成: requestId={request_id}, processed={len(items)}, success={success_count}")
  357. except Exception as e:
  358. logger.error(f"后台处理失败: requestId={request_id}, error={e}")
if __name__ == "__main__":
    # Launch the service directly (development settings).
    uvicorn.run(
        "agent:app",
        host="0.0.0.0",
        port=8080,
        reload=True,  # dev mode: auto-reload on code changes
        log_level="info"
    )