async_consumer.py

import asyncio
import json
import signal
import traceback
from typing import List

from core.utils.log.logger_manager import LoggerManager
from core.utils.trace_utils import generate_trace_id
from services.async_mysql_service import AsyncMysqlService
from spiders.spider_registry import get_spider_class


async def async_handle_topic(topic: str, stop_event: asyncio.Event):
    """
    Consumption logic for a single topic, run as a coroutine:
    - consume messages from the MQ;
    - run the spider that matches the message content;
    - query configuration through the async database service;
    - write logs and acknowledge the message.
    """
    logger = LoggerManager.get_logger()
    aliyun_logger = LoggerManager.get_aliyun_logger()

    # Each topic gets its own consumer instance.
    from services.rocketmq_consumer import AsyncRocketMQConsumer
    consumer = AsyncRocketMQConsumer(topic_name=topic, group_id=topic)

    async def handle_single_message(message):
        trace_id = generate_trace_id()
        try:
            payload = json.loads(message.message_body)
            task_id = payload["id"]
            logger.info(f"{trace_id} - received task message: {task_id}")

            async with AsyncMysqlService("system", "crawler") as mysql:
                user_list = await mysql.get_user_list(task_id)
                rule_dict = await mysql.get_rule_dict(task_id)

            CrawlerClass = get_spider_class(topic)
            crawler = CrawlerClass(
                rule_dict=rule_dict,
                user_list=user_list,
                trace_id=trace_id
            )
            await crawler.run()

            # Ack only after run() has completed successfully.
            await consumer.ack_message(message.receipt_handle)
            logger.info(f"{trace_id} - task {task_id} finished successfully and was acked")
            aliyun_logger.logging(
                code="2000",
                message="task executed successfully",
                trace_id=trace_id,
                data={
                    "task_id": task_id,
                    "topic": topic
                }
            )
        except Exception as e:
            logger.error(f"{trace_id} - task processing failed: {e}\n{traceback.format_exc()}")
            aliyun_logger.logging(
                code="9001",
                message=f"failed to process message: {str(e)}",
                trace_id=trace_id,
                data={
                    "error_type": type(e).__name__,
                    "stack_trace": traceback.format_exc(),
                    "message_body": message.message_body
                }
            )

    # Keep the consume loop alive: restart it automatically until a stop signal arrives.
    while not stop_event.is_set():
        try:
            await consumer.run_forever(handle_single_message)
        except Exception as e:
            aliyun_logger.logging(
                code="9002",
                message=f"{topic} consume loop crashed, about to restart: {str(e)}",
                data={
                    "error_type": type(e).__name__,
                    "stack_trace": traceback.format_exc(),
                }
            )
            logger.warning(f"[{topic}] consume loop crashed: {e}, restarting in 5 seconds")
            await asyncio.sleep(5)


async def run_all_topics(topics: List[str]):
    stop_event = asyncio.Event()
    loop = asyncio.get_running_loop()

    def shutdown():
        print("[system] received stop signal, preparing graceful shutdown...")
        stop_event.set()

    # Register graceful-shutdown handlers (add_signal_handler is POSIX-only).
    for sig in [signal.SIGINT, signal.SIGTERM]:
        loop.add_signal_handler(sig, shutdown)

    tasks = [asyncio.create_task(async_handle_topic(topic, stop_event)) for topic in topics]

    await stop_event.wait()  # wait for the stop signal

    print("[system] cancelling all consumer tasks...")
    for task in tasks:
        task.cancel()
    results = await asyncio.gather(*tasks, return_exceptions=True)
    for idx, result in enumerate(results):
        if isinstance(result, Exception):
            print(f"[system] task {topics[idx]} exited with an exception: {result}")
    print("[system] all tasks have exited, shutting down the process")


def handle_topic_group(topics: List[str]):
    """Child-process entry point: start the async event loop for this group of topics."""
    asyncio.run(run_all_topics(topics))
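

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the original module): one way
# to drive handle_topic_group is to split the registered topics into groups
# and run each group in its own process, so that every process owns a single
# event loop and its own consumers. The topic names and grouping below are
# hypothetical.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from multiprocessing import Process

    topic_groups = [
        ["crawler_topic_a", "crawler_topic_b"],  # hypothetical topic names
        ["crawler_topic_c"],
    ]
    processes = [Process(target=handle_topic_group, args=(group,)) for group in topic_groups]
    for p in processes:
        p.start()
    for p in processes:
        p.join()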