# async_consumer.py — per-topic async MQ consumer entry points.
  1. import asyncio
  2. import json
  3. import traceback
  4. from typing import List
  5. import signal
  6. from core.utils.log.logger_manager import LoggerManager
  7. from core.utils.trace_utils import generate_trace_id, TraceContext
  8. from services.async_mysql_service import AsyncMysqlService
  9. from spiders.spider_registry import get_spider_class
# Module-wide loggers shared by every coroutine in this consumer process.
logger = LoggerManager.get_logger()  # local application logger
aliyun_logger = LoggerManager.get_aliyun_logger()  # Aliyun SLS logger for ops/audit events
  12. async def async_handle_topic(topic: str, stop_event: asyncio.Event):
  13. """
  14. 单个 topic 的消费逻辑,运行在协程中:
  15. - 从 MQ 中消费消息(单条处理,处理完再拉取下一条);
  16. - 根据消息内容执行对应爬虫;
  17. - 使用异步数据库服务查询配置;
  18. - 记录日志、确认消息。
  19. """
  20. # 每个 topic 创建独立的 consumer 实例(使用优化后的 AsyncRocketMQConsumer)
  21. from services.async_mq_consumer import AsyncRocketMQConsumer
  22. consumer = AsyncRocketMQConsumer(topic_name=topic, group_id=topic)
  23. async def handle_single_message(message):
  24. """处理单条消息的业务逻辑(不含拉取和循环)"""
  25. with TraceContext() as trace_id: # 生成 trace_id 并绑定到上下文
  26. try:
  27. payload = json.loads(message.message_body)
  28. task_id = payload["id"]
  29. logger.info(f"[{topic}]接收到任务消息: {task_id}")
  30. # 确认消息(单条消息处理成功后才 Ack)
  31. await consumer.ack_message(message.receipt_handle)
  32. logger.info(f"[{topic}]任务 {task_id} 已 Ack")
  33. aliyun_logger.logging(
  34. code="1000",
  35. message="任务接收成功",
  36. data=payload,
  37. account=topic
  38. )
  39. # 从数据库查询配置
  40. async with AsyncMysqlService() as mysql:
  41. user_list = await mysql.get_user_list(task_id)
  42. rule_dict = await mysql.get_rule_dict(task_id)
  43. # 执行爬虫任务
  44. CrawlerClass = get_spider_class(topic)
  45. crawler = CrawlerClass(
  46. rule_dict=rule_dict,
  47. user_list=user_list,
  48. )
  49. await crawler.run() # 爬虫成功执行后再确认消息
  50. logger.info(f"[{topic}]任务 {task_id} 执行成功")
  51. aliyun_logger.logging(
  52. code="1010",
  53. message="任务执行成功",
  54. data={"task_id": task_id, "topic": topic},
  55. account=topic
  56. )
  57. except Exception as e:
  58. logger.error(f"[{topic}]任务处理失败: {e} \n {traceback.format_exc()}")
  59. aliyun_logger.logging(
  60. code="9001",
  61. message=f"处理消息失败: {str(e)} \n {traceback.format_exc()}",
  62. data={
  63. "error_type": type(e).__name__,
  64. "stack_trace": traceback.format_exc(),
  65. "message_body": message.message_body
  66. },
  67. account=topic
  68. )
  69. # 处理失败不 Ack,消息会被 MQ 重新投递(依赖 MQ 的重试机制)
  70. # 独立的消费循环:拉取消息并调用处理函数
  71. async def consume_loop():
  72. logger.info(f"[{topic}] 启动消费循环,开始拉取消息...")
  73. while not stop_event.is_set(): # 监听停止信号,支持优雅退出
  74. try:
  75. # 拉取单条消息(依赖优化后的 receive_message,无消息时返回 None 不报错)
  76. message = await consumer.receive_message()
  77. if message:
  78. # 有消息则处理,处理完成后再进入下一次循环
  79. await handle_single_message(message)
  80. else:
  81. # 无消息时短暂休眠,避免频繁空轮询
  82. await asyncio.sleep(1)
  83. except Exception as e:
  84. # 非消息处理的异常(如 MQ 连接失败),记录并重试
  85. logger.error(f"[{topic}] 消费循环异常: {e}", exc_info=True)
  86. aliyun_logger.logging(
  87. code="9002",
  88. message=f"{topic} 消费循环异常,即将重试: {str(e)}",
  89. data={"error_type": type(e).__name__, "stack_trace": traceback.format_exc()},
  90. account=topic
  91. )
  92. await asyncio.sleep(5) # 异常后延迟重试,减轻服务压力
  93. logger.info(f"[{topic}] 消费循环已停止(收到退出信号)")
  94. # 启动消费循环(这是消费逻辑的入口)
  95. await consume_loop()
  96. async def run_all_topics(topics: List[str]):
  97. stop_event = asyncio.Event()
  98. loop = asyncio.get_running_loop()
  99. def shutdown():
  100. """处理停止信号(如 Ctrl+C),触发优雅退出"""
  101. logger.warning("[系统] 收到停止信号,准备优雅退出...")
  102. aliyun_logger.logging(
  103. code="1600",
  104. message="[系统] 收到停止信号,准备优雅退出...",
  105. )
  106. stop_event.set()
  107. # 注册信号处理(支持 Ctrl+C 和 kill 命令)
  108. for sig in [signal.SIGINT, signal.SIGTERM]:
  109. loop.add_signal_handler(sig, shutdown)
  110. # 为每个 topic 创建独立协程任务
  111. tasks = [asyncio.create_task(async_handle_topic(topic, stop_event)) for topic in topics]
  112. await stop_event.wait() # 等待退出信号
  113. # 取消所有任务并等待结束
  114. logger.warning(f"[系统] 正在取消所有消费任务...")
  115. for task in tasks:
  116. task.cancel()
  117. # 收集任务结果,忽略取消异常
  118. results = await asyncio.gather(*tasks, return_exceptions=True)
  119. for idx, result in enumerate(results):
  120. if isinstance(result, Exception) and not isinstance(result, asyncio.CancelledError):
  121. logger.error(f"[系统] 任务 {topics[idx]} 异常退出: {result}")
  122. logger.warning(f"[系统] 所有任务已退出,进程已关闭...")
  123. aliyun_logger.logging(
  124. code="1602",
  125. message="[系统] 所有任务已退出,进程已关闭...",
  126. data={"task_count": len(tasks)}
  127. )
  128. def handle_topic_group(topics: List[str]):
  129. """子进程入口函数:启动异步事件循环处理该组 topics。"""
  130. asyncio.run(run_all_topics(topics))