# main_dev_version.py
"""
Created on January 4, 2024,
@author: luojunhui
description: test/dev version
"""
import asyncio
import json
import os
import sys

from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

# Must run before the application.* imports below, which are resolved
# relative to the working directory.
sys.path.append(os.getcwd())

from application.common import get_consumer, ack_message
from application.config import TopicGroup
  13. async def run(task_id, mode, platform):
  14. """
  15. 传入参数,然后根据参数执行爬虫代码
  16. :param task_id: 任务id
  17. :param mode: 任务类型
  18. :param platform: 哪个抓取平台
  19. :return: None
  20. """
  21. # 创建一个aliyun日志对象
  22. message = "{}: 开始一轮抓取".format(platform)
  23. print(message)
  24. # 创建并一个子进程
  25. await asyncio.create_subprocess_shell(
  26. "python3 scheduler/run_spider_online.py --task_id {} --mode {} --platform {}".format(task_id, mode, platform)
  27. )
  28. print("successfully run spider")
  29. async def consume_single_message(spider):
  30. """
  31. 消费单个消息,若消费成功则启动爬虫新协程;
  32. :param spider: 爬虫类
  33. """
  34. topic = spider['topic']
  35. group = spider['group']
  36. platform = spider['platform']
  37. mode = spider['mode']
  38. consumer = get_consumer(topic, group)
  39. try:
  40. messages = consumer.consume_message(wait_seconds=10, batch_size=1)
  41. if messages:
  42. # 在这里消费消息,做一些数据处理分析
  43. for single_message in messages:
  44. ack_message(
  45. mode=mode,
  46. platform=platform,
  47. recv_msgs=messages,
  48. consumer=consumer
  49. )
  50. message="successfully consumed message"
  51. print(message)
  52. message_body = single_message.message_body
  53. task_id = json.loads(message_body)['id']
  54. # 创建爬虫task
  55. await asyncio.create_task(run(task_id, spider['mode'], spider['platform']))
  56. message="successfully created task"
  57. print(message)
  58. else:
  59. message="Messages Queue is Empty"
  60. print(message)
  61. except MQExceptionBase as err:
  62. # Topic中没有消息可消费。
  63. if err.type == "MessageNotExist":
  64. message = "No new message! RequestId:{}\n".format(err.req_id)
  65. print(message)
  66. else:
  67. message = "Consume Message Fail! Exception:{}\n".format(err)
  68. print(message)
  69. async def main():
  70. """
  71. 主函数
  72. """
  73. spider_list = TopicGroup().produce()
  74. while spider_list:
  75. async_tasks = []
  76. for spider in spider_list:
  77. task = asyncio.create_task(consume_single_message(spider))
  78. async_tasks.append(task)
  79. await asyncio.gather(*async_tasks)
  80. # await asyncio.sleep(60) # 每分钟接收一次MQ,
  81. if __name__ == '__main__':
  82. # 运行主事件循环
  83. asyncio.run(main())