import asyncio
import json
import os
import sys

from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

sys.path.append(os.getcwd())

from application.common import AliyunLogger, get_consumer, ack_message
from application.config import TopicGroup


async def run(task_id, mode, platform):
    """
    Receive the task parameters and run the spider accordingly.
    :param task_id: task id
    :param mode: task type
    :param platform: which crawl platform
    :return: None
    """
    # Create an Aliyun logger object
    logger = AliyunLogger(platform=platform, mode=mode)
    logger.logging(
        code=1003,
        message="{}: starting a crawl round".format(platform)
    )
    # Spawn a subprocess for the spider (this only waits for the
    # process to be created, not for it to finish)
    await asyncio.create_subprocess_shell(
        "python3 scheduler/run_spider_online.py --task_id {} --mode {} --platform {}".format(task_id, mode, platform)
    )


async def consume_single_message(spider):
    """
    Consume a single message; on success, start the spider in a new coroutine.
    :param spider: spider config dict
    """
    topic = spider['topic']
    group = spider['group']
    consumer = get_consumer(topic, group)
    try:
        # Long-poll the queue for up to 10 seconds, one message per batch
        messages = consumer.consume_message(wait_seconds=10, batch_size=1)
        if messages:
            for single_message in messages:
                # Acknowledge first so the message is not redelivered
                ack_message(mode=spider['mode'], platform=spider['platform'],
                            recv_msgs=messages, consumer=consumer)
                message_body = single_message.message_body
                task_id = json.loads(message_body)['id']
                print("Message consumed successfully, preparing to start the spider task")
                print(message_body)
                # Launch the spider task and wait for the subprocess to be spawned
                await run(task_id, spider['mode'], spider['platform'])
                print("Spider task started")
        else:
            message = "Messages Queue is Empty"
            print(message)
    except MQExceptionBase as err:
        # No message available in the topic
        if err.type == "MessageNotExist":
            message = f"No new message! RequestId:{err.req_id}\n"
            print(message)
        else:
            message = f"Consume Message Fail! Exception:{err}\n"
            print(message)


async def main():
    """
    Main entry point: poll every spider's queue concurrently, then sleep.
    """
    spider_list = TopicGroup().produce()
    while spider_list:
        async_tasks = []
        for spider in spider_list:
            task = asyncio.create_task(consume_single_message(spider))
            async_tasks.append(task)
        await asyncio.gather(*async_tasks)
        await asyncio.sleep(60)  # poll MQ once a minute


if __name__ == '__main__':
    # Run the main event loop
    asyncio.run(main())
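
# ---------------------------------------------------------------------------
# Illustrative sketch (an assumption, not part of this scheduler): the code
# above implies that TopicGroup().produce() yields dicts carrying 'topic',
# 'group', 'mode' and 'platform' keys, and that each MQ message body is a
# JSON object with an 'id' field. The concrete values below are hypothetical
# examples for reading the code, not real configuration:
#
#     spider = {
#         "topic": "spider_task_topic",   # hypothetical MQ topic name
#         "group": "GID_spider_group",    # hypothetical consumer group id
#         "mode": "recommend",            # hypothetical task type
#         "platform": "example_platform", # hypothetical platform name
#     }
#     message_body = '{"id": 12345}'      # json.loads(message_body)['id'] -> 12345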