run_dy_author.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/6/7
import argparse
import os
import sys
import time

from mq_http_sdk.mq_client import *
from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

# Make the repo-local packages importable when the script is run from the repo root.
sys.path.append(os.getcwd())
from common.common import Common
from common.public import get_consumer, ack_message, task_fun_mq
from common.scheduling_db import MysqlHelper
from douyin.douyin_author.douyin_author_scheduling import DouyinauthorScheduling
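
# Overview: the consumer long-polls the MQ topic; each message body carries a
# crawler task that task_fun_mq unpacks into 'task_dict' and 'rule_dict', the
# task's users are read from crawler_user_v3, and the batch is handed to
# DouyinauthorScheduling.get_author_videos.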


def main(log_type, crawler, topic_name, group_id, env):
    consumer = get_consumer(topic_name, group_id)
    # Long polling: if the topic has no messages, the client request is held on
    # the server for 3 seconds; if a message becomes consumable within that
    # window, a response is returned immediately.
    # Long-polling time: 3 seconds (can be set to at most 30 seconds).
    wait_seconds = 3
    # Consume at most 1 message per batch (can be set to at most 16).
    batch = 1
    Common.logger(log_type, crawler).info(f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                          f'WaitSeconds:{wait_seconds}\n'
                                          f'TopicName:{topic_name}\n'
                                          f'MQConsumer:{group_id}')
    while True:
        try:
            # Consume messages via long polling.
            recv_msgs = consumer.consume_message(batch, wait_seconds)
            for msg in recv_msgs:
                Common.logger(log_type, crawler).info(f"Receive\n"
                                                      f"MessageId:{msg.message_id}\n"
                                                      f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                      f"MessageTag:{msg.message_tag}\n"
                                                      f"ConsumedTimes:{msg.consumed_times}\n"
                                                      f"PublishTime:{msg.publish_time}\n"
                                                      f"Body:{msg.message_body}\n"
                                                      f"NextConsumeTime:{msg.next_consume_time}\n"
                                                      f"ReceiptHandle:{msg.receipt_handle}\n"
                                                      f"Properties:{msg.properties}")
                # Acknowledge the batch up front so the message is not
                # redelivered while the (long-running) crawl below executes.
                ack_message(log_type=log_type, crawler=crawler, recv_msgs=recv_msgs, consumer=consumer)
                # Handle the crawl task carried in the message body.
                message = task_fun_mq(msg.message_body)
                task_dict = message['task_dict']
                rule_dict = message['rule_dict']
                task_id = task_dict['id']
                select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
                user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
                Common.logger(log_type, crawler).info(f"Scheduled task: {task_dict}")
                Common.logger(log_type, crawler).info(f"Crawl rules: {rule_dict}")
                # Common.logger(log_type, crawler).info(f"User list: {user_list}\n")
                Common.logger(log_type, crawler).info(f'Start crawling {task_dict["taskName"]}\n')
                DouyinauthorScheduling.get_author_videos(log_type=log_type,
                                                         crawler=crawler,
                                                         rule_dict=rule_dict,
                                                         user_list=user_list,
                                                         env=env)
                Common.del_logs(log_type, crawler)
                Common.logger(log_type, crawler).info('Crawl round finished\n')
        except MQExceptionBase as err:
            # The topic has no message to consume.
            if err.type == "MessageNotExist":
                Common.logger(log_type, crawler).info(f"No new message! RequestId:{err.req_id}\n")
                continue
            Common.logger(log_type, crawler).info(f"Consume Message Fail! Exception:{err}\n")
            time.sleep(2)
            continue


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the argument parser
    parser.add_argument('--log_type', type=str)  # declare an argument and its type
    parser.add_argument('--crawler')
    parser.add_argument('--topic_name')
    parser.add_argument('--group_id')
    parser.add_argument('--env')
    args = parser.parse_args()  # read argument values from the command line
    main(log_type=args.log_type,
         crawler=args.crawler,
         topic_name=args.topic_name,
         group_id=args.group_id,
         env=args.env)
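
# Example invocation (a sketch: the flag values below are illustrative, not
# taken from this repo):
#   python run_dy_author.py --log_type="author" --crawler="douyin" \
#       --topic_name="<topic>" --group_id="<group>" --env="dev"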