run_xng_hour.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/6/7
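"""
Entry point for the Xiaoniangao hourly crawler.

Long-polls an MQ topic for scheduling messages, acks each one, crawls the
hourly video feed, and updates/downloads the rising list during fixed
daily time windows.
"""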
import argparse
import datetime
import os
import random
import sys
import time

from mq_http_sdk.mq_client import *
from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

sys.path.append(os.getcwd())
from common.public import get_consumer, ack_message, task_fun_mq
from common.common import Common
from common.scheduling_db import MysqlHelper
from xiaoniangao.xiaoniangao_hour.xiaoniangao_hour_scheduling import XiaoniangaoHourScheduling


def main(log_type, crawler, topic_name, group_id, env):
    consumer = get_consumer(topic_name, group_id)
    # Long polling: if the topic has no message, the request is held on the
    # server for up to wait_seconds and returns immediately once a message
    # becomes consumable. The maximum allowed value is 30 seconds.
    wait_seconds = 30
    # Number of messages consumed per request (the maximum allowed is 16).
    batch = 1
    Common.logger(log_type, crawler).info(f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                          f'WaitSeconds:{wait_seconds}\n'
                                          f'TopicName:{topic_name}\n'
                                          f'MQConsumer:{group_id}')
    Common.logging(log_type, crawler, env, f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                           f'WaitSeconds:{wait_seconds}\n'
                                           f'TopicName:{topic_name}\n'
                                           f'MQConsumer:{group_id}')
    while True:
        try:
            # Consume messages via long polling.
            recv_msgs = consumer.consume_message(batch, wait_seconds)
            for msg in recv_msgs:
                xng_hour_start_time = int(time.time())
                Common.logger(log_type, crawler).info(f"Receive\n"
                                                      f"MessageId:{msg.message_id}\n"
                                                      f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                      f"MessageTag:{msg.message_tag}\n"
                                                      f"ConsumedTimes:{msg.consumed_times}\n"
                                                      f"PublishTime:{msg.publish_time}\n"
                                                      f"Body:{msg.message_body}\n"
                                                      f"NextConsumeTime:{msg.next_consume_time}\n"
                                                      f"ReceiptHandle:{msg.receipt_handle}\n"
                                                      f"Properties:{msg.properties}")
                Common.logging(log_type, crawler, env, f"Receive\n"
                                                       f"MessageId:{msg.message_id}\n"
                                                       f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                       f"MessageTag:{msg.message_tag}\n"
                                                       f"ConsumedTimes:{msg.consumed_times}\n"
                                                       f"PublishTime:{msg.publish_time}\n"
                                                       f"Body:{msg.message_body}\n"
                                                       f"NextConsumeTime:{msg.next_consume_time}\n"
                                                       f"ReceiptHandle:{msg.receipt_handle}\n"
                                                       f"Properties:{msg.properties}")
                # ack_mq_message
                ack_message(log_type=log_type, crawler=crawler, recv_msgs=recv_msgs, consumer=consumer)
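                # Note: the message is acked before the crawl work below runs,
                # so a failure during crawling will not cause MQ to redeliver it.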
                # Handle the crawler business logic
                task_info = task_fun_mq(msg.message_body)
                task_dict = task_info['task_dict']
                rule_dict = task_info['rule_dict']
                task_id = task_dict['id']
                select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
                user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
                our_uid_list = [user["uid"] for user in user_list]
                our_uid = random.choice(our_uid_list)
                Common.logger(log_type, crawler).info(f"Scheduling task:\n{task_dict}")
                Common.logging(log_type, crawler, env, f"Scheduling task:{task_dict}")
                Common.logger(log_type, crawler).info(f"Crawl rule:\n{rule_dict}")
                Common.logging(log_type, crawler, env, f"Crawl rule:{rule_dict}")
                Common.logger(log_type, crawler).info(f'Start crawling: {task_dict["taskName"]}\n')
                Common.logging(log_type, crawler, env, f'Start crawling: {task_dict["taskName"]}\n')
                # Fetch videos that match the rule and write them into the hourly _feeds table
                for i in range(1, 101):
                    try:
                        Common.logger(log_type, crawler).info(f"Crawling page {i}")
                        Common.logging(log_type, crawler, env, f"Crawling page {i}")
                        XiaoniangaoHourScheduling.get_videoList(log_type, crawler, rule_dict, env)
                    except Exception as err:
                        Common.logger(log_type, crawler).info(f"Exception while crawling page {i}: {err}\n")
                        Common.logging(log_type, crawler, env, f"Exception while crawling page {i}: {err}\n")
                now = datetime.datetime.now()
                # Update/download the rising list during the first ten minutes
                # of the 10:00, 15:00 and 20:00 hours.
                if now.hour in (10, 15, 20) and now.minute <= 10:
                    Common.logger(log_type, crawler).info("Start updating/downloading the rising list")
                    Common.logging(log_type, crawler, env, "Start updating/downloading the rising list")
                    XiaoniangaoHourScheduling.update_videoList(log_type=log_type,
                                                               crawler=crawler,
                                                               rule_dict=rule_dict,
                                                               our_uid=our_uid,
                                                               env=env)
                # Common.del_logs(log_type, crawler)
                Common.logger(log_type, crawler).info('Crawl round finished\n')
                Common.logging(log_type, crawler, env, 'Crawl round finished\n')
                xng_hour_end_time = int(time.time())
                xng_hour_duration = xng_hour_end_time - xng_hour_start_time
                Common.logger(log_type, crawler).info(f"duration {xng_hour_duration}")
                Common.logging(log_type, crawler, env, f"duration {xng_hour_duration}")
        except MQExceptionBase as err:
            # The topic has no message to consume.
            if err.type == "MessageNotExist":
                Common.logger(log_type, crawler).info(f"No new message! RequestId:{err.req_id}\n")
                Common.logging(log_type, crawler, env, f"No new message! RequestId:{err.req_id}\n")
                continue
            Common.logger(log_type, crawler).info(f"Consume Message Fail! Exception:{err}\n")
            Common.logging(log_type, crawler, env, f"Consume Message Fail! Exception:{err}\n")
            time.sleep(2)
            continue


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the argument parser
    parser.add_argument('--log_type', type=str)  # register the command-line arguments, declaring the type where needed
    parser.add_argument('--crawler')
    parser.add_argument('--topic_name')
    parser.add_argument('--group_id')
    parser.add_argument('--env')
    args = parser.parse_args()  # parse the values supplied on the command line
    main(log_type=args.log_type,
         crawler=args.crawler,
         topic_name=args.topic_name,
         group_id=args.group_id,
         env=args.env)
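
# Example invocation (the argument values are illustrative only; substitute
# the real MQ topic, consumer group and environment for your deployment):
#   python run_xng_hour.py --log_type="hour" --crawler="xiaoniangao" \
#       --topic_name="<topic>" --group_id="<group_id>" --env="prod"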