run_sph_author.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/6/30
import argparse
import os
import sys
import time

from mq_http_sdk.mq_client import *
from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

sys.path.append(os.getcwd())
from common.public import task_fun_mq, get_consumer, ack_message
from common.scheduling_db import MysqlHelper
from common import AliyunLogger
from shipinhao.shipinhao_author import ShiPinHaoAccount
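

# Overview: consume scheduling messages from an Aliyun MQ topic via long
# polling, acknowledge each message, parse the task / rule payload, load the
# task's user list from MySQL, then crawl each 视频号 (WeChat Channels)
# account with ShiPinHaoAccount.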
def main(log_type, crawler, topic_name, group_id, env):
    consumer = get_consumer(topic_name, group_id)
    # Long polling: if the topic has no messages, the request is held on the
    # server for up to wait_seconds and returns as soon as a message becomes
    # consumable (the maximum allowed value is 30 seconds).
    wait_seconds = 30
    # Number of messages pulled per request (the maximum allowed value is 16).
    batch = 1
    AliyunLogger.logging(
        code="1000",
        platform=crawler,
        mode=log_type,
        env=env,
        message=f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
        f"WaitSeconds:{wait_seconds}\n"
        f"TopicName:{topic_name}\n"
        f"MQConsumer:{group_id}",
    )
    while True:
        try:
            # Consume messages via long polling.
            recv_msgs = consumer.consume_message(batch, wait_seconds)
            for msg in recv_msgs:
                AliyunLogger.logging(
                    code="1000",
                    platform=crawler,
                    mode=log_type,
                    env=env,
                    message=f"Receive\n"
                    f"MessageId:{msg.message_id}\n"
                    f"MessageBodyMD5:{msg.message_body_md5}\n"
                    f"MessageTag:{msg.message_tag}\n"
                    f"ConsumedTimes:{msg.consumed_times}\n"
                    f"PublishTime:{msg.publish_time}\n"
                    f"Body:{msg.message_body}\n"
                    f"NextConsumeTime:{msg.next_consume_time}\n"
                    f"ReceiptHandle:{msg.receipt_handle}\n"
                    f"Properties:{msg.properties}",
                )
                # Acknowledge the message up front so it is not redelivered
                # while the (potentially long-running) crawl is in progress.
                ack_message(
                    log_type=log_type,
                    crawler=crawler,
                    recv_msgs=recv_msgs,
                    consumer=consumer,
                )
                # Parse task_dict
                task_dict = task_fun_mq(msg.message_body)["task_dict"]
                AliyunLogger.logging(
                    code="1000",
                    platform=crawler,
                    mode=log_type,
                    env=env,
                    message=f"调度任务:{task_dict}",
                )
                # Parse rule_dict
                rule_dict = task_fun_mq(msg.message_body)["rule_dict"]
                AliyunLogger.logging(
                    code="1000",
                    platform=crawler,
                    mode=log_type,
                    env=env,
                    message=f"抓取规则:{rule_dict}\n",
                )
                # Parse user_list
                task_id = task_dict["id"]
                select_user_sql = (
                    f"""select * from crawler_user_v3 where task_id={task_id}"""
                )
                user_list = MysqlHelper.get_values(
                    log_type, crawler, select_user_sql, env, action=""
                )
                AliyunLogger.logging(
                    code="1003",
                    platform=crawler,
                    mode=log_type,
                    env=env,
                    message="开始抓取",
                )
                for user_dict in user_list:
                    try:
                        AliyunLogger.logging(
                            code="1000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message="开始抓取视频号{}".format(user_dict["name"]),
                        )
                        # Initialize the account crawler and fetch this account's videos
                        SPHA = ShiPinHaoAccount(
                            platform=crawler,
                            mode=log_type,
                            rule_dict=rule_dict,
                            user_dict=user_dict,
                            env=env,
                        )
                        SPHA.get_account_videos()
                        AliyunLogger.logging(
                            code="1000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message="完成抓取视频号{}".format(user_dict["name"]),
                        )
                    except Exception as e:
                        AliyunLogger.logging(
                            code="3000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message="抓取视频号{}出现问题, 报错为{}".format(user_dict["name"], e),
                        )
                AliyunLogger.logging(
                    code="1004", platform=crawler, mode=log_type, env=env, message="结束一轮抓取"
                )
        except MQExceptionBase as err:
            # The topic has no consumable messages.
            if err.type == "MessageNotExist":
                AliyunLogger.logging(
                    code="2000",
                    platform=crawler,
                    mode=log_type,
                    env=env,
                    message=f"No new message! RequestId:{err.req_id}\n",
                )
                continue
            AliyunLogger.logging(
                code="2000",
                platform=crawler,
                mode=log_type,
                env=env,
                message=f"Consume Message Fail! Exception:{err}\n",
            )
            time.sleep(2)
            continue


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # create the argument parser
    parser.add_argument("--log_type", type=str)  # crawl mode, passed through as the logging mode
    parser.add_argument("--crawler")  # platform name
    parser.add_argument("--topic_name")  # MQ topic to consume from
    parser.add_argument("--group_id")  # MQ consumer group id
    parser.add_argument("--env")  # runtime environment
    args = parser.parse_args()  # values are supplied on the command line
    main(
        log_type=args.log_type,
        crawler=args.crawler,
        topic_name=args.topic_name,
        group_id=args.group_id,
        env=args.env,
    )
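
# Example invocation (the flag values below are illustrative, not the
# scheduler's actual configuration; substitute your own topic, group id
# and environment):
#   python run_sph_author.py --log_type="author" --crawler="shipinhao" \
#       --topic_name="<topic_name>" --group_id="<group_id>" --env="prod"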