run_xngrule_recommend.py

# -*- coding: utf-8 -*-
# @Author: luojunhui
# @Time: 2023/9/27
import argparse
import multiprocessing
import os
import random
import sys
import time

from mq_http_sdk.mq_client import *
from mq_http_sdk.mq_consumer import *
from mq_http_sdk.mq_exception import MQExceptionBase

sys.path.append(os.getcwd())
from common.public import get_consumer, ack_message, task_fun_mq, get_rule_from_mysql
from common.common import Common
from common.scheduling_db import MysqlHelper
from xiaoniangaoplus.xiaoniangaoplus.xiaoniangao_plus_rule import XiaoNianGaoPlusRecommend


def run(log_type, crawler, env, rule_dict, our_uid):
    # Thin wrapper so the recommend crawler can be launched in a child process.
    XiaoNianGaoPlusRecommend(
        log_type=log_type,
        crawler=crawler,
        env=env,
        rule_dict=rule_dict,
        our_uid=our_uid
    )


def main(log_type, crawler, topic_name, group_id, env):
    consumer = get_consumer(topic_name, group_id)
    # Long polling: if the topic has no messages, the request is held on the
    # server for up to wait_seconds and returns as soon as a message becomes
    # consumable. Long-poll wait time in seconds (30 is the maximum).
    wait_seconds = 30
    # Number of messages to consume per request (16 is the maximum).
    batch = 1
    Common.logger(log_type, crawler).info(f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                          f'WaitSeconds:{wait_seconds}\n'
                                          f'TopicName:{topic_name}\n'
                                          f'MQConsumer:{group_id}')
    Common.logging(log_type, crawler, env, f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                           f'WaitSeconds:{wait_seconds}\n'
                                           f'TopicName:{topic_name}\n'
                                           f'MQConsumer:{group_id}')
    while True:
        try:
            # Consume messages with long polling.
            recv_msgs = consumer.consume_message(batch, wait_seconds)
            for msg in recv_msgs:
                xng_play_start_time = int(time.time())
                Common.logger(log_type, crawler).info(f"Receive\n"
                                                      f"MessageId:{msg.message_id}\n"
                                                      f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                      f"MessageTag:{msg.message_tag}\n"
                                                      f"ConsumedTimes:{msg.consumed_times}\n"
                                                      f"PublishTime:{msg.publish_time}\n"
                                                      f"Body:{msg.message_body}\n"
                                                      f"NextConsumeTime:{msg.next_consume_time}\n"
                                                      f"ReceiptHandle:{msg.receipt_handle}\n"
                                                      f"Properties:{msg.properties}")
                Common.logging(log_type, crawler, env, f"Receive\n"
                                                       f"MessageId:{msg.message_id}\n"
                                                       f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                       f"MessageTag:{msg.message_tag}\n"
                                                       f"ConsumedTimes:{msg.consumed_times}\n"
                                                       f"PublishTime:{msg.publish_time}\n"
                                                       f"Body:{msg.message_body}\n"
                                                       f"NextConsumeTime:{msg.next_consume_time}\n"
                                                       f"ReceiptHandle:{msg.receipt_handle}\n"
                                                       f"Properties:{msg.properties}")
                # Acknowledge the message up front so it is not redelivered
                # while the long-running crawl below is in progress.
                ack_message(log_type=log_type, crawler=crawler, recv_msgs=recv_msgs, consumer=consumer)
                # Dispatch the crawler task.
                task = task_fun_mq(msg.message_body)
                task_dict = task['task_dict']
                rule_dict = task['rule_dict']
                task_id = task_dict['id']
                select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
                user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
                our_uid_list = [user["uid"] for user in user_list]
                our_uid = random.choice(our_uid_list)
                Common.logger(log_type, crawler).info(f"Scheduled task: {task_dict}")
                Common.logging(log_type, crawler, env, f"Scheduled task: {task_dict}")
                # Common.logger(log_type, crawler).info(f"Crawl rules: {rule_dict}")
                # Common.logging(log_type, crawler, env, f"Crawl rules: {rule_dict}")
                Common.logger(log_type, crawler).info(f"User list: {user_list}\n")
                Common.logger(log_type, crawler).info(f'Start crawling: {task_dict["taskName"]}\n')
                Common.logging(log_type, crawler, env, f'Start crawling: {task_dict["taskName"]}\n')
                new_r = get_rule_from_mysql(task_id=task_id, log_type=log_type, crawler=crawler, env=env)
                # Common.logger(log_type, crawler).info(f'rule_dict:{new_r}\n')
                # Merge the per-task rule rows into a single rule dict.
                r_d = {}
                for item in new_r:
                    for k, val in item.items():
                        r_d[k] = val
                Common.logger(log_type, crawler).info(f"Crawl rules: {r_d}")
                Common.logging(log_type, crawler, env, f"Crawl rules: {r_d}")
                # Run the crawl in a child process so it can be killed and
                # restarted if it exits or hangs.
                process = multiprocessing.Process(
                    target=run,
                    args=(log_type, crawler, env, r_d, our_uid)
                )
                process.start()
                print("Process started")
                # Watchdog loop: poll once a minute; if the crawler process has
                # died, reset adb, refresh the rules from MySQL, and restart it.
                while True:
                    if not process.is_alive():
                        print("Restarting")
                        process.terminate()
                        os.system("adb forward --remove-all")
                        time.sleep(60)
                        new_r = get_rule_from_mysql(task_id=task_id, log_type=log_type, crawler=crawler, env=env)
                        r_d = {}
                        for item in new_r:
                            for k, val in item.items():
                                r_d[k] = val
                        Common.logger(log_type, crawler).info(f'Crawl rules: {r_d}')
                        Common.logging(log_type, crawler, env, f"Crawl rules: {r_d}")
                        process = multiprocessing.Process(target=run, args=(log_type, crawler, env, r_d, our_uid))
                        process.start()
                    time.sleep(60)
                # XiaoNianGaoPlusRecommend.start_wechat(log_type=log_type,
                #                                       crawler=crawler,
                #                                       rule_dict=rule_dict,
                #                                       our_uid=our_uid,
                #                                       env=env)
                # Common.del_logs(log_type, crawler)
                # Common.logger(log_type, crawler).info('Crawl round finished\n')
                # Common.logging(log_type, crawler, env, 'Crawl round finished\n')
                # xng_play_end_time = int(time.time())
                # xng_play_duration = xng_play_end_time - xng_play_start_time
                # Common.logger(log_type, crawler).info(f"duration {xng_play_duration}")
                # Common.logging(log_type, crawler, env, f"duration {xng_play_duration}")
        except MQExceptionBase as err:
            # The topic has no messages available for consumption.
            if err.type == "MessageNotExist":
                Common.logger(log_type, crawler).info(f"No new message! RequestId:{err.req_id}\n")
                Common.logging(log_type, crawler, env, f"No new message! RequestId:{err.req_id}\n")
                continue
            Common.logger(log_type, crawler).info(f"Consume Message Fail! Exception:{err}\n")
            Common.logging(log_type, crawler, env, f"Consume Message Fail! Exception:{err}\n")
            time.sleep(2)
            continue


if __name__ == "__main__":
    parser = argparse.ArgumentParser()  # build the command-line parser
    parser.add_argument('--log_type', type=str)  # register arguments; type is optional
    parser.add_argument('--crawler')
    parser.add_argument('--topic_name')
    parser.add_argument('--group_id')
    parser.add_argument('--env')
    args = parser.parse_args()  # read argument values from the command line
    main(log_type=args.log_type,
         crawler=args.crawler,
         topic_name=args.topic_name,
         group_id=args.group_id,
         env=args.env)
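
# Example invocation (a sketch only: the topic, group, and env values below are
# illustrative placeholders, not names defined anywhere in this file):
#   python run_xngrule_recommend.py --log_type="recommend" --crawler="xiaoniangaoplus" \
#       --topic_name="xng_plus_recommend_topic" --group_id="xng_plus_recommend_group" --env="prod"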