丁云鹏 1 year ago
parent
commit
02ec727ce3

+ 0 - 0
xiaoniangao/xiaoniangao_main/run_xng_h5.py → xiaoniangao/xiaoniangao_main/run_xngh5_category.py


+ 0 - 0
xiaoniangao/xiaoniangao_main/run_xng_h5_dev.py → xiaoniangao/xiaoniangao_main/run_xngh5_category_dev.py


+ 158 - 0
xiaoniangao/xiaoniangao_main/run_xngh5_recommend.py

@@ -0,0 +1,158 @@
+# -*- coding: utf-8 -*-
+# @Author: luojunhui
+# @Time: 2023/9/25
+import argparse
+import os
+import random
+import sys
+import time
+
+from mq_http_sdk.mq_client import *
+from mq_http_sdk.mq_consumer import *
+from mq_http_sdk.mq_exception import MQExceptionBase
+
+sys.path.append(os.getcwd())
+from common.public import get_consumer, ack_message, task_fun_mq
+from common.common import Common
+from common.scheduling_db import MysqlHelper
+from xiaoniangao.xiaoniangao_xcx_rec.xiaoniangao_h5_schduling import (
+    XiaoNianGaoH5Scheduling,
+)
+
+
+def main(log_type, crawler, topic_name, group_id, env):
+    consumer = get_consumer(topic_name, group_id)
+    # Long polling: if the Topic has no messages, the request is held on the
+    # server for up to wait_seconds before returning (30 seconds max).
+    wait_seconds = 30
+    # Maximum number of messages consumed per request (16 max).
+    batch = 1
+    Common.logger(log_type, crawler).info(
+        f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+        f"WaitSeconds:{wait_seconds}\n"
+        f"TopicName:{topic_name}\n"
+        f"MQConsumer:{group_id}"
+    )
+    Common.logging(
+        log_type,
+        crawler,
+        env,
+        f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+        f"WaitSeconds:{wait_seconds}\n"
+        f"TopicName:{topic_name}\n"
+        f"MQConsumer:{group_id}",
+    )
+    while True:
+        try:
+            # Consume messages via long polling.
+            recv_msgs = consumer.consume_message(batch, wait_seconds)
+            for msg in recv_msgs:
+                xng_h5_start_time = int(time.time())
+                Common.logger(log_type, crawler).info(
+                    f"Receive\n"
+                    f"MessageId:{msg.message_id}\n"
+                    f"MessageBodyMD5:{msg.message_body_md5}\n"
+                    f"MessageTag:{msg.message_tag}\n"
+                    f"ConsumedTimes:{msg.consumed_times}\n"
+                    f"PublishTime:{msg.publish_time}\n"
+                    f"Body:{msg.message_body}\n"
+                    f"NextConsumeTime:{msg.next_consume_time}\n"
+                    f"ReceiptHandle:{msg.receipt_handle}\n"
+                    f"Properties:{msg.properties}"
+                )
+                Common.logging(
+                    log_type,
+                    crawler,
+                    env,
+                    f"Receive\n"
+                    f"MessageId:{msg.message_id}\n"
+                    f"MessageBodyMD5:{msg.message_body_md5}\n"
+                    f"MessageTag:{msg.message_tag}\n"
+                    f"ConsumedTimes:{msg.consumed_times}\n"
+                    f"PublishTime:{msg.publish_time}\n"
+                    f"Body:{msg.message_body}\n"
+                    f"NextConsumeTime:{msg.next_consume_time}\n"
+                    f"ReceiptHandle:{msg.receipt_handle}\n"
+                    f"Properties:{msg.properties}",
+                )
+                # Ack the batch so MQ does not redeliver these messages.
+                ack_message(
+                    log_type=log_type,
+                    crawler=crawler,
+                    recv_msgs=recv_msgs,
+                    consumer=consumer,
+                )
+
+                # Run the crawler task carried in the message body.
+                task = task_fun_mq(msg.message_body)
+                task_dict = task["task_dict"]
+                rule_dict = task["rule_dict"]
+                task_id = task_dict["id"]
+                select_user_sql = (
+                    f"""select * from crawler_user_v3 where task_id={task_id}"""
+                )
+                user_list = MysqlHelper.get_values(
+                    log_type, crawler, select_user_sql, env, action=""
+                )
+                # Pick one of the task's users at random as the publishing uid.
+                our_uid = random.choice([user["uid"] for user in user_list])
+                Common.logger(log_type, crawler).info(f"调度任务:{task_dict}")
+                Common.logging(log_type, crawler, env, f"调度任务:{task_dict}")
+                Common.logger(log_type, crawler).info(f"抓取规则:{rule_dict}")
+                Common.logging(log_type, crawler, env, f"抓取规则:{rule_dict}")
+                # Common.logger(log_type, crawler).info(f"用户列表:{user_list}\n")
+                Common.logger(log_type, crawler).info(f'开始抓取:{task_dict["taskName"]}\n')
+                Common.logging(
+                    log_type, crawler, env, f'开始抓取:{task_dict["taskName"]}\n'
+                )
+                XiaoNianGaoH5Scheduling.get_videoList(
+                    log_type=log_type,
+                    crawler=crawler,
+                    rule_dict=rule_dict,
+                    our_uid=our_uid,
+                    env=env,
+                )
+                # Common.del_logs(log_type, crawler)
+                Common.logger(log_type, crawler).info("Crawl round finished\n")
+                Common.logging(log_type, crawler, env, "Crawl round finished\n")
+                xng_h5_end_time = int(time.time())
+                # Elapsed seconds for this round.
+                xng_h5_duration = xng_h5_end_time - xng_h5_start_time
+                Common.logger(log_type, crawler).info(f"duration {xng_h5_duration}")
+                Common.logging(log_type, crawler, env, f"duration {xng_h5_duration}")
+        except MQExceptionBase as err:
+            # No message in the Topic to consume.
+            if err.type == "MessageNotExist":
+                Common.logger(log_type, crawler).info(
+                    f"No new message! RequestId:{err.req_id}\n"
+                )
+                Common.logging(
+                    log_type, crawler, env, f"No new message! RequestId:{err.req_id}\n"
+                )
+                continue
+
+            Common.logger(log_type, crawler).info(
+                f"Consume Message Fail! Exception:{err}\n"
+            )
+            Common.logging(
+                log_type, crawler, env, f"Consume Message Fail! Exception:{err}\n"
+            )
+            time.sleep(2)
+            continue
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()  # build the command-line parser
+    parser.add_argument("--log_type", type=str)  # log type, e.g. "recommend"
+    parser.add_argument("--crawler")  # crawler name, e.g. "xiaoniangao"
+    parser.add_argument("--topic_name")  # MQ topic to consume from
+    parser.add_argument("--group_id")  # MQ consumer group id
+    parser.add_argument("--env")  # runtime environment, e.g. "dev" or "prod"
+    args = parser.parse_args()  # values are supplied on the command line
+    main(
+        log_type=args.log_type,
+        crawler=args.crawler,
+        topic_name=args.topic_name,
+        group_id=args.group_id,
+        env=args.env,
+    )
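
`get_consumer` and `ack_message` are imported from `common/public.py`, which this commit does not touch. For context, a minimal sketch of what they plausibly wrap, assuming the stock Aliyun MQ HTTP SDK client API; the endpoint, credentials, and instance id below are hypothetical placeholders, not the project's real configuration:

from mq_http_sdk.mq_client import MQClient
from mq_http_sdk.mq_exception import MQExceptionBase

# Hypothetical connection settings; the real values live in common/public.py.
MQ_ENDPOINT = "http://<instance>.mqrest.<region>.aliyuncs.com"
MQ_ACCESS_KEY_ID = "<access-key-id>"
MQ_ACCESS_KEY_SECRET = "<access-key-secret>"
MQ_INSTANCE_ID = "<instance-id>"


def get_consumer(topic_name, group_id):
    """Bind an MQ consumer to one topic and one consumer group."""
    mq_client = MQClient(MQ_ENDPOINT, MQ_ACCESS_KEY_ID, MQ_ACCESS_KEY_SECRET)
    return mq_client.get_consumer(MQ_INSTANCE_ID, topic_name, group_id)


def ack_message(log_type, crawler, recv_msgs, consumer):
    """Ack a batch of messages by receipt handle; anything left unacked is
    redelivered once its invisibility window expires."""
    try:
        receipt_handle_list = [msg.receipt_handle for msg in recv_msgs]
        consumer.ack_message(receipt_handle_list)
    except MQExceptionBase as err:
        # MQ will redeliver on failure, so logging the error is enough here.
        print(f"[{log_type}][{crawler}] ack_message fail! Exception: {err}")

With helpers like these in place, the runner would be launched along the lines of `python xiaoniangao/xiaoniangao_main/run_xngh5_recommend.py --log_type recommend --crawler xiaoniangao --topic_name <topic> --group_id <group> --env prod`, where the topic and group names are deployment-specific.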

+ 30 - 0
xiaoniangao/xiaoniangao_main/run_xngh5_recommend_dev.py

@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# @Author: luojunhui
+# @Time: 2023/9/25
+import os
+import sys
+
+sys.path.append(os.getcwd())
+from common.common import Common
+from xiaoniangao.xiaoniangao_xcx_rec.xiaoniangao_h5_schduling import (
+    XiaoNianGaoH5Scheduling,
+)
+
+
+def xiaoniangao_recommend_main(log_type, crawler, env):
+    Common.logger(log_type, crawler).info("Start crawling: Xiaoniangao H5\n")
+    # Common.logging(log_type, crawler, env, "Start crawling: Xiaoniangao H5\n")
+    XiaoNianGaoH5Scheduling.get_videoList(
+        log_type=log_type,
+        crawler=crawler,
+        rule_dict={"duration": {"min": 40, "max": 0},
+                   "play_cnt": {"min": 20000, "max": 0},
+                   "period": {"min": 60, "max": 60}},
+        our_uid=6267140,
+        env=env)
+    Common.logger(log_type, crawler).info("抓取一轮结束\n")
+    # Common.logging(log_type, crawler, env, "抓取一轮结束\n")
+
+
+if __name__ == "__main__":
+    xiaoniangao_recommend_main("h5-recommend", "xiaoniangao", "dev")
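
The hard-coded rule_dict above asks for videos at least 40 seconds long, with at least 20,000 plays, published within the last 60 days. The actual filtering lives in xiaoniangao_h5_schduling.py, which this commit does not change; a minimal sketch of how such a rule_dict might be evaluated, assuming a "max" of 0 means the upper bound is not enforced and that "period" bounds the video's age in days:

import time


def video_matches_rules(video, rule_dict):
    """Hypothetical rule check mirroring the rule_dict shape used above."""
    # Lower and (optional) upper bounds; "max": 0 is read as "no upper bound".
    if video["duration"] < rule_dict["duration"]["min"]:
        return False
    if rule_dict["duration"]["max"] and video["duration"] > rule_dict["duration"]["max"]:
        return False
    if video["play_cnt"] < rule_dict["play_cnt"]["min"]:
        return False
    # period: the video must have been published within the last N days.
    days_old = (int(time.time()) - video["publish_time"]) // 86400
    if days_old > max(rule_dict["period"]["min"], rule_dict["period"]["max"]):
        return False
    return True


# A 50-second video with 25,000 plays published 10 days ago passes.
video = {"duration": 50, "play_cnt": 25000,
         "publish_time": int(time.time()) - 10 * 86400}
print(video_matches_rules(video, {"duration": {"min": 40, "max": 0},
                                  "play_cnt": {"min": 20000, "max": 0},
                                  "period": {"min": 60, "max": 60}}))  # True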