
Xiaoniangao - V2, youlegaoxiaoxiaoshipin_test

罗俊辉 1 year ago
parent
commit
c4a978adc2

+ 116 - 65
xiaoniangao/xiaoniangao_main/run_xng_author.py

@@ -1,29 +1,37 @@
 # -*- coding: utf-8 -*-
-# @Author: wangkun
-# @Time: 2023/6/7
+# @Author: luojunhui
+# @Time: 2023/10/23
 import argparse
 from mq_http_sdk.mq_client import *
 from mq_http_sdk.mq_consumer import *
 from mq_http_sdk.mq_exception import MQExceptionBase
+
 sys.path.append(os.getcwd())
-from common.public import get_consumer, ack_message, task_fun_mq
 from common.common import Common
+from common.public import task_fun_mq, get_consumer, ack_message
 from common.scheduling_db import MysqlHelper
-from xiaoniangao.xiaoniangao_author.xiaoniangao_author_scheduling import XiaoniangaoAuthorScheduling
+from common.aliyun_log import AliyunLogger
+from xiaoniangao.xiaoniangao_author import XiaoNianGaoAuthor
 
 
-def main(log_type, crawler, topic_name, group_id, env):
+def main(my_platform, mode, topic_name, group_id, env):
     consumer = get_consumer(topic_name, group_id)
     # 长轮询表示如果Topic没有消息,则客户端请求会在服务端挂起3秒,3秒内如果有消息可以消费则立即返回响应。
     # 长轮询时间3秒(最多可设置为30秒)。
     wait_seconds = 30
     # 一次最多消费3条(最多可设置为16条)。
     batch = 1
-    Common.logger(log_type, crawler).info(f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
-                                          f'WaitSeconds:{wait_seconds}\n'
-                                          f'TopicName:{topic_name}\n'
-                                          f'MQConsumer:{group_id}')
-    Common.logging(log_type, crawler, env, f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+    AliyunLogger.logging(
+        code="1000",
+        platform=my_platform,
+        mode=mode,
+        env=env,
+        message=f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+        f"WaitSeconds:{wait_seconds}\n"
+        f"TopicName:{topic_name}\n"
+        f"MQConsumer:{group_id}",
+    )
+    Common.logging(log_type=mode, crawler=my_platform, env=env, message=f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
                                            f'WaitSeconds:{wait_seconds}\n'
                                            f'TopicName:{topic_name}\n'
                                            f'MQConsumer:{group_id}')
@@ -32,18 +40,23 @@ def main(log_type, crawler, topic_name, group_id, env):
             # 长轮询消费消息。
             recv_msgs = consumer.consume_message(batch, wait_seconds)
             for msg in recv_msgs:
-                xng_author_start_time = int(time.time())
-                Common.logger(log_type, crawler).info(f"Receive\n"
-                                                      f"MessageId:{msg.message_id}\n"
-                                                      f"MessageBodyMD5:{msg.message_body_md5}\n"
-                                                      f"MessageTag:{msg.message_tag}\n"
-                                                      f"ConsumedTimes:{msg.consumed_times}\n"
-                                                      f"PublishTime:{msg.publish_time}\n"
-                                                      f"Body:{msg.message_body}\n"
-                                                      f"NextConsumeTime:{msg.next_consume_time}\n"
-                                                      f"ReceiptHandle:{msg.receipt_handle}\n"
-                                                      f"Properties:{msg.properties}")
-                Common.logging(log_type, crawler, env, f"Receive\n"
+                AliyunLogger.logging(
+                    code="1000",
+                    platform=my_platform,
+                    mode=mode,
+                    env=env,
+                    message=f"Receive\n"
+                    f"MessageId:{msg.message_id}\n"
+                    f"MessageBodyMD5:{msg.message_body_md5}\n"
+                    f"MessageTag:{msg.message_tag}\n"
+                    f"ConsumedTimes:{msg.consumed_times}\n"
+                    f"PublishTime:{msg.publish_time}\n"
+                    f"Body:{msg.message_body}\n"
+                    f"NextConsumeTime:{msg.next_consume_time}\n"
+                    f"ReceiptHandle:{msg.receipt_handle}\n"
+                    f"Properties:{msg.properties}",
+                )
+                Common.logging(mode, my_platform, env, f"Receive\n"
                                                        f"MessageId:{msg.message_id}\n"
                                                        f"MessageBodyMD5:{msg.message_body_md5}\n"
                                                        f"MessageTag:{msg.message_tag}\n"
@@ -54,57 +67,95 @@ def main(log_type, crawler, topic_name, group_id, env):
                                                        f"ReceiptHandle:{msg.receipt_handle}\n"
                                                        f"Properties:{msg.properties}")
                 # ack_mq_message
-                ack_message(log_type=log_type, crawler=crawler, recv_msgs=recv_msgs, consumer=consumer)
-
-                # 处理爬虫业务
-                task_dict = task_fun_mq(msg.message_body)['task_dict']
-                rule_dict = task_fun_mq(msg.message_body)['rule_dict']
-                task_id = task_dict['id']
-                select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
-                user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
-                Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
-                Common.logging(log_type, crawler, env, f"调度任务:{task_dict}")
-                Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
-                Common.logging(log_type, crawler, env, f"抓取规则:{rule_dict}")
-                Common.logger(log_type, crawler).info(f"用户列表:\n{user_list}")
-                Common.logging(log_type, crawler, env, f"用户列表:\n{user_list}")
-                Common.logger(log_type, crawler).info(f'开始抓取:{task_dict["taskName"]}\n')
-                Common.logging(log_type, crawler, env, f'开始抓取:{task_dict["taskName"]}\n')
-                XiaoniangaoAuthorScheduling.get_author_videos(log_type=log_type,
-                                                              crawler=crawler,
-                                                              user_list=user_list,
-                                                              rule_dict=rule_dict,
-                                                              env=env)
-                # Common.del_logs(log_type, crawler)
-                Common.logger(log_type, crawler).info('抓取一轮结束\n')
-                Common.logging(log_type, crawler, env, '抓取一轮结束\n')
-                xng_author_end_time = int(time.time())
-                xng_author_duration = xng_author_start_time - xng_author_end_time
-                Common.logger(log_type, crawler).info(f"duration {xng_author_duration}")
-                Common.logging(log_type, crawler, env, f"duration {xng_author_duration}")
+                ack_message(
+                    log_type=mode,
+                    crawler=my_platform,
+                    recv_msgs=recv_msgs,
+                    consumer=consumer,
+                )
+                # 解析 task_dict
+                task_dict = task_fun_mq(msg.message_body)["task_dict"]
+                AliyunLogger.logging(
+                    "1000", my_platform, mode, env, f"调度任务:{task_dict}"
+                )
+                # 解析 rule_dict
+                rule_dict = task_fun_mq(msg.message_body)["rule_dict"]
+                AliyunLogger.logging(
+                    "1000", my_platform, mode, env, f"抓取规则:{rule_dict}\n"
+                )
+                # 解析 user_list
+                task_id = task_dict["id"]
+                select_user_sql = (
+                    f"""select * from crawler_user_v3 where task_id={task_id}"""
+                )
+                user_list = MysqlHelper.get_values(
+                    mode, my_platform, select_user_sql, env, action=""
+                )
+                # our_uid_list = []
+                # our_uid = random.choice(our_uid_list)
+                AliyunLogger.logging(
+                    code="1003",
+                    platform=my_platform,
+                    mode=mode,
+                    env=env,
+                    message="成功获取信息,启动爬虫,开始一轮抓取",
+                )
+                XNGAuthor = XiaoNianGaoAuthor(
+                    platform=my_platform,
+                    mode=mode,
+                    env=env,
+                    rule_dict=rule_dict,
+                    user_list=user_list
+                )
+                XNGAuthor.get_author_list()
+                AliyunLogger.logging(
+                    code="1004",
+                    platform=my_platform,
+                    mode=mode,
+                    env=env,
+                    message="成功抓取完一轮",
+                )
 
         except MQExceptionBase as err:
             # Topic中没有消息可消费。
             if err.type == "MessageNotExist":
-                Common.logger(log_type, crawler).info(f"No new message! RequestId:{err.req_id}\n")
-                Common.logging(log_type, crawler, env, f"No new message! RequestId:{err.req_id}\n")
+                AliyunLogger.logging(
+                    code="1000",
+                    platform=my_platform,
+                    mode=mode,
+                    env=env,
+                    message=f"No new message! RequestId:{err.req_id}\n",
+                )
+                Common.logging(
+                    log_type=mode,
+                    crawler=my_platform,
+                    env=env,
+                    message=f"No new message! RequestId:{err.req_id}\n",
+                )
                 continue
-
-            Common.logger(log_type, crawler).info(f"Consume Message Fail! Exception:{err}\n")
-            Common.logging(log_type, crawler, env, f"Consume Message Fail! Exception:{err}\n")
+            AliyunLogger.logging(
+                code="1000",
+                platform=my_platform,
+                mode=mode,
+                env=env,
+                message=f"Consume Message Fail! Exception:{err}\n",
+            )
             time.sleep(2)
             continue
 
+
 if __name__ == "__main__":
     parser = argparse.ArgumentParser()  ## 新建参数解释器对象
-    parser.add_argument('--log_type', type=str)  ## 添加参数,注明参数类型
-    parser.add_argument('--crawler')  ## 添加参数
-    parser.add_argument('--topic_name')  ## 添加参数
-    parser.add_argument('--group_id')  ## 添加参数
-    parser.add_argument('--env')  ## 添加参数
+    parser.add_argument("--log_type", type=str)  ## 添加参数,注明参数类型
+    parser.add_argument("--crawler")  ## 添加参数
+    parser.add_argument("--topic_name")  ## 添加参数
+    parser.add_argument("--group_id")  ## 添加参数
+    parser.add_argument("--env")  ## 添加参数
     args = parser.parse_args()  ### 参数赋值,也可以通过终端赋值
-    main(log_type=args.log_type,
-         crawler=args.crawler,
-         topic_name=args.topic_name,
-         group_id=args.group_id,
-         env=args.env)
+    main(
+        my_platform=args.crawler,
+        mode=args.log_type,
+        topic_name=args.topic_name,
+        group_id=args.group_id,
+        env=args.env,
+    )

+ 110 - 0
xiaoniangao/xiaoniangao_main/run_xng_author_v1.py

@@ -0,0 +1,110 @@
+# -*- coding: utf-8 -*-
+# @Author: wangkun
+# @Time: 2023/6/7
+import argparse
+from mq_http_sdk.mq_client import *
+from mq_http_sdk.mq_consumer import *
+from mq_http_sdk.mq_exception import MQExceptionBase
+sys.path.append(os.getcwd())
+from common.public import get_consumer, ack_message, task_fun_mq
+from common.common import Common
+from common.scheduling_db import MysqlHelper
+from xiaoniangao.xiaoniangao_author.xiaoniangao_author_scheduling import XiaoniangaoAuthorScheduling
+
+
+def main(log_type, crawler, topic_name, group_id, env):
+    consumer = get_consumer(topic_name, group_id)
+    # 长轮询表示如果Topic没有消息,则客户端请求会在服务端挂起3秒,3秒内如果有消息可以消费则立即返回响应。
+    # 长轮询时间3秒(最多可设置为30秒)。
+    wait_seconds = 30
+    # 一次最多消费3条(最多可设置为16条)。
+    batch = 1
+    Common.logger(log_type, crawler).info(f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+                                          f'WaitSeconds:{wait_seconds}\n'
+                                          f'TopicName:{topic_name}\n'
+                                          f'MQConsumer:{group_id}')
+    Common.logging(log_type, crawler, env, f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
+                                           f'WaitSeconds:{wait_seconds}\n'
+                                           f'TopicName:{topic_name}\n'
+                                           f'MQConsumer:{group_id}')
+    while True:
+        try:
+            # 长轮询消费消息。
+            recv_msgs = consumer.consume_message(batch, wait_seconds)
+            for msg in recv_msgs:
+                xng_author_start_time = int(time.time())
+                Common.logger(log_type, crawler).info(f"Receive\n"
+                                                      f"MessageId:{msg.message_id}\n"
+                                                      f"MessageBodyMD5:{msg.message_body_md5}\n"
+                                                      f"MessageTag:{msg.message_tag}\n"
+                                                      f"ConsumedTimes:{msg.consumed_times}\n"
+                                                      f"PublishTime:{msg.publish_time}\n"
+                                                      f"Body:{msg.message_body}\n"
+                                                      f"NextConsumeTime:{msg.next_consume_time}\n"
+                                                      f"ReceiptHandle:{msg.receipt_handle}\n"
+                                                      f"Properties:{msg.properties}")
+                Common.logging(log_type, crawler, env, f"Receive\n"
+                                                       f"MessageId:{msg.message_id}\n"
+                                                       f"MessageBodyMD5:{msg.message_body_md5}\n"
+                                                       f"MessageTag:{msg.message_tag}\n"
+                                                       f"ConsumedTimes:{msg.consumed_times}\n"
+                                                       f"PublishTime:{msg.publish_time}\n"
+                                                       f"Body:{msg.message_body}\n"
+                                                       f"NextConsumeTime:{msg.next_consume_time}\n"
+                                                       f"ReceiptHandle:{msg.receipt_handle}\n"
+                                                       f"Properties:{msg.properties}")
+                # ack_mq_message
+                ack_message(log_type=log_type, crawler=crawler, recv_msgs=recv_msgs, consumer=consumer)
+
+                # 处理爬虫业务
+                task_dict = task_fun_mq(msg.message_body)['task_dict']
+                rule_dict = task_fun_mq(msg.message_body)['rule_dict']
+                task_id = task_dict['id']
+                select_user_sql = f"""select * from crawler_user_v3 where task_id={task_id}"""
+                user_list = MysqlHelper.get_values(log_type, crawler, select_user_sql, env, action="")
+                Common.logger(log_type, crawler).info(f"调度任务:\n{task_dict}")
+                Common.logging(log_type, crawler, env, f"调度任务:{task_dict}")
+                Common.logger(log_type, crawler).info(f"抓取规则:\n{rule_dict}")
+                Common.logging(log_type, crawler, env, f"抓取规则:{rule_dict}")
+                Common.logger(log_type, crawler).info(f"用户列表:\n{user_list}")
+                Common.logging(log_type, crawler, env, f"用户列表:\n{user_list}")
+                Common.logger(log_type, crawler).info(f'开始抓取:{task_dict["taskName"]}\n')
+                Common.logging(log_type, crawler, env, f'开始抓取:{task_dict["taskName"]}\n')
+                XiaoniangaoAuthorScheduling.get_author_videos(log_type=log_type,
+                                                              crawler=crawler,
+                                                              user_list=user_list,
+                                                              rule_dict=rule_dict,
+                                                              env=env)
+                # Common.del_logs(log_type, crawler)
+                Common.logger(log_type, crawler).info('抓取一轮结束\n')
+                Common.logging(log_type, crawler, env, '抓取一轮结束\n')
+                xng_author_end_time = int(time.time())
+                xng_author_duration = xng_author_end_time - xng_author_start_time
+                Common.logger(log_type, crawler).info(f"duration {xng_author_duration}")
+                Common.logging(log_type, crawler, env, f"duration {xng_author_duration}")
+
+        except MQExceptionBase as err:
+            # Topic中没有消息可消费。
+            if err.type == "MessageNotExist":
+                Common.logger(log_type, crawler).info(f"No new message! RequestId:{err.req_id}\n")
+                Common.logging(log_type, crawler, env, f"No new message! RequestId:{err.req_id}\n")
+                continue
+
+            Common.logger(log_type, crawler).info(f"Consume Message Fail! Exception:{err}\n")
+            Common.logging(log_type, crawler, env, f"Consume Message Fail! Exception:{err}\n")
+            time.sleep(2)
+            continue
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()  ## 新建参数解释器对象
+    parser.add_argument('--log_type', type=str)  ## 添加参数,注明参数类型
+    parser.add_argument('--crawler')  ## 添加参数
+    parser.add_argument('--topic_name')  ## 添加参数
+    parser.add_argument('--group_id')  ## 添加参数
+    parser.add_argument('--env')  ## 添加参数
+    args = parser.parse_args()  ### 参数赋值,也可以通过终端赋值
+    main(log_type=args.log_type,
+         crawler=args.crawler,
+         topic_name=args.topic_name,
+         group_id=args.group_id,
+         env=args.env)
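
Both runner versions share the same long-polling consume pattern. The condensed sketch below uses only SDK calls that appear in these scripts (consume_message, ack_message, the MessageNotExist check); consume_forever and handle are hypothetical names, with handle(msg) standing in for the crawler business logic.

```python
# Condensed long-polling consume loop; consumer is the MQConsumer returned by
# get_consumer(...), and handle(msg) is a placeholder for the crawler logic.
import time
from mq_http_sdk.mq_exception import MQExceptionBase

def consume_forever(consumer, handle, batch=1, wait_seconds=30):
    while True:
        try:
            # Long polling: the request waits on the broker for up to
            # wait_seconds and returns as soon as messages are available.
            msgs = consumer.consume_message(batch, wait_seconds)
            # Ack the batch first, as the runners above do, so the broker
            # does not redeliver these messages while they are processed.
            consumer.ack_message([msg.receipt_handle for msg in msgs])
            for msg in msgs:
                handle(msg)
        except MQExceptionBase as err:
            if err.type == "MessageNotExist":
                continue        # empty poll, nothing to consume, poll again
            time.sleep(2)       # transient failure, back off and retry
```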

+ 0 - 161
xiaoniangao/xiaoniangao_main/run_xng_author_v2.py

@@ -1,161 +0,0 @@
-# -*- coding: utf-8 -*-
-# @Author: luojunhui
-# @Time: 2023/10/23
-import argparse
-from mq_http_sdk.mq_client import *
-from mq_http_sdk.mq_consumer import *
-from mq_http_sdk.mq_exception import MQExceptionBase
-
-sys.path.append(os.getcwd())
-from common.common import Common
-from common.public import task_fun_mq, get_consumer, ack_message
-from common.scheduling_db import MysqlHelper
-from common.aliyun_log import AliyunLogger
-from xiaoniangao.xiaoniangao_author import XiaoNianGaoAuthor
-
-
-def main(my_platform, mode, topic_name, group_id, env):
-    consumer = get_consumer(topic_name, group_id)
-    # 长轮询表示如果Topic没有消息,则客户端请求会在服务端挂起3秒,3秒内如果有消息可以消费则立即返回响应。
-    # 长轮询时间3秒(最多可设置为30秒)。
-    wait_seconds = 30
-    # 一次最多消费3条(最多可设置为16条)。
-    batch = 1
-    AliyunLogger.logging(
-        code="1000",
-        platform=my_platform,
-        mode=mode,
-        env=env,
-        message=f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
-        f"WaitSeconds:{wait_seconds}\n"
-        f"TopicName:{topic_name}\n"
-        f"MQConsumer:{group_id}",
-    )
-    Common.logging(log_type=mode, crawler=my_platform, env=env, message=f'{10 * "="}Consume And Ack Message From Topic{10 * "="}\n'
-                                           f'WaitSeconds:{wait_seconds}\n'
-                                           f'TopicName:{topic_name}\n'
-                                           f'MQConsumer:{group_id}')
-    while True:
-        try:
-            # 长轮询消费消息。
-            recv_msgs = consumer.consume_message(batch, wait_seconds)
-            for msg in recv_msgs:
-                AliyunLogger.logging(
-                    code="1000",
-                    platform=my_platform,
-                    mode=mode,
-                    env=env,
-                    message=f"Receive\n"
-                    f"MessageId:{msg.message_id}\n"
-                    f"MessageBodyMD5:{msg.message_body_md5}\n"
-                    f"MessageTag:{msg.message_tag}\n"
-                    f"ConsumedTimes:{msg.consumed_times}\n"
-                    f"PublishTime:{msg.publish_time}\n"
-                    f"Body:{msg.message_body}\n"
-                    f"NextConsumeTime:{msg.next_consume_time}\n"
-                    f"ReceiptHandle:{msg.receipt_handle}\n"
-                    f"Properties:{msg.properties}",
-                )
-                Common.logging(mode, my_platform, env, f"Receive\n"
-                                                       f"MessageId:{msg.message_id}\n"
-                                                       f"MessageBodyMD5:{msg.message_body_md5}\n"
-                                                       f"MessageTag:{msg.message_tag}\n"
-                                                       f"ConsumedTimes:{msg.consumed_times}\n"
-                                                       f"PublishTime:{msg.publish_time}\n"
-                                                       f"Body:{msg.message_body}\n"
-                                                       f"NextConsumeTime:{msg.next_consume_time}\n"
-                                                       f"ReceiptHandle:{msg.receipt_handle}\n"
-                                                       f"Properties:{msg.properties}")
-                # ack_mq_message
-                ack_message(
-                    log_type=mode,
-                    crawler=my_platform,
-                    recv_msgs=recv_msgs,
-                    consumer=consumer,
-                )
-                # 解析 task_dict
-                task_dict = task_fun_mq(msg.message_body)["task_dict"]
-                AliyunLogger.logging(
-                    "1000", my_platform, mode, env, f"调度任务:{task_dict}"
-                )
-                # 解析 rule_dict
-                rule_dict = task_fun_mq(msg.message_body)["rule_dict"]
-                AliyunLogger.logging(
-                    "1000", my_platform, mode, env, f"抓取规则:{rule_dict}\n"
-                )
-                # 解析 user_list
-                task_id = task_dict["id"]
-                select_user_sql = (
-                    f"""select * from crawler_user_v3 where task_id={task_id}"""
-                )
-                user_list = MysqlHelper.get_values(
-                    mode, my_platform, select_user_sql, env, action=""
-                )
-                # our_uid_list = []
-                # our_uid = random.choice(our_uid_list)
-                AliyunLogger.logging(
-                    code="1003",
-                    platform=my_platform,
-                    mode=mode,
-                    env=env,
-                    message="成功获取信息,启动爬虫,开始一轮抓取",
-               )
-                XNGAuthor = XiaoNianGaoAuthor(
-                    platform=my_platform,
-                    mode=mode,
-                    env=env,
-                    rule_dict=rule_dict,
-                    user_list=user_list
-                )
-                XNGAuthor.get_author_list()
-                AliyunLogger.logging(
-                    code="1004",
-                    platform=my_platform,
-                    mode=mode,
-                    env=env,
-                    message="成功抓取完一轮",
-                )
-
-        except MQExceptionBase as err:
-            # Topic中没有消息可消费。
-            if err.type == "MessageNotExist":
-                AliyunLogger.logging(
-                    code="1000",
-                    platform=my_platform,
-                    mode=mode,
-                    env=env,
-                    message=f"No new message! RequestId:{err.req_id}\n",
-                )
-                Common.logging(
-                    log_type=mode,
-                    crawler=my_platform,
-                    env=env,
-                    message=f"No new message! RequestId:{err.req_id}\n",
-                )
-                continue
-            AliyunLogger.logging(
-                code="1000",
-                platform=my_platform,
-                mode=mode,
-                env=env,
-                message=f"Consume Message Fail! Exception:{err}\n",
-            )
-            time.sleep(2)
-            continue
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser()  ## 新建参数解释器对象
-    parser.add_argument("--log_type", type=str)  ## 添加参数,注明参数类型
-    parser.add_argument("--crawler")  ## 添加参数
-    parser.add_argument("--topic_name")  ## 添加参数
-    parser.add_argument("--group_id")  ## 添加参数
-    parser.add_argument("--env")  ## 添加参数
-    args = parser.parse_args()  ### 参数赋值,也可以通过终端赋值
-    main(
-        my_platform=args.crawler,
-        mode=args.log_type,
-        topic_name=args.topic_name,
-        group_id=args.group_id,
-        env=args.env,
-    )
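
In both the new runner and this deleted v2 copy, task_fun_mq(msg.message_body) is called twice, once for task_dict and once for rule_dict. A small variation that parses the body once and reuses the result is sketched below; task_fun_mq and MysqlHelper are the project helpers already imported by these runners, and parse_task is a hypothetical name for the extracted fragment of the loop body.

```python
# Parse the MQ message body once and reuse the result; task_fun_mq and
# MysqlHelper are the project helpers already imported by these runners.
from common.public import task_fun_mq
from common.scheduling_db import MysqlHelper

def parse_task(msg, mode, my_platform, env):
    """Turn one MQ message into (task_dict, rule_dict, user_list)."""
    parsed = task_fun_mq(msg.message_body)
    task_dict = parsed["task_dict"]
    rule_dict = parsed["rule_dict"]
    select_user_sql = f"select * from crawler_user_v3 where task_id={task_dict['id']}"
    user_list = MysqlHelper.get_values(mode, my_platform, select_user_sql, env, action="")
    return task_dict, rule_dict, user_list
```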

+ 1 - 1
youlegaoxiaoxiaoshipin/youlegaoxiaoxiaoshipin_recommend/youlegaoxiaoxiaoshipin_scheduling.py

@@ -200,7 +200,7 @@ if __name__ == "__main__":
         platform="ylgxxsp",
         mode="recommend",
         rule_dict={},
-        our_uid="luojunhuihaoshuai",
+        our_uid_list=["luojunhuihaoshuai"],
         env="prod",
     )
     for i in range(5):
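
The final hunk switches the recommend scheduler from a single our_uid to an our_uid_list. Picking one uid per round, as the commented-out random.choice lines in run_xng_author.py hint at, could then look like the sketch below; the list value is copied from the hunk and the loop is purely illustrative.

```python
import random

# Value taken from the hunk above; add more uids to spread uploads
# across several publishing accounts.
our_uid_list = ["luojunhuihaoshuai"]

for i in range(5):
    our_uid = random.choice(our_uid_list)  # one uid drawn per round
    print(f"round {i}: publishing as {our_uid}")
```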