wangkun 2 years ago
parent
commit
2a9d79f7a5

+ 2 - 0
common/feishu.py

@@ -429,6 +429,8 @@ class Feishu:
                 username = '13513479926'
             elif username == "yuzhuoyi":
                 username = '18624010360'
+            elif username == "rennian":
+                username = '18622113710'
 
             data = {"mobiles": [username]}
             urllib3.disable_warnings()
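The hunk above adds one more entry to the hard-coded username-to-mobile mapping used when sending Feishu alerts. A minimal sketch of the same lookup written as a dict (the dict form and the resolve_mobile helper are illustrative assumptions, not part of the commit; the values come from the diff):

# Hypothetical dict form of the elif chain above.
USERNAME_TO_MOBILE = {
    "yuzhuoyi": "18624010360",
    "rennian": "18622113710",
}

def resolve_mobile(username: str) -> str:
    # Fall back to the raw value when no override is configured.
    return USERNAME_TO_MOBILE.get(username, username)

data = {"mobiles": [resolve_mobile("rennian")]}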

+ 5 - 15
gongzhonghao/gongzhonghao_author/gongzhonghao4_author.py

@@ -24,8 +24,6 @@ from common.public import get_config_from_mysql
 
 
 class GongzhonghaoAuthor4:
-    # 翻页参数
-    begin = 0
     platform = "公众号"
 
     # 基础门槛规则
@@ -145,7 +143,6 @@ class GongzhonghaoAuthor4:
         select_sql = f""" select * from crawler_config where source="{crawler}" and title LIKE "%公众号_4%";"""
         configs = MysqlHelper.get_values(log_type, crawler, select_sql, env, action="")
         if len(configs) == 0:
-            # Common.logger(log_type, crawler).warning(f"公众号_3未配置token")
             Feishu.bot(log_type, crawler, "公众号_4:未配置token")
             time.sleep(60)
             return None
@@ -157,8 +154,8 @@ class GongzhonghaoAuthor4:
             "update_time": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(configs[0]["update_time"]/1000))),
             "operator": configs[0]["operator"]
         }
-        for k, v in token_dict.items():
-            print(f"{k}:{v}")
+        # for k, v in token_dict.items():
+        #     print(f"{k}:{v}")
         return token_dict
 
     # 获取用户 fakeid
@@ -201,7 +198,6 @@ class GongzhonghaoAuthor4:
             if r.json()["base_resp"]["err_msg"] == "invalid session":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 过期啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n过期啦,请扫码更换token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -209,7 +205,6 @@ class GongzhonghaoAuthor4:
             if r.json()["base_resp"]["err_msg"] == "freq control":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -217,7 +212,6 @@ class GongzhonghaoAuthor4:
             if "list" not in r.json() or len(r.json()["list"]) == 0:
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -280,7 +274,7 @@ class GongzhonghaoAuthor4:
     # 获取文章列表
     @classmethod
     def get_videoList(cls, log_type, crawler, wechat_name, rule_dict, user_name, uid, oss_endpoint, env):
-        # try:
+        begin = 0
         while True:
             token_dict = cls.get_token(log_type, crawler, env)
             fakeid_dict = cls.get_fakeid(log_type=log_type,
@@ -308,7 +302,7 @@ class GongzhonghaoAuthor4:
             }
             params = {
                 "action": "list_ex",
-                "begin": str(cls.begin),
+                "begin": str(begin),
                 "count": "5",
                 "fakeid": fakeid_dict['fakeid'],
                 "type": "9",
@@ -324,7 +318,6 @@ class GongzhonghaoAuthor4:
             if r.json()["base_resp"]["err_msg"] == "invalid session":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_videoList:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 过期啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']}\n过期啦,请扫码更换token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -332,7 +325,6 @@ class GongzhonghaoAuthor4:
             if r.json()["base_resp"]["err_msg"] == "freq control":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_videoList:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}, 操作人:{token_dict['operator']}, 更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler,f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -340,7 +332,6 @@ class GongzhonghaoAuthor4:
             if 'app_msg_list' not in r.json():
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_videoList:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']}\n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -349,7 +340,7 @@ class GongzhonghaoAuthor4:
                 Common.logger(log_type, crawler).info('没有更多视频了\n')
                 return
             else:
-                cls.begin += 5
+                begin += 5
                 app_msg_list = r.json()['app_msg_list']
                 for article_url in app_msg_list:
                     # title
@@ -560,7 +551,6 @@ class GongzhonghaoAuthor4:
                               uid=uid,
                               oss_endpoint=oss_endpoint,
                               env=env)
-            cls.begin = 0
             Common.logger(log_type, crawler).info('休眠 60 秒\n')
             time.sleep(60)
             # except Exception as e:
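The main behavioural change in this file is moving the pagination counter begin from a class attribute to a local variable inside get_videoList, which is why the trailing cls.begin = 0 reset could be removed: every call now starts from page 0 on its own. A minimal sketch of the pattern, assuming a hypothetical fetch_page callable in place of the real mp.weixin.qq.com list request:

# Sketch of the local-counter pagination used after this change;
# fetch_page(begin, count) is a hypothetical stand-in for the HTTP call.
def iter_articles(fetch_page):
    begin = 0  # local counter: every run starts from the first page
    while True:
        page = fetch_page(begin=begin, count=5)
        if not page:    # empty result means there are no more articles
            return
        begin += 5      # advance past the 5 items just fetched
        yield from page

Because begin is local, repeated or overlapping runs no longer share pagination state through the class, which the removed cls.begin = 0 reset previously had to compensate for.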

+ 10 - 6
scheduling/scheduling_v3/crawler_scheduling_v3.py

@@ -88,12 +88,16 @@ class SchedulingV3:
         mode = task['mode']
         source = task['source']
         spider_name = task['spider_name']
-        # if env == "aliyun":
-        #     oss_endpoint = "inner"
-        # elif env == "hk":
-        #     oss_endpoint = "hk"
-        # else:
-        #     oss_endpoint = "out"
+        if "gongzhonghao2" in spider_name:
+            mode = "author2"
+        elif "gongzhonghao3" in spider_name:
+            mode = "author3"
+        elif "gongzhonghao4" in spider_name:
+            mode = "author4"
+        elif "gongzhonghao5" in spider_name:
+            mode = "author5"
+        else:
+            mode = mode
 
         # 正式环境,调度任务
         Common.logger(log_type, crawler).info(f"开始调度任务")
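This hunk replaces the commented-out oss_endpoint selection with a mapping that derives the crawl mode from the spider name, falling back to the mode already read from the task record. A minimal standalone sketch of the same mapping (resolve_mode is an illustrative helper, not part of the commit):

# Hypothetical standalone version of the spider_name -> mode mapping above.
def resolve_mode(spider_name: str, default_mode: str) -> str:
    for n in ("2", "3", "4", "5"):
        if f"gongzhonghao{n}" in spider_name:
            return f"author{n}"
    return default_mode  # otherwise keep the mode from the task record

print(resolve_mode("run_gongzhonghao4_author", "author"))  # -> author4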