wangkun — 1 year ago
parent commit dfaf58e037
1 changed file with 11 additions and 10 deletions:
  gongzhonghao/gongzhonghao_author/gongzhonghao3_author.py (+11 −10)
@@ -106,11 +106,11 @@ class GongzhonghaoAuthor3:
             }
             urllib3.disable_warnings()
             r = requests.get(url=url, headers=headers, params=params, verify=False)
+            Common.logger(log_type, crawler).info(f"get_user_info_res:{r.text}\n")
             r.close()
             if r.json()["base_resp"]["err_msg"] == "invalid session":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 过期啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n过期啦,请扫码更换token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -118,7 +118,6 @@ class GongzhonghaoAuthor3:
             if r.json()["base_resp"]["err_msg"] == "freq control":
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -126,7 +125,6 @@ class GongzhonghaoAuthor3:
             if "list" not in r.json() or len(r.json()["list"]) == 0:
                 Common.logger(log_type, crawler).warning(f"status_code:{r.status_code}")
                 Common.logger(log_type, crawler).warning(f"get_fakeid:{r.text}\n")
-                # Common.logger(log_type, crawler).warning(f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} 频控啦\n")
                 if 20 >= datetime.datetime.now().hour >= 10:
                     Feishu.bot(log_type, crawler, f"{token_dict['title']}\n操作人:{token_dict['operator']}\n更换日期:{token_dict['update_time']} \n频控啦,请扫码更换其他公众号token\nhttps://mp.weixin.qq.com/")
                 time.sleep(60 * 10)
@@ -462,13 +460,16 @@ class GongzhonghaoAuthor3:
             if len_sheet >= 301:
                 len_sheet = 301
             for i in range(201, len_sheet):
-                user_dict = cls.get_users(log_type=log_type,
-                                          crawler=crawler,
-                                          user_sheet=user_sheet,
-                                          sheetid=sheetid,
-                                          i=i,
-                                          env=env)
-                Common.logger(log_type, crawler).info(f'获取 {user_dict["user_name"]} 公众号视频\n')
+                try:
+                    user_dict = cls.get_users(log_type=log_type,
+                                              crawler=crawler,
+                                              user_sheet=user_sheet,
+                                              sheetid=sheetid,
+                                              i=i,
+                                              env=env)
+                except Exception as e:
+                    Common.logger(log_type, crawler).info(f"获取用户信息失败:{e}\n")
+
                 try:
                     Common.logger(log_type, crawler).info(f'获取 {user_dict["user_name"]} 公众号视频\n')
                     cls.get_videoList(log_type=log_type,