lierqiang 2 years ago
parent commit 74868b7d7e
1 changed file with 2 additions and 5 deletions

xigua/xigua_search/xigua_search.py (+2 −5)

@@ -618,7 +618,7 @@ class Search:
             return video_dict
         except Exception as e:
             Common.logger(log_type, crawler).error(f'Video: {item_id}, failed to get video details, reason: {e}')
-            return
+            return {}
     @classmethod
     def is_ruled(cls, log_type, crawler, video_dict, rule_dict):
         old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
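
The first hunk changes the exception branch to return an empty dict instead of bare None. A minimal sketch of why that matters for callers, with fetch_details and get_video_info_sketch as assumed stand-ins for the project's real functions:

# Hypothetical stand-ins, not the project's code.
def fetch_details(item_id: str) -> dict:
    # Simulates the detail request failing, which is the path this commit touches.
    raise RuntimeError("detail request failed")

def get_video_info_sketch(item_id: str) -> dict:
    # After this commit the failure path yields {} rather than None.
    try:
        return fetch_details(item_id)
    except Exception:
        return {}

video_dict = get_video_info_sketch("example_item_id")
if not video_dict:                     # {} is falsy, so an emptiness guard still works
    print("no video details, skipping")
print(video_dict.get("video_id"))      # .get() on {} safely returns None

Returning a dict keeps the caller's type uniform: .get() stays safe and key lookups fail with a clear KeyError, whereas returning None makes the first subscript access blow up with TypeError.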
@@ -718,11 +718,8 @@ class Search:
                             Common.logger(log_type, crawler).error(f'Video: {item_id}, failed to get video details, reason: {e}')
                             continue

-                if not cls.is_ruled(log_type, crawler, video_dict, rule_dict):
-                    Common.logger(log_type, crawler).info(f'gid:{item_id}, does not match the crawl rules\n')
-                    continue
                 if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
-                    Common.logger(log_type, crawler).info(f'gid:{item_id}, video already downloaded, no need to download again\n')
+                    Common.logger(log_type, crawler).info(f'gid:{video_dict["video_id"]}, video already downloaded, no need to download again\n')
                     continue
                 for k, v in video_dict.items():
                     Common.logger(log_type, crawler).info(f"{k}:{v}")
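
The second hunk removes the is_ruled gate, so search results in this path are no longer filtered against rule_dict before the dedupe check, and it fixes the dedupe log to print the id that was actually checked (video_dict["video_id"]) instead of item_id. A minimal sketch of the remaining flow, assuming repeat_video returns the count of already-stored rows (inferred from the != 0 test):

# Hypothetical stand-ins, not the project's code.
downloaded_ids = {"v1001", "v1003"}    # assumed stand-in for the crawler's dedupe store

def repeat_video_sketch(video_id: str) -> int:
    # Non-zero means the video was downloaded before, mirroring the != 0 check above.
    return 1 if video_id in downloaded_ids else 0

for video_dict in [{"video_id": "v1001"}, {"video_id": "v1002"}]:
    if repeat_video_sketch(video_dict["video_id"]) != 0:
        # Log the id that was checked, as the corrected message now does.
        print(f'gid:{video_dict["video_id"]}, video already downloaded, no need to download again')
        continue
    print(f'gid:{video_dict["video_id"]}, new video, continue processing')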