lierqiang, 2 years ago
Commit c5c922a6e3
1 changed file with 14 additions and 14 deletions

+ 14 - 14
kuaishou/kuaishou_recommend/recommend_kuaishou.py

@@ -360,20 +360,20 @@ class KuaiShouRecommend:
                                   'session': f"kuaishou{int(time.time())}"}
 
                     rule_1 = cls.download_rule(video_dict, rule_dict_1)
-                    Common.logger(log_type, crawler).info(f"video_title:{video_title}")
-                    Common.logger(log_type, crawler).info(f"video_id:{video_id}\n")
-
-                    Common.logger(log_type, crawler).info(
-                        f"like_cnt:{video_dict['like_cnt']}{rule_dict_1['like_cnt']}, {eval(str(video_dict['like_cnt']) + str(rule_dict_1['like_cnt']))}")
-                    Common.logger(log_type, crawler).info(
-                        f"video_width:{video_dict['video_width']}{rule_dict_1['video_width']}, {eval(str(video_dict['video_width']) + str(rule_dict_1['video_width']))}")
-                    Common.logger(log_type, crawler).info(
-                        f"video_height:{video_dict['video_height']}{rule_dict_1['video_height']}, {eval(str(video_dict['video_height']) + str(rule_dict_1['video_height']))}")
-                    Common.logger(log_type, crawler).info(
-                        f"duration:{video_dict['duration']}{rule_dict_1['duration']}, {eval(str(video_dict['duration']) + str(rule_dict_1['duration']))}")
-                    Common.logger(log_type, crawler).info(
-                        f"publish_time:{video_dict['publish_time']}{rule_dict_1['publish_time']}, {eval(str(video_dict['publish_time']) + str(rule_dict_1['publish_time']))}")
-                    Common.logger(log_type, crawler).info(f"rule_1:{rule_1}\n")
+                    # Common.logger(log_type, crawler).info(f"video_title:{video_title}")
+                    # Common.logger(log_type, crawler).info(f"video_id:{video_id}\n")
+                    #
+                    # Common.logger(log_type, crawler).info(
+                    #     f"like_cnt:{video_dict['like_cnt']}{rule_dict_1['like_cnt']}, {eval(str(video_dict['like_cnt']) + str(rule_dict_1['like_cnt']))}")
+                    # Common.logger(log_type, crawler).info(
+                    #     f"video_width:{video_dict['video_width']}{rule_dict_1['video_width']}, {eval(str(video_dict['video_width']) + str(rule_dict_1['video_width']))}")
+                    # Common.logger(log_type, crawler).info(
+                    #     f"video_height:{video_dict['video_height']}{rule_dict_1['video_height']}, {eval(str(video_dict['video_height']) + str(rule_dict_1['video_height']))}")
+                    # Common.logger(log_type, crawler).info(
+                    #     f"duration:{video_dict['duration']}{rule_dict_1['duration']}, {eval(str(video_dict['duration']) + str(rule_dict_1['duration']))}")
+                    # Common.logger(log_type, crawler).info(
+                    #     f"publish_time:{video_dict['publish_time']}{rule_dict_1['publish_time']}, {eval(str(video_dict['publish_time']) + str(rule_dict_1['publish_time']))}")
+                    # Common.logger(log_type, crawler).info(f"rule_1:{rule_1}\n")
                     if video_title == "" or video_url == "":
                         Common.logger(log_type, crawler).info("无效视频\n")
                         continue
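
Note: the rule-check log lines commented out above all follow the same pattern: a metric value from `video_dict` is concatenated with its condition string from `rule_dict_1` (for example `"120000" + ">=5000"`) and the result is passed to `eval()` to decide whether the video satisfies that rule. Below is a minimal, self-contained sketch of that pattern; the dict contents are illustrative assumptions, and the actual check is performed by `cls.download_rule` elsewhere in recommend_kuaishou.py.

```python
# Sketch of the value-plus-condition eval pattern seen in the log lines above.
# The dict contents here are illustrative assumptions, not values from the repo.
video_dict = {"like_cnt": 120000, "duration": 45, "video_width": 720}
rule_dict_1 = {"like_cnt": ">=5000", "duration": ">=20", "video_width": ">=576"}

for key, rule in rule_dict_1.items():
    value = video_dict[key]
    # e.g. eval("120000>=5000") -> True
    passed = eval(str(value) + str(rule))
    print(f"{key}: {value}{rule} -> {passed}")
```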