
add kuaishou rules

zhangyong committed 1 year ago
commit f5840abeaf
1 changed file with 27 additions and 13 deletions:
    kuaishou/kuaishou_author/kuaishou_author_scheduling_new.py

kuaishou/kuaishou_author/kuaishou_author_scheduling_new.py  (+27 −13)

@@ -168,6 +168,14 @@ class KuaishouauthorScheduling:
             feeds = response.json()['data']['visionProfilePhotoList']['feeds']
             for i in range(len(feeds)):
                 try:
+                    Common.logger(log_type, crawler).info('扫描到一条视频\n')
+                    AliyunLogger.logging(
+                        code="1001",
+                        platform=crawler,
+                        mode=log_type,
+                        env=env,
+                        message='扫描到一条视频\n'
+                    )
                     if cls.download_cnt >= cls.videos_cnt(rule_dict):
                         Common.logger(log_type, crawler).info(f"已下载视频数:{cls.download_cnt}\n")
                         AliyunLogger.logging(
@@ -190,29 +198,35 @@ class KuaishouauthorScheduling:
                         video_height = feeds[i].get("photo", {}).get("videoResource").get("hevc", {}).get("adaptationSet", {})[0].get("representation", {})[0].get("height", 0)
                     publish_time_stamp = int(int(feeds[i].get('photo', {}).get('timestamp', 0)) / 1000)
                     publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
-                    date_three_days_ago_string = (date.today() + timedelta(days=-60)).strftime("%Y-%m-%d %H:%M:%S")
+                    date_three_days_ago_string = (date.today() + timedelta(days=-5)).strftime("%Y-%m-%d %H:%M:%S")
                     rule = publish_time_str > date_three_days_ago_string
+                    if i > 2:
+                        if rule == False:
+                            break
                     if rule == False:
-                        Common.logger(log_type, crawler).info(f"发布时间小于60天,发布时间:{publish_time_str}\n")
+                        Common.logger(log_type, crawler).info(f"发布时间小于5天,发布时间:{publish_time_str}\n")
                         AliyunLogger.logging(
                             code="2004",
                             platform=crawler,
                             mode=log_type,
                             env=env,
-                            message=f"发布时间小于60天,发布时间:{publish_time_str}\n"
+                            message=f"发布时间小于5天,发布时间:{publish_time_str}\n"
                         )
                         continue
+                    viewCount = int(feeds[i].get('photo', {}).get('viewCount', 0))
                     realLikeCount = int(feeds[i].get('photo', {}).get('realLikeCount', 0))
-                    if realLikeCount < 10000:
-                        Common.logger(log_type, crawler).info(f"点赞量:{realLikeCount}\n")
-                        AliyunLogger.logging(
-                            code="2004",
-                            platform=crawler,
-                            mode=log_type,
-                            env=env,
-                            message=f"点赞量:{realLikeCount}\n"
-                        )
-                        continue
+                    video_percent = round(realLikeCount / viewCount, 2) if viewCount else 0
+                    if viewCount < 100000:
+                        if video_percent < 0.02:
+                            Common.logger(log_type, crawler).info(f"不符合条件:点赞/播放-{video_percent},播放量-{viewCount}\n")
+                            AliyunLogger.logging(
+                                code="2004",
+                                platform=crawler,
+                                mode=log_type,
+                                env=env,
+                                message=f"点赞量:{realLikeCount}\n"
+                            )
+                            continue
                     video_dict = {'video_title': video_title,
                                   'video_id': video_id,
                                   'play_cnt': int(feeds[i].get('photo', {}).get('viewCount', 0)),
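
For reference, a minimal standalone sketch of the filtering rules this commit introduces, assuming the profile feed is ordered newest-first. The Video dataclass and the helper names is_recent, passes_engagement, and filter_feed are illustrative only and do not exist in the repository; unlike the scheduler code above, which compares formatted date strings, the sketch compares Unix timestamps directly.

import time
from dataclasses import dataclass
from typing import List, Optional

# Thresholds taken from the diff above.
RECENCY_DAYS = 5             # keep only videos published within the last 5 days
HIGH_VIEW_EXEMPT = 100_000   # videos with >= 100k plays skip the ratio check
MIN_LIKE_VIEW_RATIO = 0.02   # low-view videos need likes / plays >= 0.02


@dataclass
class Video:
    publish_time_stamp: int   # Unix seconds
    view_count: int
    real_like_count: int


def is_recent(video: Video, now: Optional[float] = None) -> bool:
    # Published within the last RECENCY_DAYS days.
    now = time.time() if now is None else now
    return (now - video.publish_time_stamp) <= RECENCY_DAYS * 24 * 3600


def passes_engagement(video: Video) -> bool:
    # High-play videos pass outright; low-play videos need a minimum like/play ratio.
    if video.view_count >= HIGH_VIEW_EXEMPT:
        return True
    if video.view_count == 0:
        return False
    return video.real_like_count / video.view_count >= MIN_LIKE_VIEW_RATIO


def filter_feed(feeds: List[Video]) -> List[Video]:
    # Mirrors the loop in the diff: an old video past the first few items ends the
    # scan (feed assumed newest-first); otherwise old or low-engagement videos are skipped.
    kept = []
    for i, video in enumerate(feeds):
        if not is_recent(video):
            if i > 2:
                break
            continue
        if not passes_engagement(video):
            continue
        kept.append(video)
    return kept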