Ver Fonte

Merge branch 'feature_2023121211_liqian_recommend_data_task_opt' into test

liqian há 1 ano
pai
commit
6ebae6322f
1 ficheiro alterado com 3 adições e 5 exclusões
  1. 3 5
      region_rule_rank_h.py

+ 3 - 5
region_rule_rank_h.py

@@ -680,7 +680,7 @@ def video_rank(df, now_date, now_h, rule_key, param, region, data_key, rule_rank
         rule_rank_h_flag=rule_rank_h_flag, political_filter=political_filter,
         shield_config=shield_config, dup_remove=dup_remove
     )
-    log_.info(f"==============")
+    # log_.info(f"==============")
 
 
 def dup_data(h_video_ids, initial_key_name, dup_key_name, region, political_filter, shield_config, dup_remove):
@@ -808,7 +808,7 @@ def dup_to_redis_with_timecheck(h_video_ids, now_date, now_h, rule_key, h_rule_k
         region_24h_status = redis_helper.get_data_from_redis(key_name=config_.REGION_24H_DATA_STATUS)
         rule_h_status = redis_helper.get_data_from_redis(key_name=config_.RULE_H_DATA_STATUS)
         if rule_24h_status == '1' and region_24h_status == '1' and rule_h_status == '1':
-            log_.info("dup data start ....")
+            # log_.info("dup data start ....")
             # ##### 去重更新不区分地域小时级列表,并另存为redis中
             if h_rule_key is not None:
                 h_key_name = \
@@ -881,7 +881,7 @@ def dup_to_redis_with_timecheck(h_video_ids, now_date, now_h, rule_key, h_rule_k
             break
         else:
             # 数据没准备好,1分钟后重新检查
-            log_.info("dup data wait ....")
+            # log_.info("dup data wait ....")
             time.sleep(60)
             # Timer(
             #     60,
@@ -892,8 +892,6 @@ def dup_to_redis_with_timecheck(h_video_ids, now_date, now_h, rule_key, h_rule_k
             # ).start()
 
 
-
-
 def merge_df(df_left, df_right):
     """
     df按照videoid, code 合并,对应特征求和