
Merge branch 'feature_2023121413_liqian_recommend_data_update_task_opt' into test

liqian 1 year ago
parent
commit
c3881d1a28

+ 159 - 0
ad_out_v1_get_offline_score_item_v2.py

@@ -0,0 +1,159 @@
+# -*- coding: utf-8 -*-
+import sys
+import datetime
+import traceback
+from threading import Timer
+from tqdm import tqdm
+from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
+from config import set_config
+from log import Log
+from records_process import records_process
+
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
+from feature import get_item_features as get_features
+from lr_model import LrModel
+from utils import exe_sql
+
+model_key = 'ad_out_v1'
+lr_model = LrModel('model/{}.json'.format(model_key))
+item_h_dict = {}
+key_name_prefix = f"{config_.KEY_NAME_PREFIX_AD_OUT_MODEL_SCORE_ITEM}{model_key}"
+print(key_name_prefix)
+# Expiration time: one week
+expire_time = 7 * 24 * 3600
+
+def process_and_store(row):
+    k = str(row['k'])
+    features = get_features(row)
+    h = lr_model.predict_h(features)
+    redis_helper.set_data_to_redis(f"{key_name_prefix}:{k}", round(h, 6), expire_time)
+
+def update_offline_score_item(dt):
+    project = 'loghubods'
+    sql = """
+--odps sql 
+--********************************************************************--
+--author: R&D
+--create time:2023-12-11 23:54:20
+--********************************************************************--
+with candidate_item as (
+select
+-- Base features: video
+videoid AS i_id
+,uid AS i_up_id
+-- ,tags as i_tag
+-- ,title as i_title
+,ceil(log2(length(title) + 1)) as i_title_len
+,ceil(log2(total_time + 1)) as i_play_len
+,ceil(log2(existence_days + 1)) as i_days_since_upload -- upload time (days before now)
+-- Base features: context
+-- ,apptype AS ctx_apptype
+-- ,ctx_day AS ctx_day
+-- ,ctx_week AS ctx_week
+-- ,ctx_hour AS ctx_hour
+-- ,ctx_region as ctx_region
+-- ,ctx_city as ctx_city
+-- Base features: cross (user x item)
+-- ,ui_is_out as ui_is_out
+-- ,i_play_len as playtime
+-- ,IF(i_play_len > 1,'0','1') AS ui_is_out_new
+-- ,rootmid AS ui_root_id
+-- ,shareid AS ui_share_id
+-- Statistical features: video
+,ceil(log2(i_1day_exp_cnt + 1)) as i_1day_exp_cnt
+,ceil(log2(i_1day_click_cnt + 1)) as i_1day_click_cnt
+,ceil(log2(i_1day_share_cnt + 1)) as i_1day_share_cnt
+,ceil(log2(i_1day_return_cnt + 1)) as i_1day_return_cnt
+,ceil(log2(i_3day_exp_cnt + 1)) as i_3day_exp_cnt
+,ceil(log2(i_3day_click_cnt + 1)) as i_3day_click_cnt
+,ceil(log2(i_3day_share_cnt + 1)) as i_3day_share_cnt
+,ceil(log2(i_3day_return_cnt + 1)) as i_3day_return_cnt
+,ceil(log2(i_7day_exp_cnt + 1)) as i_7day_exp_cnt
+,ceil(log2(i_7day_click_cnt + 1)) as i_7day_click_cnt
+,ceil(log2(i_7day_share_cnt + 1)) as i_7day_share_cnt
+,ceil(log2(i_7day_return_cnt + 1)) as i_7day_return_cnt
+,ceil(log2(i_3month_exp_cnt + 1)) as i_3month_exp_cnt
+,ceil(log2(i_3month_click_cnt + 1)) as i_3month_click_cnt
+,ceil(log2(i_3month_share_cnt + 1)) as i_3month_share_cnt
+,ceil(log2(i_3month_return_cnt + 1)) as i_3month_return_cnt
+,round(if(i_ctr_1day > 10.0, 10.0, i_ctr_1day) / 10.0, 6) as i_ctr_1day
+,round(if(i_str_1day > 10.0, 10.0, i_str_1day) / 10.0, 6) as i_str_1day
+,round(if(i_rov_1day > 10.0, 10.0, i_rov_1day) / 10.0, 6) as i_rov_1day
+,round(if(i_ros_1day > 10.0, 10.0, i_ros_1day) / 10.0, 6) as i_ros_1day
+,round(if(i_ctr_3day > 10.0, 10.0, i_ctr_3day) / 10.0, 6) as i_ctr_3day
+,round(if(i_str_3day > 10.0, 10.0, i_str_3day) / 10.0, 6) as i_str_3day
+,round(if(i_rov_3day > 10.0, 10.0, i_rov_3day) / 10.0, 6) as i_rov_3day
+,round(if(i_ros_3day > 10.0, 10.0, i_ros_3day) / 10.0, 6) as i_ros_3day
+,round(if(i_ctr_7day > 10.0, 10.0, i_ctr_7day) / 10.0, 6) as i_ctr_7day
+,round(if(i_str_7day > 10.0, 10.0, i_str_7day) / 10.0, 6) as i_str_7day
+,round(if(i_rov_7day > 10.0, 10.0, i_rov_7day) / 10.0, 6) as i_rov_7day
+,round(if(i_ros_7day > 10.0, 10.0, i_ros_7day) / 10.0, 6) as i_ros_7day
+,round(if(i_ctr_3month > 10.0, 10.0, i_ctr_3month) / 10.0, 6) as i_ctr_3month
+,round(if(i_str_3month > 10.0, 10.0, i_str_3month) / 10.0, 6) as i_str_3month
+,round(if(i_rov_3month > 10.0, 10.0, i_rov_3month) / 10.0, 6) as i_rov_3month
+,round(if(i_ros_3month > 10.0, 10.0, i_ros_3month) / 10.0, 6) as i_ros_3month
+from
+loghubods.alg_recsys_video_info
+where dt='{dt}'
+and length(videoid) > 0
+)
+SELECT
+i_id as k,
+*
+from candidate_item
+    """.format(dt=dt)
+    # log_.info(sql)
+    records = exe_sql(project, sql)
+    log_.info('sql_done')
+    records_process(records, process_and_store, max_size=50, num_workers=10)
+
+def timer_check(dt):
+    try:
+        project = config_.ad_model_data['ad_out_v1_item'].get('project')
+        table = config_.ad_model_data['ad_out_v1_item'].get('table')
+        now_date = datetime.datetime.today()
+        yesterday_date = now_date - datetime.timedelta(days=1)
+        now_dt = datetime.datetime.strftime(now_date, '%Y%m%d')
+        yesterday_dt = datetime.datetime.strftime(yesterday_date, '%Y%m%d')
+        log_.info(f"now_dt: {now_dt}")
+        if dt is not None:
+            yesterday_dt = dt
+        log_.info(f"update_dt: {yesterday_dt}")
+        now_min = datetime.datetime.now().minute
+        # Check whether the data for this update is ready
+        data_count = data_check(project=project, table=table, dt=yesterday_dt)
+        if data_count > 0:
+            log_.info('update_offline_score_item start! {}'.format(data_count))
+            # Data is ready; run the update
+            update_offline_score_item(dt=yesterday_dt)
+            log_.info('update_offline_score_item end!')
+        else:
+            # Data not ready; re-check in 5 minutes
+            wait_seconds = 5 * 60
+            log_.info('data not ready, wait {}s'.format(wait_seconds))
+            Timer(wait_seconds, timer_check, args=(dt,)).start()
+
+    except Exception as e:
+        log_.error(f"offline item data update for user ad bounce-rate prediction failed, exception: {e}, traceback: {traceback.format_exc()}")
+        send_msg_to_feishu(
+            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
+            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
+            msg_text=f"rov-offline{config_.ENV_TEXT} - offline item data update for user ad bounce-rate prediction failed\n"
+                     f"exception: {e}\n"
+                     f"traceback: {traceback.format_exc()}"
+        )
+
+
+if __name__ == "__main__":
+    dt = None
+    if len(sys.argv) > 1:
+        dt = sys.argv[1]
+        log_.info('## Manual update: {}'.format(dt))
+    else:
+        log_.info('## Automatic update')
+    timer_check(dt)
+
+
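lr_model.py is not part of this diff, so the scoring step above is opaque here. As a hedged sketch: for a logistic-regression scorer loaded from model/ad_out_v1.json, predict_h plausibly computes a sigmoid over a weighted sum of the named features (the JSON layout with 'weights' and 'bias' keys below is an assumption, not the repo's actual format):

    # Hypothetical sketch of the scorer -- lr_model.py is not in this diff.
    import json
    import math

    class LrModelSketch:
        def __init__(self, path):
            with open(path) as f:
                model = json.load(f)
            self.weights = model.get('weights', {})  # assumed: feature name -> weight
            self.bias = model.get('bias', 0.0)       # assumed: scalar bias term

        def predict_h(self, features):
            # features: dict of feature name -> numeric value, as built by get_features(row)
            z = self.bias + sum(self.weights.get(name, 0.0) * float(value)
                                for name, value in features.items())
            return 1.0 / (1.0 + math.exp(-z))        # sigmoid -> score in (0, 1)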

+ 10 - 0
ad_out_v1_get_offline_score_item_v2.sh

@@ -0,0 +1,10 @@
+source /etc/profile
+echo $ROV_OFFLINE_ENV
+if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
+    cd /data2/rov-offline &&
+    /root/anaconda3/bin/python /data2/rov-offline/ad_out_v1_get_offline_score_item_v2.py "$@"
+elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
+    cd /data/rov-offline &&
+    /root/anaconda3/bin/python /data/rov-offline/ad_out_v1_get_offline_score_item_v2.py "$@"
+fi
+
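This wrapper (and its user-side counterpart below) forwards any arguments to the Python entry point, which treats argv[1] as the partition date. Example invocations (the date is illustrative):

    sh ad_out_v1_get_offline_score_item_v2.sh 20231213   # manual update for dt=20231213
    sh ad_out_v1_get_offline_score_item_v2.sh            # automatic update: yesterday's partition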

+ 149 - 0
ad_out_v1_get_offline_score_user_v2.py

@@ -0,0 +1,149 @@
+# -*- coding: utf-8 -*-
+import sys
+import datetime
+import traceback
+from threading import Timer
+from tqdm import tqdm
+from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
+from config import set_config
+from log import Log
+from records_process import records_process
+
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
+from feature import get_user_features as get_features
+from lr_model import LrModel
+from utils import exe_sql
+
+model_key = 'ad_out_v1'
+lr_model = LrModel('model/{}.json'.format(model_key))
+item_h_dict = {}
+key_name_prefix = f"{config_.KEY_NAME_PREFIX_AD_OUT_MODEL_SCORE_USER}{model_key}"
+print(key_name_prefix)
+# Expiration time: one week
+expire_time = 7 * 24 * 3600
+
+def process_and_store(row):
+    k = str(row['k'])
+    features = get_features(row)
+    h = lr_model.predict_h(features)
+    redis_helper.set_data_to_redis(f"{key_name_prefix}:{k}", round(h, 6), expire_time)
+
+def update_offline_score_user(dt):
+    project = 'loghubods'
+    sql = """
+--odps sql 
+--********************************************************************--
+--author: R&D
+--create time:2023-12-11 23:54:20
+--********************************************************************--
+with candidate_user as (
+select
+-- Base features: user
+mids AS u_id
+,machineinfo_brand AS u_brand
+,machineinfo_model AS u_device
+,SPLIT(machineinfo_system,' ')[0] AS u_system
+,machineinfo_system AS u_system_ver
+,province as ctx_region
+,city as ctx_city
+,u_cycle_bucket_7days
+,u_cycle_bucket_30days
+,u_share_bucket_30days
+,ceil(log2(u_1day_exp_cnt + 1)) as u_1day_exp_cnt
+,ceil(log2(u_1day_click_cnt + 1)) as u_1day_click_cnt
+,ceil(log2(u_1day_share_cnt + 1)) as u_1day_share_cnt
+,ceil(log2(u_1day_return_cnt + 1)) as u_1day_return_cnt
+,ceil(log2(u_3day_exp_cnt + 1)) as u_3day_exp_cnt
+,ceil(log2(u_3day_click_cnt + 1)) as u_3day_click_cnt
+,ceil(log2(u_3day_share_cnt + 1)) as u_3day_share_cnt
+,ceil(log2(u_3day_return_cnt + 1)) as u_3day_return_cnt
+,ceil(log2(u_7day_exp_cnt + 1)) as u_7day_exp_cnt
+,ceil(log2(u_7day_click_cnt + 1)) as u_7day_click_cnt
+,ceil(log2(u_7day_share_cnt + 1)) as u_7day_share_cnt
+,ceil(log2(u_7day_return_cnt + 1)) as u_7day_return_cnt
+,ceil(log2(u_3month_exp_cnt + 1)) as u_3month_exp_cnt
+,ceil(log2(u_3month_click_cnt + 1)) as u_3month_click_cnt
+,ceil(log2(u_3month_share_cnt + 1)) as u_3month_share_cnt
+,ceil(log2(u_3month_return_cnt + 1)) as u_3month_return_cnt
+,round(if(u_ctr_1day > 10.0, 10.0, u_ctr_1day) / 10.0, 6) as u_ctr_1day
+,round(if(u_str_1day > 10.0, 10.0, u_str_1day) / 10.0, 6) as u_str_1day
+,round(if(u_rov_1day > 10.0, 10.0, u_rov_1day) / 10.0, 6) as u_rov_1day
+,round(if(u_ros_1day > 10.0, 10.0, u_ros_1day) / 10.0, 6) as u_ros_1day
+,round(if(u_ctr_3day > 10.0, 10.0, u_ctr_3day) / 10.0, 6) as u_ctr_3day
+,round(if(u_str_3day > 10.0, 10.0, u_str_3day) / 10.0, 6) as u_str_3day
+,round(if(u_rov_3day > 10.0, 10.0, u_rov_3day) / 10.0, 6) as u_rov_3day
+,round(if(u_ros_3day > 10.0, 10.0, u_ros_3day) / 10.0, 6) as u_ros_3day
+,round(if(u_ctr_7day > 10.0, 10.0, u_ctr_7day) / 10.0, 6) as u_ctr_7day
+,round(if(u_str_7day > 10.0, 10.0, u_str_7day) / 10.0, 6) as u_str_7day
+,round(if(u_rov_7day > 10.0, 10.0, u_rov_7day) / 10.0, 6) as u_rov_7day
+,round(if(u_ros_7day > 10.0, 10.0, u_ros_7day) / 10.0, 6) as u_ros_7day
+,round(if(u_ctr_3month > 10.0, 10.0, u_ctr_3month) / 10.0, 6) as u_ctr_3month
+,round(if(u_str_3month > 10.0, 10.0, u_str_3month) / 10.0, 6) as u_str_3month
+,round(if(u_rov_3month > 10.0, 10.0, u_rov_3month) / 10.0, 6) as u_rov_3month
+,round(if(u_ros_3month > 10.0, 10.0, u_ros_3month) / 10.0, 6) as u_ros_3month
+from
+loghubods.alg_recsys_user_info
+where dt='{dt}'
+and length(mids) > 0
+and (u_3month_share_cnt > 0 or u_7day_click_cnt > 0 or u_3day_exp_cnt > 0)
+)
+SELECT
+u_id as k,
+*
+from candidate_user
+    """.format(dt=dt)
+    # log_.info(sql)
+    records = exe_sql(project, sql)
+    log_.info('sql_done')
+    records_process(records, process_and_store, max_size=50, num_workers=10)
+
+def timer_check(dt):
+    try:
+        project = config_.ad_model_data['ad_out_v1_user'].get('project')
+        table = config_.ad_model_data['ad_out_v1_user'].get('table')
+        now_date = datetime.datetime.today()
+        yesterday_date = now_date - datetime.timedelta(days=1)
+        now_dt = datetime.datetime.strftime(now_date, '%Y%m%d')
+        yesterday_dt = datetime.datetime.strftime(yesterday_date, '%Y%m%d')
+        log_.info(f"now_dt: {now_dt}")
+        if dt is not None:
+            yesterday_dt = dt
+        log_.info(f"update_dt: {yesterday_dt}")
+        now_min = datetime.datetime.now().minute
+        # Check whether the data for this update is ready
+        data_count = data_check(project=project, table=table, dt=yesterday_dt)
+        if data_count > 0:
+            log_.info('update_offline_score_user start! {}'.format(data_count))
+            # Data is ready; run the update
+            update_offline_score_user(dt=yesterday_dt)
+            log_.info('update_offline_score_user end!')
+        else:
+            # Data not ready; re-check in 5 minutes
+            wait_seconds = 5 * 60
+            log_.info('data not ready, wait {}s'.format(wait_seconds))
+            Timer(wait_seconds, timer_check, args=(dt,)).start()
+
+    except Exception as e:
+        log_.error(f"offline user data update for user ad bounce-rate prediction failed, exception: {e}, traceback: {traceback.format_exc()}")
+        send_msg_to_feishu(
+            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
+            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
+            msg_text=f"rov-offline{config_.ENV_TEXT} - offline user data update for user ad bounce-rate prediction failed\n"
+                     f"exception: {e}\n"
+                     f"traceback: {traceback.format_exc()}"
+        )
+
+
+if __name__ == "__main__":
+    dt = None
+    if len(sys.argv) > 1:
+        dt = sys.argv[1]
+        log_.info('## Manual update: {}'.format(dt))
+    else:
+        log_.info('## Automatic update')
+    timer_check(dt)
+
+
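records_process.py is likewise outside this diff. Given the call records_process(records, process_and_store, max_size=50, num_workers=10), a bounded producer/consumer over worker threads is the natural reading; a minimal sketch under that assumption (the real module may differ):

    # Hypothetical sketch of records_process -- the real records_process.py is not in this diff.
    from queue import Queue
    from threading import Thread

    def records_process(records, process_func, max_size=50, num_workers=10):
        queue = Queue(maxsize=max_size)  # bounded: the producer blocks when workers fall behind

        def worker():
            while True:
                row = queue.get()
                if row is None:          # poison pill: no more records
                    queue.task_done()
                    return
                try:
                    process_func(row)
                finally:
                    queue.task_done()

        threads = [Thread(target=worker) for _ in range(num_workers)]
        for t in threads:
            t.start()
        for row in records:              # stream SQL result rows into the queue
            queue.put(row)
        for _ in threads:
            queue.put(None)              # one poison pill per worker
        for t in threads:
            t.join()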

+ 10 - 0
ad_out_v1_get_offline_score_user_v2.sh

@@ -0,0 +1,10 @@
+source /etc/profile
+echo $ROV_OFFLINE_ENV
+if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
+    cd /data2/rov-offline &&
+    /root/anaconda3/bin/python /data2/rov-offline/ad_out_v1_get_offline_score_user_v2.py "$@"
+elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
+    cd /data/rov-offline &&
+    /root/anaconda3/bin/python /data/rov-offline/ad_out_v1_get_offline_score_user_v2.py "$@"
+fi
+

+ 8 - 0
config.py

@@ -875,6 +875,14 @@ class BaseConfig(object):
             'project': 'loghubods',
             'table': 'user_video_features_data_final'
         },
+        'ad_out_v1_user': {
+            'project': 'loghubods',
+            'table': 'alg_recsys_user_info'
+        },
+        'ad_out_v1_item': {
+            'project': 'loghubods',
+            'table': 'alg_recsys_video_info'
+        },
         'user_group': {
             'project': 'loghubods',
             'table': 'user_share_return_admodel'
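These two entries are what the new scripts' timer_check reads to locate the partitioned source table before scoring, e.g.:

    project = config_.ad_model_data['ad_out_v1_item'].get('project')  # 'loghubods'
    table = config_.ad_model_data['ad_out_v1_item'].get('table')      # 'alg_recsys_video_info'
    data_count = data_check(project=project, table=table, dt='20231213')  # > 0 once the dt partition is ready (illustrative date)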

+ 6 - 3
recommend_region_data_status_update.py

@@ -1,3 +1,4 @@
+import datetime
 from config import set_config
 from log import Log
 from db_helper import RedisHelper
@@ -5,14 +6,16 @@ from db_helper import RedisHelper
 config_, _ = set_config()
 log_ = Log()
 
+now_date = datetime.datetime.today()
+log_.info(f"now_date: {datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
 redis_helper = RedisHelper()
 redis_helper.set_data_to_redis(
-    key_name=config_.RULE_24H_DATA_STATUS, value='0', expire_time=2 * 3600
+    key_name=f"{config_.RULE_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}", value='0', expire_time=2 * 3600
 )
 redis_helper.set_data_to_redis(
-    key_name=config_.REGION_24H_DATA_STATUS, value='0', expire_time=2 * 3600
+    key_name=f"{config_.REGION_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}", value='0', expire_time=2 * 3600
 )
 redis_helper.set_data_to_redis(
-    key_name=config_.RULE_H_DATA_STATUS, value='0', expire_time=2 * 3600
+    key_name=f"{config_.RULE_H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}", value='0', expire_time=2 * 3600
 )
 log_.info(f"recommend data status update to initial '0' finished!")
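The change above suffixes each status key with the current hour, so a leftover '1' from the previous hour can no longer satisfy readers in the current hour; writers and readers must build the same suffix. A small helper capturing the shared format (hypothetical; the repo inlines the f-string at each call site):

    import datetime

    def hourly_status_key(base_key, now_date):
        # e.g. hourly_status_key(config_.RULE_24H_DATA_STATUS, datetime.datetime.today())
        # -> '<base_key>:2023121413'
        return f"{base_key}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}"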

+ 3 - 3
region_rule_rank_h.py

@@ -804,9 +804,9 @@ def dup_to_redis_with_timecheck(h_video_ids, now_date, now_h, rule_key, h_rule_k
     # Fetch and check the update status of the other data tables
     redis_helper = RedisHelper()
     while True:
-        rule_24h_status = redis_helper.get_data_from_redis(key_name=config_.RULE_24H_DATA_STATUS)
-        region_24h_status = redis_helper.get_data_from_redis(key_name=config_.REGION_24H_DATA_STATUS)
-        rule_h_status = redis_helper.get_data_from_redis(key_name=config_.RULE_H_DATA_STATUS)
+        rule_24h_status = redis_helper.get_data_from_redis(key_name=f"{config_.RULE_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
+        region_24h_status = redis_helper.get_data_from_redis(key_name=f"{config_.REGION_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
+        rule_h_status = redis_helper.get_data_from_redis(key_name=f"{config_.RULE_H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
         if rule_24h_status == '1' and region_24h_status == '1' and rule_h_status == '1':
             # log_.info("dup data start ....")
             # ##### Dedup-update the hourly list (not split by region) and store it separately in Redis
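The loop above re-reads the three per-hour flags until all equal '1' before deduplicating; the retry delay sits outside this hunk. A minimal sketch of the pattern, assuming a fixed sleep between polls:

    import time

    def wait_for_status(redis_helper, key_names, poll_seconds=60):
        # Hypothetical helper: block until every status key reads '1'.
        while True:
            if all(redis_helper.get_data_from_redis(key_name=k) == '1' for k in key_names):
                return
            time.sleep(poll_seconds)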

+ 2 - 2
region_rule_rank_h_by24h.py

@@ -497,7 +497,7 @@ def h_timer_check():
                         project=project, table=table, region_code_list=region_code_list)
             log_.info(f"region_24h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.REGION_24H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.REGION_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"region_24h_data status update to '1' finished!")
         elif now_min > 40:
@@ -505,7 +505,7 @@ def h_timer_check():
             h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list)
             log_.info(f"region_24h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.REGION_24H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.REGION_24H_DATA_STATUS}:{datetime.datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"region_24h_data status update to '1' finished!")
         else:

+ 3 - 3
rule_rank_h_by_24h.py

@@ -494,7 +494,7 @@ def h_timer_check():
             h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params)
             log_.info(f"24h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_24H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_24H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_24h_data status update to '1' finished!")
         elif h_data_count > 0:
@@ -503,7 +503,7 @@ def h_timer_check():
             rank_by_h(now_date=now_date, now_h=now_h, rule_params=rule_params, project=project, table=table)
             log_.info(f"24h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_24H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_24H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_24h_data status update to '1' finished!")
         elif now_min > 40:
@@ -511,7 +511,7 @@ def h_timer_check():
             h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params)
             log_.info(f"24h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_24H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_24H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_24h_data status update to '1' finished!")
         else:

+ 3 - 3
rule_rank_h_new.py

@@ -270,7 +270,7 @@ def h_timer_check():
             h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params)
             log_.info(f"h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_h_data status update to '1' finished!")
             return
@@ -282,7 +282,7 @@ def h_timer_check():
             rank_by_h(now_date=now_date, now_h=now_h, rule_params=rule_params, project=project, table=table)
             log_.info(f"h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_h_data status update to '1' finished!")
         elif now_min > 40:
@@ -290,7 +290,7 @@ def h_timer_check():
             h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params)
             log_.info(f"h_data end!")
             redis_helper.set_data_to_redis(
-                key_name=config_.RULE_H_DATA_STATUS, value='1', expire_time=2 * 3600
+                key_name=f"{config_.RULE_H_DATA_STATUS}:{datetime.strftime(now_date, '%Y%m%d%H')}", value='1', expire_time=2 * 3600
             )
             log_.info(f"rule_h_data status update to '1' finished!")
         else:
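Note the two spellings of the hour suffix: rule_rank_h_by_24h.py and rule_rank_h_new.py call datetime.strftime(now_date, ...), while the other files call datetime.datetime.strftime(now_date, ...). Both format the same string provided the name datetime is bound appropriately in each module (from datetime import datetime in the former, import datetime in the latter), so writers and readers still agree on the key. A quick equivalence check:

    import datetime

    d = datetime.datetime(2023, 12, 14, 13)
    assert datetime.datetime.strftime(d, '%Y%m%d%H') == d.strftime('%Y%m%d%H') == '2023121413'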