Browse Source

update for abtest:117

liqian 2 years ago
parent
commit
83da2a570f
6 changed files with 128 additions and 75 deletions
  1. check_video_limit_distribute.py (+31, -23)
  2. config.py (+11, -8)
  3. redis_data_monitor.py (+2, -2)
  4. region_rule_rank_h.py (+59, -17)
  5. region_rule_rank_h_by24h.py (+2, -2)
  6. rule_rank_h_by_24h.py (+23, -23)

+ 31 - 23
check_video_limit_distribute.py

@@ -115,29 +115,37 @@ def process_with_region(data_key, rule_key, region, stop_distribute_video_id_lis
     #     expire_time=2 * 3600
     # )
 
-    if rule_key == 'rule4':
-        key_prefix_list = [
-            config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h list, not split by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H,  # relative-24h list 2, not split by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
-        ]
-    elif rule_key == 'rule5':
-        key_prefix_list = [
-            config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_48H_H,  # relative-48h list, not split by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_48H_H,  # relative-48h list 2, not split by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
-        ]
-    else:
-        key_prefix_list = [
-            config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h list, not split by region
-            config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
-        ]
+    key_prefix_list = [
+        config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
+        config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
+        config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h list, not split by region
+        config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H,  # relative-24h list 2, not split by region
+        config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
+    ]
+
+    # if rule_key == 'rule4':
+    #     key_prefix_list = [
+    #         config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h list, not split by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H,  # relative-24h list 2, not split by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
+    #     ]
+    # elif rule_key == 'rule5':
+    #     key_prefix_list = [
+    #         config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_48H_H,  # relative-48h list, not split by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_48H_H,  # relative-48h list 2, not split by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
+    #     ]
+    # else:
+    #     key_prefix_list = [
+    #         config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,  # hourly list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # relative-24h list grouped by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h list, not split by region
+    #         config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,  # full list
+    #     ]
 
     for key_prefix in key_prefix_list:
         key_name = f"{key_prefix}{region}:{data_key}:{rule_key}:" \

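For context, process_with_region goes on to build one Redis key per prefix in this list and strips the stop-distribute video ids from each recall zset. A minimal sketch of that step, under stated assumptions (a plain redis.Redis client and removal via zrem; the project's own redis helper is not visible in this hunk):

```python
import datetime
import redis

redis_client = redis.Redis()  # assumption: stands in for the project's redis helper


def remove_limited_videos(key_prefix_list, region, data_key, rule_key,
                          stop_distribute_video_id_list, now_date, now_h):
    """Drop stop-distribute video ids from every recall zset built from the prefixes above."""
    date_str = datetime.datetime.strftime(now_date, '%Y%m%d')
    for key_prefix in key_prefix_list:
        # same key layout as in the diff: prefix + region + data_key + rule_key + date + hour
        key_name = f"{key_prefix}{region}:{data_key}:{rule_key}:{date_str}:{now_h}"
        if stop_distribute_video_id_list:
            # zrem removes the limited videos from the zset if they are present
            redis_client.zrem(key_name, *stop_distribute_video_id_list)
```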
+ 11 - 8
config.py

@@ -162,8 +162,8 @@ class BaseConfig(object):
     # rule parameters for the hourly update of past-24h data
     RULE_PARAMS_24H_APP_TYPE = {
         'rule_params': {
-            'rule2': {'cal_score_func': 2, 'return_count': 40, 'platform_return_rate': 0.001,
-                      'view_type': 'preview'},
+            # 'rule2': {'cal_score_func': 2, 'return_count': 40, 'platform_return_rate': 0.001,
+            #           'view_type': 'preview'},
             'rule3': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
                       'view_type': 'preview'},
             'rule4': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
@@ -217,12 +217,14 @@ class BaseConfig(object):
     RULE_PARAMS_REGION_APP_TYPE = {
         'rule_params': {
             # 'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
-            'rule3': {'view_type': 'video-show-region', 'platform_return_rate': 0.001,
-                      'region_24h_rule_key': 'rule2', '24h_rule_key': 'rule2'},
+            # 'rule3': {'view_type': 'video-show-region', 'platform_return_rate': 0.001,
+            #           'region_24h_rule_key': 'rule2', '24h_rule_key': 'rule2'},
             'rule4': {'view_type': 'video-show-region', 'platform_return_rate': 0.001,
                       'region_24h_rule_key': 'rule2', '24h_rule_key': 'rule3'},
-            'rule6': {'view_type': 'preview', 'platform_return_rate': 0.001,
-                      'region_24h_rule_key': 'rule3', '24h_rule_key': 'rule2'},
+            # 'rule6': {'view_type': 'preview', 'platform_return_rate': 0.001,
+            #           'region_24h_rule_key': 'rule3', '24h_rule_key': 'rule2'},
+            'rule7': {'view_type': 'preview', 'platform_return_rate': 0.001,
+                      'region_24h_rule_key': 'rule4', '24h_rule_key': 'rule4', 'merge_func': 2},
         },
         'data_params': DATA_PARAMS,
         'params_list': [
@@ -234,6 +236,7 @@ class BaseConfig(object):
             # {'data': 'data4', 'rule': 'rule3'},
             # {'data': 'data6', 'rule': 'rule3'},
             # {'data': 'data7', 'rule': 'rule6'},
+            {'data': 'data6', 'rule': 'rule7'},
         ],
     }
 
@@ -773,8 +776,8 @@ class ProductionConfig(BaseConfig):
 
 def set_config():
     # get the environment variable ROV_OFFLINE_ENV
-    # env = os.environ.get('ROV_OFFLINE_ENV')
-    env = 'dev'
+    env = os.environ.get('ROV_OFFLINE_ENV')
+    # env = 'dev'
     if env is None:
         # log_.error('ENV ERROR: is None!')
         return

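The last hunk re-enables environment selection through ROV_OFFLINE_ENV instead of the hard-coded 'dev'. A small sketch of that pattern (the stub classes and the mapping below are illustrative assumptions; only BaseConfig and ProductionConfig are visible in this diff):

```python
import os


class BaseConfig:                      # stub for illustration only
    ENV = 'dev'


class ProductionConfig(BaseConfig):    # stub for illustration only
    ENV = 'production'


def set_config():
    # read the deployment environment; bail out when the variable is unset
    env = os.environ.get('ROV_OFFLINE_ENV')
    if env is None:
        return None
    config_map = {'dev': BaseConfig, 'production': ProductionConfig}  # assumed mapping
    return config_map.get(env, BaseConfig)()


if __name__ == '__main__':
    os.environ.setdefault('ROV_OFFLINE_ENV', 'dev')
    print(type(set_config()).__name__)
```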
+ 2 - 2
redis_data_monitor.py

@@ -36,8 +36,8 @@ def region_data_monitor(now_date, now_h, rule_params, key_prefix_dict):
         data_key = param.get('data')
         rule_key = param.get('rule')
         for key_con, key_prefix in key_prefix_dict.items():
-            if key_con == '不区分地域相对24h筛选后剩余去重后数据' and rule_key != 'rule4':
-                continue
+            # if key_con == '不区分地域相对24h筛选后剩余去重后数据' and rule_key != 'rule4':
+            #     continue
             no_update_region_list = []
             for region in region_code_list:
                 region_key_name = f"{key_prefix}{region}:{data_key}:{rule_key}:{now_date}:{now_h}"

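With the rule4-only guard commented out, the monitor now checks this key family for every configured data/rule pair. A minimal sketch of the per-region existence check, assuming a plain redis.Redis client in place of the project's helper:

```python
import redis

redis_client = redis.Redis()  # assumption: stands in for the project's redis helper


def regions_without_update(key_prefix, region_code_list, data_key, rule_key, now_date, now_h):
    """Return the regions whose recall key was not written for this hour."""
    no_update_region_list = []
    for region in region_code_list:
        region_key_name = f"{key_prefix}{region}:{data_key}:{rule_key}:{now_date}:{now_h}"
        if not redis_client.exists(region_key_name):
            no_update_region_list.append(region)
    return no_update_region_list
```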
+ 59 - 17
region_rule_rank_h.py

@@ -267,14 +267,14 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, by
                                dup_key_name=h_24h_dup_key_name, region=region)
 
         # ##### dedupe against the mini-program relative-24h remaining (post-filter) data, update the result, and store it separately in Redis
-        if by_24h_rule_key == 'rule3':
-            other_h_24h_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{data_key}:" \
-                                   f"{by_24h_rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
-            other_h_24h_dup_key_name = \
-                f"{config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H}{region}:{data_key}:{rule_key}:" \
-                f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
-            h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=other_h_24h_key_name,
-                                   dup_key_name=other_h_24h_dup_key_name, region=region)
+        # if by_24h_rule_key in ['rule3', 'rule4']:
+        other_h_24h_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{data_key}:" \
+                               f"{by_24h_rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
+        other_h_24h_dup_key_name = \
+            f"{config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H}{region}:{data_key}:{rule_key}:" \
+            f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
+        h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=other_h_24h_key_name,
+                               dup_key_name=other_h_24h_dup_key_name, region=region)
 
     # ##### dedupe against the mini-program model update result and store it separately in Redis
     model_key_name = get_rov_redis_key(now_date=now_date)
@@ -458,6 +458,21 @@ def merge_df(df_left, df_right):
     return df_merged[feature_list]
 
 
+def merge_df_with_score(df_left, df_right):
+    """
+    Merge the two dataframes on [videoid, code]; platform return count, return count and score are each summed
+    :param df_left:
+    :param df_right:
+    :return:
+    """
+    df_merged = pd.merge(df_left, df_right, on=['videoid', 'code'], how='outer', suffixes=['_x', '_y'])
+    df_merged.fillna(0, inplace=True)
+    feature_list = ['videoid', 'code', 'lastonehour_return', 'platform_return', 'score']
+    for feature in feature_list[2:]:
+        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
+    return df_merged[feature_list]
+
+
 def process_with_region(region, df_merged, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag):
     log_.info(f"region = {region} start...")
     # compute score
@@ -469,6 +484,14 @@ def process_with_region(region, df_merged, data_key, rule_key, rule_param, now_d
     log_.info(f"region = {region} end!")
 
 
+def process_with_region2(region, df_merged, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag):
+    log_.info(f"region = {region} start...")
+    region_score_df = df_merged[df_merged['code'] == region]
+    log_.info(f'region = {region}, region_score_df count = {len(region_score_df)}')
+    video_rank(df=region_score_df, now_date=now_date, now_h=now_h, region=region,
+               rule_key=rule_key, param=rule_param, data_key=data_key, rule_rank_h_flag=rule_rank_h_flag)
+    log_.info(f"region = {region} end!")
+
 
 def process_with_app_type(app_type, params, region_code_list, feature_df, now_date, now_h, rule_rank_h_flag):
     log_.info(f"app_type = {app_type} start...")
@@ -520,19 +543,38 @@ def process_with_param(param, data_params_item, rule_params_item, region_code_li
     data_key = param.get('data')
     data_param = data_params_item.get(data_key)
     log_.info(f"data_key = {data_key}, data_param = {data_param}")
-    df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
-    df_merged = reduce(merge_df, df_list)
-
     rule_key = param.get('rule')
     rule_param = rule_params_item.get(rule_key)
     log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
-    task_list = [
-        gevent.spawn(process_with_region,
-                     region, df_merged, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag)
-        for region in region_code_list
-    ]
-    gevent.joinall(task_list)
+    merge_func = rule_param.get('merge_func', None)
+
+    if merge_func == 2:
+        score_df_list = []
+        for apptype, weight in data_param.items():
+            df = feature_df[feature_df['apptype'] == apptype]
+            # compute score
+            score_df = cal_score(df=df, param=rule_param)
+            score_df['score'] = score_df['score'] * weight
+            score_df_list.append(score_df)
+        # merge the weighted scores
+        df_merged = reduce(merge_df_with_score, score_df_list)
+        # update the platform return rate
+        df_merged['platform_return_rate'] = df_merged['platform_return'] / df_merged['lastonehour_return']
+        task_list = [
+            gevent.spawn(process_with_region2,
+                         region, df_merged, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag)
+            for region in region_code_list
+        ]
+    else:
+        df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+        df_merged = reduce(merge_df, df_list)
+        task_list = [
+            gevent.spawn(process_with_region,
+                         region, df_merged, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag)
+            for region in region_code_list
+        ]
 
+    gevent.joinall(task_list)
     log_.info(f"param = {param} end!")
 
 

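The new merge_func == 2 path scores each apptype separately, multiplies the scores by the per-apptype weight from data_param, and then folds the frames together with merge_df_with_score. A self-contained example of that fold (the data below is made up; region codes and counts are illustrative only):

```python
from functools import reduce

import pandas as pd


def merge_df_with_score(df_left, df_right):
    # merge on [videoid, code]; sum return count, platform return count and score
    df_merged = pd.merge(df_left, df_right, on=['videoid', 'code'], how='outer', suffixes=['_x', '_y'])
    df_merged.fillna(0, inplace=True)
    feature_list = ['videoid', 'code', 'lastonehour_return', 'platform_return', 'score']
    for feature in feature_list[2:]:
        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
    return df_merged[feature_list]


# toy per-apptype score frames, already multiplied by their data_param weight
df_a = pd.DataFrame({'videoid': [1, 2], 'code': ['110000', '110000'],
                     'lastonehour_return': [10, 5], 'platform_return': [2, 1],
                     'score': [0.30, 0.12]})
df_b = pd.DataFrame({'videoid': [1, 3], 'code': ['110000', '310000'],
                     'lastonehour_return': [4, 8], 'platform_return': [1, 3],
                     'score': [0.08, 0.20]})

df_merged = reduce(merge_df_with_score, [df_a, df_b])
# recompute the platform return rate on the summed columns, as in process_with_param
df_merged['platform_return_rate'] = df_merged['platform_return'] / df_merged['lastonehour_return']
print(df_merged)
```

Summing weighted per-apptype scores before ranking is what distinguishes rule7 (merge_func 2) from the plain merge_df path used by the other rules.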
+ 2 - 2
region_rule_rank_h_by24h.py

@@ -199,7 +199,7 @@ def merge_df(df_left, df_right):
 
 def merge_df_with_score(df_left, df_right):
     """
-    Merge the two dataframes on videoid; platform return count, return count and score are each summed
+    Merge the two dataframes on [videoid, code]; platform return count, return count and score are each summed
     :param df_left:
     :param df_right:
     :return:
@@ -301,7 +301,7 @@ def rank_by_24h(project, table, now_date, now_h, rule_params, region_code_list):
     rule_params_item = rule_params.get('rule_params')
     params_list = rule_params.get('params_list')
     pool = multiprocessing.Pool(processes=len(params_list))
-    for param in params_list[1:]:
+    for param in params_list:
         pool.apply_async(
             func=process_with_param,
             args=(param, data_params_item, rule_params_item, region_code_list, feature_df, now_date, now_h)

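Dropping the [1:] slice means the multiprocessing pool now dispatches every entry of params_list rather than skipping the first one. The dispatch pattern, for reference (the worker body below is a placeholder, not the project's process_with_param):

```python
import multiprocessing


def process_with_param(param):
    # placeholder worker; the real one queries features and ranks per region
    print(f"processing {param}")


def rank_all(params_list):
    pool = multiprocessing.Pool(processes=len(params_list))
    for param in params_list:          # full list, no [1:] slice
        pool.apply_async(func=process_with_param, args=(param,))
    pool.close()
    pool.join()


if __name__ == '__main__':
    rank_all([{'data': 'data1', 'rule': 'rule2'}, {'data': 'data6', 'rule': 'rule7'}])
```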
+ 23 - 23
rule_rank_h_by_24h.py

@@ -178,29 +178,29 @@ def video_rank_h(df, now_date, now_h, rule_key, param, data_key):
         # clear the online filter app list
         # redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{app_type}.{data_key}.{rule_key}")
 
-    if rule_key in ['rule3', 'rule4']:
-        # dedupe against the selected results, keep the remaining data and write it to Redis
-        all_videos = df['videoid'].to_list()
-        log_.info(f'h_by24h_recall all videos count = {len(all_videos)}')
-        # filter by video status
-        all_filtered_videos = filter_video_status(all_videos)
-        log_.info(f'all_filtered_videos count = {len(all_filtered_videos)}')
-        # dedupe against the selected results
-        other_videos = [video for video in all_filtered_videos if video not in day_video_ids]
-        log_.info(f'other_videos count = {len(other_videos)}')
-        # write to the corresponding redis key
-        other_24h_recall_result = {}
-        for video_id in other_videos:
-            score = df[df['videoid'] == video_id]['score']
-            other_24h_recall_result[int(video_id)] = float(score)
-        # other_h_24h_recall_key_name = \
-        #     f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{app_type}:{data_key}:{rule_key}:{now_dt}:{now_h}"
-        other_h_24h_recall_key_name = \
-            f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{data_key}:{rule_key}:{now_dt}:{now_h}"
-        if len(other_24h_recall_result) > 0:
-            log_.info(f"count = {len(other_24h_recall_result)}")
-            redis_helper.add_data_with_zset(key_name=other_h_24h_recall_key_name, data=other_24h_recall_result,
-                                            expire_time=2 * 3600)
+    # if rule_key in ['rule3', 'rule4']:
+    # dedupe against the selected results, keep the remaining data and write it to Redis
+    all_videos = df['videoid'].to_list()
+    log_.info(f'h_by24h_recall all videos count = {len(all_videos)}')
+    # filter by video status
+    all_filtered_videos = filter_video_status(all_videos)
+    log_.info(f'all_filtered_videos count = {len(all_filtered_videos)}')
+    # dedupe against the selected results
+    other_videos = [video for video in all_filtered_videos if video not in day_video_ids]
+    log_.info(f'other_videos count = {len(other_videos)}')
+    # write to the corresponding redis key
+    other_24h_recall_result = {}
+    for video_id in other_videos:
+        score = df[df['videoid'] == video_id]['score']
+        other_24h_recall_result[int(video_id)] = float(score)
+    # other_h_24h_recall_key_name = \
+    #     f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{app_type}:{data_key}:{rule_key}:{now_dt}:{now_h}"
+    other_h_24h_recall_key_name = \
+        f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{data_key}:{rule_key}:{now_dt}:{now_h}"
+    if len(other_24h_recall_result) > 0:
+        log_.info(f"count = {len(other_24h_recall_result)}")
+        redis_helper.add_data_with_zset(key_name=other_h_24h_recall_key_name, data=other_24h_recall_result,
+                                        expire_time=2 * 3600)
 
     # dedupe against the rov model update result and store it separately in Redis
     # initial_data_dup = {}
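One caveat worth noting in the now-unconditional block: df[df['videoid'] == video_id]['score'] yields a Series, and float() on it only succeeds when the videoid appears exactly once in df. A vectorized sketch of the same lookup (an alternative for illustration, not the file's current code):

```python
import pandas as pd


def build_other_recall_result(df, day_video_ids, all_filtered_videos):
    # keep videos that passed status filtering but were not already selected
    other_videos = [v for v in all_filtered_videos if v not in day_video_ids]
    # single lookup table instead of filtering df once per video id
    score_by_video = df.drop_duplicates('videoid').set_index('videoid')['score']
    return {int(v): float(score_by_video[v])
            for v in other_videos if v in score_by_video.index}
```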