
Merge branch 'update-by-apptype' into pre-master

liqian, 3 years ago
Parent
Current commit: 6160c41746
7 files changed, 784 insertions and 366 deletions
  1. check_video_limit_distribute.py (+94 -40)
  2. config.py (+240 -19)
  3. redis_data_monitor.py (+12 -46)
  4. region_rule_rank_h.py (+141 -67)
  5. region_rule_rank_h_by24h.py (+128 -87)
  6. rule_rank_h_by_24h.py (+95 -52)
  7. videos_filter.py (+74 -55)

check_video_limit_distribute.py (+94 -40)

@@ -1,3 +1,4 @@
+import gevent
 import datetime
 import numpy as np
 from config import set_config
@@ -93,6 +94,46 @@ def check_videos_distribute():
     return stop_distribute_video_id_list
 
 
+def process_with_region(app_type, data_key, rule_key, region, stop_distribute_video_id_list, now_date, now_h):
+    log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region}")
+    # 将已超分发视频加入到地域小时级线上过滤应用列表中
+    redis_helper.add_data_with_set(
+        key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{app_type}.{data_key}.{rule_key}",
+        values=stop_distribute_video_id_list,
+        expire_time=2 * 3600
+    )
+    # 将已超分发视频加入到地域分组24h的数据线上过滤应用列表中
+    redis_helper.add_data_with_set(
+        key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}",
+        values=stop_distribute_video_id_list,
+        expire_time=2 * 3600
+    )
+    # 将已超分发视频加入到不区分相对24h线上过滤应用列表中
+    redis_helper.add_data_with_set(
+        key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}",
+        values=stop_distribute_video_id_list,
+        expire_time=2 * 3600
+    )
+    # 将已超分发视频 移除 大列表
+    key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}" \
+               f"{region}.{app_type}.{data_key}.{rule_key}." \
+               f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+    if not redis_helper.key_exists(key_name=key_name):
+        if now_h == 0:
+            redis_date = now_date - datetime.timedelta(days=1)
+            redis_h = 23
+        else:
+            redis_date = now_date
+            redis_h = now_h - 1
+        key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}" \
+                   f"{region}.{app_type}.{data_key}.{rule_key}." \
+                   f"{datetime.datetime.strftime(redis_date, '%Y%m%d')}.{redis_h}"
+    redis_helper.remove_value_from_zset(key_name=key_name, value=stop_distribute_video_id_list)
+
+    log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region} "
+              f"videos check end!")
+
+
 def check_region_videos():
     """检查限流视频分发数"""
     # 获取当前日期
@@ -110,46 +151,59 @@ def check_region_videos():
 
     # 对已超分发的视频进行移除
     region_code_list = [code for region, code in config_.REGION_CODE.items()]
-    rule_params = config_.RULE_PARAMS_REGION
-
-    for region in region_code_list:
-        log_.info(f"region = {region}")
-        for key, value in rule_params.items():
-            log_.info(f"rule = {key}, param = {value}")
-            # 将已超分发视频加入到地域小时级线上过滤应用列表中
-            redis_helper.add_data_with_set(
-                key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{key}",
-                values=stop_distribute_video_id_list,
-                expire_time=2 * 3600
-            )
-            # 将已超分发视频加入到地域分组24h的数据线上过滤应用列表中
-            redis_helper.add_data_with_set(
-                key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{key}",
-                values=stop_distribute_video_id_list,
-                expire_time=2 * 3600
-            )
-            # 将已超分发视频加入到不区分相对24h线上过滤应用列表中
-            redis_helper.add_data_with_set(
-                key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{key}",
-                values=stop_distribute_video_id_list,
-                expire_time=2 * 3600
-            )
-            # 将已超分发视频 移除 大列表
-            key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}{region}.{key}." \
-                       f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
-            if not redis_helper.key_exists(key_name=key_name):
-                if now_h == 0:
-                    redis_date = now_date - datetime.timedelta(days=1)
-                    redis_h = 23
-                else:
-                    redis_date = now_date
-                    redis_h = now_h - 1
-                key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}{region}.{key}." \
-                           f"{datetime.datetime.strftime(redis_date, '%Y%m%d')}.{redis_h}"
-            redis_helper.remove_value_from_zset(key_name=key_name, value=stop_distribute_video_id_list)
-
-        log_.info(f"region = {region} videos check end!")
-    log_.info("region_h videos check end!")
+    rule_params = config_.RULE_PARAMS_REGION_APP_TYPE
+
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            for rule_key, rule_param in params['rule_params'].items():
+                log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+                task_list = [
+                    gevent.spawn(process_with_region, app_type, data_key, rule_key, region,
+                                 stop_distribute_video_id_list, now_date, now_h)
+                    for region in region_code_list
+                ]
+                gevent.joinall(task_list)
+
+                # for region in region_code_list:
+                #     log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region}")
+                #     # 将已超分发视频加入到地域小时级线上过滤应用列表中
+                #     redis_helper.add_data_with_set(
+                #         key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{app_type}.{data_key}.{rule_key}",
+                #         values=stop_distribute_video_id_list,
+                #         expire_time=2 * 3600
+                #     )
+                #     # 将已超分发视频加入到地域分组24h的数据线上过滤应用列表中
+                #     redis_helper.add_data_with_set(
+                #         key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}",
+                #         values=stop_distribute_video_id_list,
+                #         expire_time=2 * 3600
+                #     )
+                #     # 将已超分发视频加入到不区分相对24h线上过滤应用列表中
+                #     redis_helper.add_data_with_set(
+                #         key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}",
+                #         values=stop_distribute_video_id_list,
+                #         expire_time=2 * 3600
+                #     )
+                #     # 将已超分发视频 移除 大列表
+                #     key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}" \
+                #                f"{region}.{app_type}.{data_key}.{rule_key}." \
+                #                f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+                #     if not redis_helper.key_exists(key_name=key_name):
+                #         if now_h == 0:
+                #             redis_date = now_date - datetime.timedelta(days=1)
+                #             redis_h = 23
+                #         else:
+                #             redis_date = now_date
+                #             redis_h = now_h - 1
+                #         key_name = f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}" \
+                #                    f"{region}.{app_type}.{data_key}.{rule_key}." \
+                #                    f"{datetime.datetime.strftime(redis_date, '%Y%m%d')}.{redis_h}"
+                #     redis_helper.remove_value_from_zset(key_name=key_name, value=stop_distribute_video_id_list)
+                #
+                #     log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region} "
+                #               f"videos check end!")
 
     # 将已超分发视频 移除 原始大列表
     key_name = f"{config_.RECALL_KEY_NAME_PREFIX}{datetime.datetime.strftime(now_date, '%Y%m%d')}"

config.py (+240 -19)

@@ -155,6 +155,223 @@ class BaseConfig(object):
         'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0, 'platform_return_rate': 0.001},
     }
 
+    # ##### 区分appType数据
+    # 小时级更新过去24h数据 loghubods.video_data_each_hour_dataset_24h_total_apptype
+    PROJECT_24H_APP_TYPE = 'loghubods'
+    TABLE_24H_APP_TYPE = 'video_data_each_hour_dataset_24h_total_apptype'
+
+    # 小时级更新过去24h数据规则参数
+    RULE_PARAMS_24H_APP_TYPE = {
+        APP_TYPE['VLOG']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LONG_VIDEO']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+                'data2': [APP_TYPE['LONG_VIDEO'], ],
+                'data3': [APP_TYPE['VLOG'], APP_TYPE['LONG_VIDEO'], ],
+            }
+        },
+        APP_TYPE['LOVE_LIVE']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['SHORT_VIDEO']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LAO_HAO_KAN_VIDEO']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['ZUI_JING_QI']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['APP']: {
+            'rule_params': {
+                'rule2': {'cal_score_func': 2, 'return_count': 100, 'platform_return_rate': 0.001,
+                          'view_type': 'preview'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+    }
+
+    # 地域分组小时级更新24h使用数据  loghubods.video_each_day_update_province_24h_total_apptype
+    PROJECT_REGION_24H_APP_TYPE = 'loghubods'
+    TABLE_REGION_24H_APP_TYPE = 'video_each_day_update_province_24h_total_apptype'
+
+    # 地域分组小时级更新24h规则参数
+    RULE_PARAMS_REGION_24H_APP_TYPE = {
+        APP_TYPE['VLOG']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LONG_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+                'data2': [APP_TYPE['LONG_VIDEO'], ],
+                'data3': [APP_TYPE['VLOG'], APP_TYPE['LONG_VIDEO'], ],
+            }
+        },
+        APP_TYPE['LOVE_LIVE']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['SHORT_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LAO_HAO_KAN_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['ZUI_JING_QI']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['APP']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'return_count': 21, 'score_rule': 0,
+                          'platform_return_rate': 0.001},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+    }
+
+    # 地域分组小时级规则更新使用数据
+    PROJECT_REGION_APP_TYPE = 'loghubods'
+    TABLE_REGION_APP_TYPE = 'video_each_hour_update_province_apptype'
+
+    # 地域分组小时级规则参数
+    RULE_PARAMS_REGION_APP_TYPE = {
+        APP_TYPE['VLOG']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+                'rule3': {'view_type': 'video-show-region', 'platform_return_rate': 0.001,
+                          'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LONG_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+                'data2': [APP_TYPE['LONG_VIDEO'], ],
+                'data3': [APP_TYPE['VLOG'], APP_TYPE['LONG_VIDEO'], ],
+            }
+        },
+        APP_TYPE['LOVE_LIVE']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['SHORT_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['LAO_HAO_KAN_VIDEO']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['ZUI_JING_QI']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+        APP_TYPE['APP']: {
+            'rule_params': {
+                'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
+            },
+            'data_params': {
+                'data1': [APP_TYPE['VLOG'], ],
+            }
+        },
+    }
+
     # 老视频更新使用数据
     OLD_VIDEOS_PROJECT = 'loghubods'
     OLD_VIDEOS_TABLE = 'xcx_test_video'
@@ -188,50 +405,54 @@ class BaseConfig(object):
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup.day.pre.{rule_key}.{date}
     RECALL_KEY_NAME_PREFIX_DUP_DAY_PRE = 'com.weiqu.video.recall.hot.item.score.dup.day.pre.'
 
-    # 小程序小时级24h数据更新结果存放 redis key前缀,完整格式:com.weiqu.video.recall.item.score.day.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_BY_24H = 'com.weiqu.video.recall.item.score.24h.'
+    # 小程序小时级24h数据更新结果存放 redis key前缀,
+    # 完整格式:com.weiqu.video.recall.item.score.apptype.24h.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_BY_24H = 'com.weiqu.video.recall.item.score.apptype.24h.'
     # 小程序离线ROV模型结果与小程序小时级24h更新结果去重后 存放 redis key前缀,
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup.24h.{rule_key}.{date}.{h}
     RECALL_KEY_NAME_PREFIX_DUP_24H = 'com.weiqu.video.recall.hot.item.score.dup.24h.'
-    # 小时级视频状态不符合推荐要求的列表 redis key,完整格式:com.weiqu.video.filter.h.item.24h.{rule_key}
-    H_VIDEO_FILER_24H = 'com.weiqu.video.filter.h.item.24h.'
+    # 小时级视频状态不符合推荐要求的列表 redis key,完整格式:com.weiqu.video.filter.apptype.h.item.24h.{appType}.{data_key}.{rule_key}
+    H_VIDEO_FILER_24H = 'com.weiqu.video.filter.apptype.h.item.24h.'
 
-    # 小程序地域分组小时级更新结果存放 redis key前缀,完整格式:com.weiqu.video.recall.item.score.region.h.{region}.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_REGION_BY_H = 'com.weiqu.video.recall.item.score.region.h.'
+    # 小程序地域分组小时级更新结果存放 redis key前缀,
+    # 完整格式:com.weiqu.video.recall.item.score.apptype.region.h.{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_REGION_BY_H = 'com.weiqu.video.recall.item.score.apptype.region.h.'
     # 小程序地域分组天级更新结果与小程序地域分组小时级更新结果去重后 存放 redis key前缀,
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup1.region.day.h.{region}.{rule_key}.{date}.{h}
     RECALL_KEY_NAME_PREFIX_DUP1_REGION_DAY_H = 'com.weiqu.video.recall.hot.item.score.dup1.region.day.h.'
     # 小程序地域分组小时级更新24h结果与小程序地域分组小时级更新结果去重后 存放 redis key前缀,
-    # 完整格式:com.weiqu.video.recall.hot.item.score.dup1.region.24h.h.{region}.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H = 'com.weiqu.video.recall.hot.item.score.dup1.region.24h.h.'
+    # 完整格式:com.weiqu.video.recall.hot.item.score.dup1.apptype.region.24h.h.{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H = 'com.weiqu.video.recall.hot.item.score.dup1.apptype.region.24h.h.'
     # 小程序天级更新结果与 小程序地域分组天级更新结果/小程序地域分组小时级更新结果 去重后 存放 redis key前缀,
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup2.region.day.h.{region}.{rule_key}.{date}.{h}
     RECALL_KEY_NAME_PREFIX_DUP2_REGION_DAY_H = 'com.weiqu.video.recall.hot.item.score.dup2.region.day.h.'
     # 小程序24h更新结果与 小程序地域分组24h更新结果/小程序地域分组小时级更新结果 去重后 存放 redis key前缀,
-    # 完整格式:com.weiqu.video.recall.hot.item.score.dup2.region.24h.h.{region}.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H = 'com.weiqu.video.recall.hot.item.score.dup2.region.24h.h.'
+    # 完整格式:com.weiqu.video.recall.hot.item.score.dup2.apptype.region.24h.h.{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H = 'com.weiqu.video.recall.hot.item.score.dup2.apptype.region.24h.h.'
     # 小程序离线ROV模型结果与 小程序天级更新结果/小程序地域分组天级更新结果/小程序地域分组小时级更新结果 去重后 存放 redis key前缀,
-    # 完整格式:com.weiqu.video.recall.hot.item.score.dup.region.h.{region}.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_DUP_REGION_H = 'com.weiqu.video.recall.hot.item.score.dup.region.h.'
-    # 地域分组小时级视频状态不符合推荐要求的列表 redis key,完整格式:com.weiqu.video.filter.region.h.item.{region}.{rule_key}
-    REGION_H_VIDEO_FILER = 'com.weiqu.video.filter.region.h.item.'
+    # 完整格式:com.weiqu.video.recall.hot.item.score.dup.apptype.region.h.{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_DUP_REGION_H = 'com.weiqu.video.recall.hot.item.score.dup.apptype.region.h.'
+    # 地域分组小时级视频状态不符合推荐要求的列表 redis key,
+    # 完整格式:com.weiqu.video.filter.apptype.region.h.item.{region}.{appType}.{data_key}.{rule_key}
+    REGION_H_VIDEO_FILER = 'com.weiqu.video.filter.apptype.region.h.item.'
     # 小时级视频状态不符合推荐要求的列表 redis key,完整格式:com.weiqu.video.filter.h.item.24h.{region}.{rule_key}
     # H_VIDEO_FILER_24H = 'com.weiqu.video.filter.h.item.24h.'
 
     # 小程序地域分组天级更新结果存放 redis key前缀,完整格式:com.weiqu.video.recall.item.score.region.day.{region}.{rule_key}.{date}
     RECALL_KEY_NAME_PREFIX_REGION_BY_DAY = 'com.weiqu.video.recall.item.score.region.day.'
 
-    # 小程序地域分组小时级更新24h结果存放 redis key前缀,完整格式:com.weiqu.video.recall.item.score.region.24h.{region}.{rule_key}.{date}.{h}
-    RECALL_KEY_NAME_PREFIX_REGION_BY_24H = 'com.weiqu.video.recall.item.score.region.24h.'
+    # 小程序地域分组小时级更新24h结果存放 redis key前缀,
+    # 完整格式:com.weiqu.video.recall.item.score.apptype.region.24h.{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
+    RECALL_KEY_NAME_PREFIX_REGION_BY_24H = 'com.weiqu.video.recall.item.score.apptype.region.24h.'
     # 小程序天级更新结果与 小程序地域分组小时级更新24h结果 去重后 存放 redis key前缀,
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup.region.day.24h.{region}.{rule_key}.{date}.{h}
     RECALL_KEY_NAME_PREFIX_DUP_REGION_DAY_24H = 'com.weiqu.video.recall.hot.item.score.dup.region.day.24h.'
     # 小程序离线ROV模型结果与 小程序天级更新结果/小程序地域分组小时级更新24h结果 去重后 存放 redis key前缀,
     # 完整格式:com.weiqu.video.recall.hot.item.score.dup.region.24h.{region}.{rule_key}.{date}.{h}
     RECALL_KEY_NAME_PREFIX_DUP_REGION_24H = 'com.weiqu.video.recall.hot.item.score.dup.region.24h.'
-    # 地域分组小时级更新24h视频状态不符合推荐要求的列表 redis key,完整格式:com.weiqu.video.filter.region.h.item.24h.{region}.{rule_key}
-    REGION_H_VIDEO_FILER_24H = 'com.weiqu.video.filter.region.h.item.24h.'
-
+    # 地域分组小时级更新24h视频状态不符合推荐要求的列表 redis key,
+    # 完整格式:com.weiqu.video.filter.apptype.region.h.item.24h.{region}.{appType}.{data_key}.{rule_key}
+    REGION_H_VIDEO_FILER_24H = 'com.weiqu.video.filter.apptype.region.h.item.24h.'
 
     # 小程序老视频更新结果存放 redis key 前缀,完整格式:'com.weiqu.video.recall.old.item.{date}'
     RECALL_KEY_NAME_PREFIX_OLD_VIDEOS = 'com.weiqu.video.recall.old.item.'
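
Each new *_APP_TYPE dict above is keyed by an APP_TYPE code and holds two sub-dicts: 'rule_params' (per-rule thresholds) and 'data_params' (which app types' feature rows are pooled under each data_key). The sketch below shows how a consumer might walk this structure and assemble the new appType-aware Redis keys; the APP_TYPE codes, region and date slot are placeholders, only the key layout follows the prefix comments above.

APP_TYPE = {'VLOG': 0, 'LONG_VIDEO': 1}  # placeholder codes, not the real config values
RULE_PARAMS_REGION_APP_TYPE = {
    APP_TYPE['LONG_VIDEO']: {
        'rule_params': {
            'rule2': {'view_type': 'video-show', 'platform_return_rate': 0.001, 'region_24h_rule_key': 'rule2'},
        },
        'data_params': {
            'data1': [APP_TYPE['VLOG']],
            'data3': [APP_TYPE['VLOG'], APP_TYPE['LONG_VIDEO']],
        },
    },
}

RECALL_KEY_NAME_PREFIX_REGION_BY_H = 'com.weiqu.video.recall.item.score.apptype.region.h.'
region, date_str, now_h = '110000', '20220520', 10  # placeholder slot

for app_type, params in RULE_PARAMS_REGION_APP_TYPE.items():
    for data_key in params['data_params']:
        for rule_key in params['rule_params']:
            # format: {prefix}{region}.{appType}.{data_key}.{rule_key}.{date}.{h}
            print(f"{RECALL_KEY_NAME_PREFIX_REGION_BY_H}{region}.{app_type}.{data_key}.{rule_key}.{date_str}.{now_h}")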

redis_data_monitor.py (+12 -46)

@@ -7,43 +7,7 @@ config_, _ = set_config()
 log_ = Log()
 redis_helper = RedisHelper()
 
-region_code = {
-    '河北省': '130000',
-    '山西省': '140000',
-    '辽宁省': '210000',
-    '吉林省': '220000',
-    '黑龙江省': '230000',
-    '江苏省': '320000',
-    '浙江省': '330000',
-    '安徽省': '340000',
-    '福建省': '350000',
-    '江西省': '360000',
-    '山东省': '370000',
-    '河南省': '410000',
-    '湖北省': '420000',
-    '湖南省': '430000',
-    '广东省': '440000',
-    '海南省': '460000',
-    '四川省': '510000',
-    '贵州省': '520000',
-    '云南省': '530000',
-    '陕西省': '610000',
-    '甘肃省': '620000',
-    '青海省': '630000',
-    '台湾省': '710000',
-    '北京': '110000',
-    '天津': '120000',
-    '内蒙古': '150000',
-    '上海': '310000',
-    '广西': '450000',
-    '重庆': '500000',
-    '西藏': '540000',
-    '宁夏': '640000',
-    '新疆': '650000',
-    '香港': '810000',
-    '澳门': '820000',
-    'None': '-1'
-}
+region_code = config_.REGION_CODE
 
 
 def rov_data_monitor(now_date, now_h):
@@ -143,7 +107,7 @@ def get_redis_data_keys(now_date, now_h):
         rov_key_name = f"{config_.RECALL_KEY_NAME_PREFIX}{now_date}"
         redis_data_keys.append(rov_key_name)
     # 地域分组小时级列表
-    rule_params = config_.RULE_PARAMS_REGION
+    rule_params = config_.RULE_PARAMS_REGION_APP_TYPE
     key_prefix_list = [
         config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,
         config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,
@@ -151,14 +115,16 @@ def get_redis_data_keys(now_date, now_h):
         config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H
     ]
     region_code_list = [code for region, code in region_code.items()]
-    for rule_key, _ in rule_params.items():
-        for region in region_code_list:
-            if region == '-1':
-                region_key_name = [f"{key_prefix_list[-1]}{region}.{rule_key}.{now_date}.{now_h}"]
-            else:
-                region_key_name = [f"{key_prefix}{region}.{rule_key}.{now_date}.{now_h}"
-                                   for key_prefix in key_prefix_list]
-            redis_data_keys.extend(region_key_name)
+    for app_type, params in rule_params.items():
+        for data_key, data_param in params['data_params'].items():
+            for rule_key, rule_param in params['rule_params'].items():
+                for region in region_code_list:
+                    if region == '-1':
+                        region_key_name = [f"{key_prefix_list[-1]}{region}.{app_type}.{data_key}.{rule_key}.{now_date}.{now_h}"]
+                    else:
+                        region_key_name = [f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{now_date}.{now_h}"
+                                           for key_prefix in key_prefix_list]
+                    redis_data_keys.extend(region_key_name)
 
     return redis_data_keys
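
With the new config, the monitor enumerates one key set per (app_type, data_key, rule_key, region) instead of per (rule_key, region), and the unknown region '-1' keeps only the final de-duplicated list. A small sketch of that branch, with shortened placeholder prefixes standing in for the config_ constants:

key_prefix_list = [
    'recall.region.h.',           # placeholder for RECALL_KEY_NAME_PREFIX_REGION_BY_H
    'recall.dup1.region.24h.h.',  # placeholder for RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H
    'recall.dup2.region.24h.h.',  # placeholder for RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H
    'recall.dup.region.h.',       # placeholder for RECALL_KEY_NAME_PREFIX_DUP_REGION_H
]


def region_keys(region, app_type, data_key, rule_key, now_date, now_h):
    suffix = f"{region}.{app_type}.{data_key}.{rule_key}.{now_date}.{now_h}"
    if region == '-1':
        # Unknown region: only the final de-duplicated list is monitored.
        return [f"{key_prefix_list[-1]}{suffix}"]
    return [f"{key_prefix}{suffix}" for key_prefix in key_prefix_list]


print(region_keys('-1', 0, 'data1', 'rule2', '20220520', 10))
print(region_keys('110000', 0, 'data1', 'rule2', '20220520', 10))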
 

region_rule_rank_h.py (+141 -67)

@@ -4,9 +4,11 @@
 # @Time: 2022/5/5 15:54
 # @Software: PyCharm
 
+import gevent
 import datetime
 import pandas as pd
 import math
+from functools import reduce
 from odps import ODPS
 from threading import Timer
 from utils import MysqlHelper, RedisHelper, get_data_from_odps, filter_video_status
@@ -20,6 +22,7 @@ log_ = Log()
 region_code = config_.REGION_CODE
 
 features = [
+    'apptype',
     'code',
     'videoid',
     'lastonehour_preview',  # 过去1小时预曝光人数
@@ -124,7 +127,7 @@ def cal_score(df, param):
     return df
 
 
-def video_rank(df, now_date, now_h, rule_key, param, region):
+def video_rank(df, now_date, now_h, rule_key, param, region, app_type, data_key):
     """
     获取符合进入召回源条件的视频,与每日更新的rov模型结果视频列表进行合并
     :param df:
@@ -163,21 +166,22 @@ def video_rank(df, now_date, now_h, rule_key, param, region):
         h_recall_result[int(video_id)] = float(score)
         h_video_ids.append(int(video_id))
     h_recall_key_name = \
-        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H}{region}.{rule_key}.{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H}{region}.{app_type}.{data_key}.{rule_key}." \
+        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
     if len(h_recall_result) > 0:
         redis_helper.add_data_with_zset(key_name=h_recall_key_name, data=h_recall_result, expire_time=23 * 3600)
         # 限流视频score调整
         update_limit_video_score(initial_videos=h_recall_result, key_name=h_recall_key_name)
         # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{rule_key}")
+        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{app_type}.{data_key}.{rule_key}")
 
     region_24h_rule_key = param.get('region_24h_rule_key', 'rule1')
     # 与其他召回视频池去重,存入对应的redis
     dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key,
-                 region_24h_rule_key=region_24h_rule_key, region=region)
+                 region_24h_rule_key=region_24h_rule_key, region=region, app_type=app_type, data_key=data_key)
 
 
-def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, region):
+def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, region, app_type, data_key):
     """将地域分组小时级数据与其他召回视频池去重,存入对应的redis"""
     redis_helper = RedisHelper()
     # # ##### 去重更新地域分组天级列表,并另存为redis中
@@ -202,7 +206,7 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
 
     # ##### 去重更新地域分组小时级24h列表,并另存为redis中
     region_24h_key_name = \
-        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H}{region}.{region_24h_rule_key}." \
+        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H}{region}.{app_type}.{data_key}.{region_24h_rule_key}." \
         f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
     if redis_helper.key_exists(key_name=region_24h_key_name):
         region_24h_data = redis_helper.get_all_data_from_zset(key_name=region_24h_key_name, with_scores=True)
@@ -214,14 +218,14 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
                 h_video_ids.append(int(video_id))
         log_.info(f"region 24h data dup count = {len(region_24h_dup)}")
         region_24h_dup_key_name = \
-            f"{config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H}{region}.{rule_key}." \
+            f"{config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H}{region}.{app_type}.{data_key}.{rule_key}." \
             f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
         if len(region_24h_dup) > 0:
             redis_helper.add_data_with_zset(key_name=region_24h_dup_key_name, data=region_24h_dup, expire_time=23 * 3600)
             # 限流视频score调整
             update_limit_video_score(initial_videos=region_24h_dup, key_name=region_24h_dup_key_name)
             # 清空线上过滤应用列表
-            redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{rule_key}")
+            # redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{app_type}.{data_key}.{region}.{rule_key}")
 
     # ##### 去重小程序天级更新结果,并另存为redis中
     # day_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_DAY}rule2.{datetime.datetime.strftime(now_date, '%Y%m%d')}"
@@ -242,7 +246,7 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
     #         redis_helper.add_data_with_zset(key_name=day_dup_key_name, data=day_dup, expire_time=23 * 3600)
 
     # ##### 去重小程序相对24h更新结果,并另存为redis中
-    day_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H}rule2." \
+    day_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H}{app_type}.{data_key}.rule2." \
                    f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
     if redis_helper.key_exists(key_name=day_key_name):
         day_data = redis_helper.get_all_data_from_zset(key_name=day_key_name, with_scores=True)
@@ -254,14 +258,14 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
                 h_video_ids.append(int(video_id))
         log_.info(f"24h data dup count = {len(day_dup)}")
         day_dup_key_name = \
-            f"{config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H}{region}.{rule_key}." \
+            f"{config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H}{region}.{app_type}.{data_key}.{rule_key}." \
             f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
         if len(day_dup) > 0:
             redis_helper.add_data_with_zset(key_name=day_dup_key_name, data=day_dup, expire_time=23 * 3600)
             # 限流视频score调整
             update_limit_video_score(initial_videos=day_dup, key_name=day_dup_key_name)
             # 清空线上过滤应用列表
-            redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{rule_key}")
+            redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}")
 
     # ##### 去重小程序模型更新结果,并另存为redis中
     model_key_name = get_rov_redis_key(now_date=now_date)
@@ -274,7 +278,7 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
             h_video_ids.append(int(video_id))
     log_.info(f"model data dup count = {len(model_data_dup)}")
     model_data_dup_key_name = \
-        f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}{region}.{rule_key}." \
+        f"{config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H}{region}.{app_type}.{data_key}.{rule_key}." \
         f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
     if len(model_data_dup) > 0:
         redis_helper.add_data_with_zset(key_name=model_data_dup_key_name, data=model_data_dup, expire_time=23 * 3600)
@@ -282,38 +286,102 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region_24h_rule_key, re
         update_limit_video_score(initial_videos=model_data_dup, key_name=model_data_dup_key_name)
 
 
+def merge_df(df_left, df_right):
+    """
+    df按照videoid, code 合并,对应特征求和
+    :param df_left:
+    :param df_right:
+    :return:
+    """
+    df_merged = pd.merge(df_left, df_right, on=['videoid', 'code'], how='outer', suffixes=['_x', '_y'])
+    df_merged.fillna(0, inplace=True)
+    feature_list = ['videoid', 'code']
+    for feature in features:
+        if feature in ['apptype', 'videoid', 'code']:
+            continue
+        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
+        feature_list.append(feature)
+    return df_merged[feature_list]
+
+
+def process_with_region(region, df_merged, app_type, data_key, rule_key, rule_param, now_date, now_h):
+    log_.info(f"region = {region}")
+    # 计算score
+    region_df = df_merged[df_merged['code'] == region]
+    log_.info(f'region_df count = {len(region_df)}')
+    score_df = cal_score(df=region_df, param=rule_param)
+    video_rank(df=score_df, now_date=now_date, now_h=now_h, rule_key=rule_key, param=rule_param,
+               region=region, app_type=app_type, data_key=data_key)
+
+
+def process_with_app_type(app_type, params, region_code_list, feature_df, now_date, now_h):
+    log_.info(f"app_type = {app_type}")
+    task_list = []
+    for data_key, data_param in params['data_params'].items():
+        log_.info(f"data_key = {data_key}, data_param = {data_param}")
+        df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+        df_merged = reduce(merge_df, df_list)
+        for rule_key, rule_param in params['rule_params'].items():
+            log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+            task_list.extend(
+                [
+                    gevent.spawn(process_with_region, region, df_merged, app_type, data_key, rule_key, rule_param,
+                                 now_date, now_h)
+                    for region in region_code_list
+                ]
+            )
+    gevent.joinall(task_list)
+
+
 def rank_by_h(project, table, now_date, now_h, rule_params, region_code_list):
     # 获取特征数据
     feature_df = get_feature_data(project=project, table=table, now_date=now_date)
-    # 获取所有的region
-    # region_code_list = list(set(feature_df[''].to_list()))
+    feature_df['apptype'] = feature_df['apptype'].astype(int)
+    t = [
+        gevent.spawn(process_with_app_type, app_type, params, region_code_list, feature_df, now_date, now_h)
+        for app_type, params in rule_params.items()
+    ]
+    gevent.joinall(t)
+
+    # for app_type, params in rule_params.items():
+    #     log_.info(f"app_type = {app_type}")
+    #     for data_key, data_param in params['data_params'].items():
+    #         log_.info(f"data_key = {data_key}, data_param = {data_param}")
+    #         df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+    #         df_merged = reduce(merge_df, df_list)
+    #         for rule_key, rule_param in params['rule_params'].items():
+    #             log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+    #             task_list = [
+    #                 gevent.spawn(process_with_region, region, df_merged, app_type, data_key, rule_key, rule_param, now_date, now_h)
+    #                 for region in region_code_list
+    #             ]
+    #             gevent.joinall(task_list)
+
     # rank
-    for key, value in rule_params.items():
-        log_.info(f"rule = {key}, param = {value}")
-        for region in region_code_list:
-            log_.info(f"region = {region}")
-            # 计算score
-            region_df = feature_df[feature_df['code'] == region]
-            log_.info(f'region_df count = {len(region_df)}')
-            score_df = cal_score(df=region_df, param=value)
-            video_rank(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value, region=region)
-            # to-csv
-            score_filename = f"score_{region}_{key}_{datetime.datetime.strftime(now_date, '%Y%m%d%H')}.csv"
-            score_df.to_csv(f'./data/{score_filename}')
-            # to-logs
-            log_.info({"date": datetime.datetime.strftime(now_date, '%Y%m%d%H'),
-                       "region_code": region,
-                       "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,
-                       "rule_key": key,
-                       # "score_df": score_df[['videoid', 'score']]
-                       }
-                      )
-
-
-def h_rank_bottom(now_date, now_h, rule_key, region_code_list, param):
+    # for key, value in rule_params.items():
+    #     log_.info(f"rule = {key}, param = {value}")
+    #     for region in region_code_list:
+    #         log_.info(f"region = {region}")
+    #         # 计算score
+    #         region_df = feature_df[feature_df['code'] == region]
+    #         log_.info(f'region_df count = {len(region_df)}')
+    #         score_df = cal_score(df=region_df, param=value)
+    #         video_rank(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value, region=region)
+    #         # to-csv
+    #         score_filename = f"score_{region}_{key}_{datetime.datetime.strftime(now_date, '%Y%m%d%H')}.csv"
+    #         score_df.to_csv(f'./data/{score_filename}')
+    #         # to-logs
+    #         log_.info({"date": datetime.datetime.strftime(now_date, '%Y%m%d%H'),
+    #                    "region_code": region,
+    #                    "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,
+    #                    "rule_key": key,
+    #                    # "score_df": score_df[['videoid', 'score']]
+    #                    }
+    #                   )
+
+
+def h_rank_bottom(now_date, now_h, rule_params, region_code_list):
     """未按时更新数据,用上一小时结果作为当前小时的数据"""
-    log_.info(f"rule_key = {rule_key}")
-    region_24h_rule_key = param.get('region_24h_rule_key', 'rule1')
     # 获取rov模型结果
     redis_helper = RedisHelper()
     if now_h == 0:
@@ -334,41 +402,48 @@ def h_rank_bottom(now_date, now_h, rule_key, region_code_list, param):
 
     # 以上一小时的地域分组数据作为当前小时的数据
     key_prefix = config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H
-    for region in region_code_list:
-        log_.info(f"region = {region}")
-        key_name = f"{key_prefix}{region}.{rule_key}.{redis_dt}.{redis_h}"
-        initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
-        if initial_data is None:
-            initial_data = []
-        final_data = dict()
-        h_video_ids = []
-        for video_id, score in initial_data:
-            final_data[video_id] = score
-            h_video_ids.append(int(video_id))
-        # 存入对应的redis
-        final_key_name = \
-            f"{key_prefix}{region}.{rule_key}.{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
-        if len(final_data) > 0:
-            redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
-        # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{rule_key}")
-        # 与其他召回视频池去重,存入对应的redis
-        dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h,
-                     rule_key=rule_key, region_24h_rule_key=region_24h_rule_key, region=region)
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            for rule_key, rule_param in params['rule_params'].items():
+                log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+                region_24h_rule_key = rule_param.get('region_24h_rule_key', 'rule1')
+                for region in region_code_list:
+                    log_.info(f"region = {region}")
+                    key_name = f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{redis_dt}.{redis_h}"
+                    initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
+                    if initial_data is None:
+                        initial_data = []
+                    final_data = dict()
+                    h_video_ids = []
+                    for video_id, score in initial_data:
+                        final_data[video_id] = score
+                        h_video_ids.append(int(video_id))
+                    # 存入对应的redis
+                    final_key_name = \
+                        f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+                    if len(final_data) > 0:
+                        redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
+                    # 清空线上过滤应用列表
+                    redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{app_type}.{data_key}.{rule_key}")
+                    # 与其他召回视频池去重,存入对应的redis
+                    dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key,
+                                 region_24h_rule_key=region_24h_rule_key, region=region,
+                                 app_type=app_type, data_key=data_key)
 
 
 def h_timer_check():
-    rule_params = config_.RULE_PARAMS_REGION
-    project = config_.PROJECT_REGION
-    table = config_.TABLE_REGION
+    rule_params = config_.RULE_PARAMS_REGION_APP_TYPE
+    project = config_.PROJECT_REGION_APP_TYPE
+    table = config_.TABLE_REGION_APP_TYPE
     region_code_list = [code for region, code in region_code.items()]
     now_date = datetime.datetime.today()
     log_.info(f"now_date: {datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
     now_h = datetime.datetime.now().hour
     now_min = datetime.datetime.now().minute
     if now_h == 0:
-        for key, value in rule_params.items():
-            h_rank_bottom(now_date=now_date, now_h=now_h, rule_key=key, region_code_list=region_code_list, param=value)
+        h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list)
         return
     # 查看当前小时更新的数据是否已准备好
     h_data_count = h_data_check(project=project, table=table, now_date=now_date)
@@ -379,8 +454,7 @@ def h_timer_check():
                   project=project, table=table, region_code_list=region_code_list)
     elif now_min > 50:
         log_.info('h_recall data is None, use bottom data!')
-        for key, value in rule_params.items():
-            h_rank_bottom(now_date=now_date, now_h=now_h, rule_key=key, region_code_list=region_code_list, param=value)
+        h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list)
     else:
         # 数据没准备好,1分钟后重新检查
         Timer(60, h_timer_check).start()
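
The merge_df/reduce step is what makes data_params pooling work: the per-appType feature frames selected for a data_key are outer-merged on (videoid, code) and every numeric feature is summed. A toy, runnable version with a trimmed feature list (the real features list is much longer):

from functools import reduce

import pandas as pd

features = ['apptype', 'code', 'videoid', 'lastonehour_preview', 'lastonehour_view']  # trimmed


def merge_df(df_left, df_right):
    """Outer-merge two per-appType frames on (videoid, code) and sum the numeric features."""
    df_merged = pd.merge(df_left, df_right, on=['videoid', 'code'], how='outer', suffixes=['_x', '_y'])
    df_merged.fillna(0, inplace=True)
    feature_list = ['videoid', 'code']
    for feature in features:
        if feature in ['apptype', 'videoid', 'code']:
            continue
        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
        feature_list.append(feature)
    return df_merged[feature_list]


df_vlog = pd.DataFrame({'videoid': [1, 2], 'code': ['110000', '110000'],
                        'lastonehour_preview': [10, 20], 'lastonehour_view': [8, 15]})
df_long = pd.DataFrame({'videoid': [2, 3], 'code': ['110000', '110000'],
                        'lastonehour_preview': [5, 7], 'lastonehour_view': [4, 6]})
print(reduce(merge_df, [df_vlog, df_long]))
# videoid 2 ends up with preview 25 and view 19; videoids 1 and 3 keep their single-source counts.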

region_rule_rank_h_by24h.py (+128 -87)

@@ -4,9 +4,11 @@
 # @Time: 2022/5/5 15:54
 # @Software: PyCharm
 
+import gevent
 import datetime
 import pandas as pd
 import math
+from functools import reduce
 from odps import ODPS
 from threading import Timer
 from utils import RedisHelper, get_data_from_odps, filter_video_status
@@ -16,45 +18,10 @@ from log import Log
 config_, _ = set_config()
 log_ = Log()
 
-region_code = {
-    '河北省': '130000',
-    '山西省': '140000',
-    '辽宁省': '210000',
-    '吉林省': '220000',
-    '黑龙江省': '230000',
-    '江苏省': '320000',
-    '浙江省': '330000',
-    '安徽省': '340000',
-    '福建省': '350000',
-    '江西省': '360000',
-    '山东省': '370000',
-    '河南省': '410000',
-    '湖北省': '420000',
-    '湖南省': '430000',
-    '广东省': '440000',
-    '海南省': '460000',
-    '四川省': '510000',
-    '贵州省': '520000',
-    '云南省': '530000',
-    '陕西省': '610000',
-    '甘肃省': '620000',
-    '青海省': '630000',
-    '台湾省': '710000',
-    '北京': '110000',
-    '天津': '120000',
-    '内蒙古': '150000',
-    '上海': '310000',
-    '广西': '450000',
-    '重庆': '500000',
-    '西藏': '540000',
-    '宁夏': '640000',
-    '新疆': '650000',
-    '香港': '810000',
-    '澳门': '820000',
-    'None': '-1'
-}
+region_code = config_.REGION_CODE
 
 features = [
+    'apptype',
     'code',  # 省份编码
     'videoid',
     'lastday_preview',  # 昨日预曝光人数
@@ -153,7 +120,7 @@ def cal_score(df, param):
     return df
 
 
-def video_rank(df, now_date, now_h, rule_key, param, region):
+def video_rank(df, now_date, now_h, rule_key, param, region, app_type, data_key):
     """
     获取符合进入召回源条件的视频
     :param df:
@@ -192,39 +159,108 @@ def video_rank(df, now_date, now_h, rule_key, param, region):
         h_video_ids.append(int(video_id))
 
     day_recall_key_name = \
-        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H}{region}.{rule_key}." \
+        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H}{region}.{app_type}.{data_key}.{rule_key}." \
         f"{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
     if len(day_recall_result) > 0:
         redis_helper.add_data_with_zset(key_name=day_recall_key_name, data=day_recall_result, expire_time=23 * 3600)
         # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{rule_key}")
+        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}")
+
     # 与其他召回视频池去重,存入对应的redis
     # dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key, region=region)
 
 
+def merge_df(df_left, df_right):
+    """
+    df按照videoid, code 合并,对应特征求和
+    :param df_left:
+    :param df_right:
+    :return:
+    """
+    df_merged = pd.merge(df_left, df_right, on=['videoid', 'code'], how='outer', suffixes=['_x', '_y'])
+    df_merged.fillna(0, inplace=True)
+    feature_list = ['videoid', 'code']
+    for feature in features:
+        if feature in ['apptype', 'videoid', 'code']:
+            continue
+        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
+        feature_list.append(feature)
+    return df_merged[feature_list]
+
+
+def process_with_region(region, df_merged, app_type, data_key, rule_key, rule_param, now_date, now_h):
+    log_.info(f"region = {region}")
+    # 计算score
+    region_df = df_merged[df_merged['code'] == region]
+    log_.info(f'region_df count = {len(region_df)}')
+    score_df = cal_score(df=region_df, param=rule_param)
+    video_rank(df=score_df, now_date=now_date, now_h=now_h,
+               rule_key=rule_key, param=rule_param, region=region,
+               app_type=app_type, data_key=data_key)
+
+
+def process_with_app_type(app_type, params, region_code_list, feature_df, now_date, now_h):
+    log_.info(f"app_type = {app_type}")
+    for data_key, data_param in params['data_params'].items():
+        log_.info(f"data_key = {data_key}, data_param = {data_param}")
+        df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+        df_merged = reduce(merge_df, df_list)
+        for rule_key, rule_param in params['rule_params'].items():
+            log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+            task_list = [
+                gevent.spawn(process_with_region, region, df_merged, app_type, data_key, rule_key, rule_param,
+                             now_date, now_h)
+                for region in region_code_list
+            ]
+            gevent.joinall(task_list)
+
+
 def rank_by_24h(project, table, now_date, now_h, rule_params, region_code_list):
     # 获取特征数据
     feature_df = get_feature_data(project=project, table=table, now_date=now_date)
+    feature_df['apptype'] = feature_df['apptype'].astype(int)
     # rank
-    for key, value in rule_params.items():
-        log_.info(f"rule = {key}, param = {value}")
-        for region in region_code_list:
-            log_.info(f"region = {region}")
-            # 计算score
-            region_df = feature_df[feature_df['code'] == region]
-            log_.info(f'region_df count = {len(region_df)}')
-            score_df = cal_score(df=region_df, param=value)
-            video_rank(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value, region=region)
-            # to-csv
-            score_filename = f"score_24h_{region}_{key}_{datetime.datetime.strftime(now_date, '%Y%m%d%H')}.csv"
-            score_df.to_csv(f'./data/{score_filename}')
-            # to-logs
-            log_.info({"date": datetime.datetime.strftime(now_date, '%Y%m%d%H'),
-                       "region_code": region,
-                       "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H,
-                       "rule_key": key,
-                       # "score_df": score_df[['videoid', 'score']]
-                       })
+    t = [
+        gevent.spawn(process_with_app_type, app_type, params, region_code_list, feature_df, now_date, now_h)
+        for app_type, params in rule_params.items()
+    ]
+    gevent.joinall(t)
+
+    # for app_type, params in rule_params.items():
+    #     log_.info(f"app_type = {app_type}")
+    #     for data_key, data_param in params['data_params'].items():
+    #         log_.info(f"data_key = {data_key}, data_param = {data_param}")
+    #         df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+    #         df_merged = reduce(merge_df, df_list)
+    #         for rule_key, rule_param in params['rule_params'].items():
+    #             log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+    #             task_list = [
+    #                 gevent.spawn(process_with_region, region, df_merged, app_type, data_key, rule_key, rule_param,
+    #                              now_date, now_h)
+    #                 for region in region_code_list
+    #             ]
+    #             gevent.joinall(task_list)
+
+
+    # for key, value in rule_params.items():
+    #     log_.info(f"rule = {key}, param = {value}")
+    #     for region in region_code_list:
+    #         log_.info(f"region = {region}")
+    #         # 计算score
+    #         region_df = feature_df[feature_df['code'] == region]
+    #         log_.info(f'region_df count = {len(region_df)}')
+    #         score_df = cal_score(df=region_df, param=value)
+    #         video_rank(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value, region=region)
+    #         # to-csv
+    #         score_filename = f"score_24h_{region}_{key}_{datetime.datetime.strftime(now_date, '%Y%m%d%H')}.csv"
+    #         score_df.to_csv(f'./data/{score_filename}')
+    #         # to-logs
+    #         log_.info({"date": datetime.datetime.strftime(now_date, '%Y%m%d%H'),
+    #                    "region_code": region,
+    #                    "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H,
+    #                    "rule_key": key,
+    #                    # "score_df": score_df[['videoid', 'score']]
+    #                    })
 
 
 def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region):
@@ -265,10 +301,8 @@ def dup_to_redis(h_video_ids, now_date, now_h, rule_key, region):
         redis_helper.add_data_with_zset(key_name=model_data_dup_key_name, data=model_data_dup, expire_time=23 * 3600)
 
 
-def h_rank_bottom(now_date, now_h, rule_key, region_code_list):
+def h_rank_bottom(now_date, now_h, rule_params, region_code_list):
     """未按时更新数据,用上一小时结果作为当前小时的数据"""
-    log_.info(f"rule_key = {rule_key}")
-    # 获取rov模型结果
     redis_helper = RedisHelper()
     if now_h == 0:
         redis_dt = datetime.datetime.strftime(now_date - datetime.timedelta(days=1), '%Y%m%d')
@@ -279,33 +313,40 @@ def h_rank_bottom(now_date, now_h, rule_key, region_code_list):
 
     # 以上一小时的地域分组数据作为当前小时的数据
     key_prefix = config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H
-    for region in region_code_list:
-        log_.info(f"region = {region}")
-        key_name = f"{key_prefix}{region}.{rule_key}.{redis_dt}.{redis_h}"
-        initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
-        if initial_data is None:
-            initial_data = []
-        final_data = dict()
-        h_video_ids = []
-        for video_id, score in initial_data:
-            final_data[video_id] = score
-            h_video_ids.append(int(video_id))
-        # 存入对应的redis
-        final_key_name = \
-            f"{key_prefix}{region}.{rule_key}.{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
-        if len(final_data) > 0:
-            redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
-        # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{rule_key}")
-        # 与其他召回视频池去重,存入对应的redis
-        dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key, region=region)
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            for rule_key, rule_param in params['rule_params'].items():
+                log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+                for region in region_code_list:
+                    log_.info(f"region = {region}")
+                    key_name = f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{redis_dt}.{redis_h}"
+                    initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
+                    if initial_data is None:
+                        initial_data = []
+                    final_data = dict()
+                    h_video_ids = []
+                    for video_id, score in initial_data:
+                        final_data[video_id] = score
+                        h_video_ids.append(int(video_id))
+                    # 存入对应的redis
+                    final_key_name = \
+                        f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{datetime.datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+                    if len(final_data) > 0:
+                        redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
+                    # 清空线上过滤应用列表
+                    redis_helper.del_keys(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{app_type}.{data_key}.{rule_key}")
+
+                    # 与其他召回视频池去重,存入对应的redis
+                    # dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key, region=region)
 
 
 def h_timer_check():
-    rule_params = config_.RULE_PARAMS_REGION_24H
-    project = config_.PROJECT_REGION_24H
-    table = config_.TABLE_REGION_24H
-    region_code_list = [code for region, code in region_code.items()]
+    rule_params = config_.RULE_PARAMS_REGION_24H_APP_TYPE
+    project = config_.PROJECT_REGION_24H_APP_TYPE
+    table = config_.TABLE_REGION_24H_APP_TYPE
+    region_code_list = [code for region, code in region_code.items() if code != '-1']
     now_date = datetime.datetime.today()
     now_h = datetime.datetime.now().hour
     now_min = datetime.datetime.now().minute
@@ -320,7 +361,7 @@ def h_timer_check():
     elif now_min > 50:
         log_.info('24h_recall data is None, use bottom data!')
         for key, _ in rule_params.items():
-            h_rank_bottom(now_date=now_date, now_h=now_h, rule_key=key, region_code_list=region_code_list)
+            h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list)
     else:
         # 数据没准备好,1分钟后重新检查
         Timer(60, h_timer_check).start()
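
As in the hourly job, h_rank_bottom here falls back to the previous hour's Redis key when the current slot has no data, rolling the date back one day at hour 0. A minimal sketch of that rollover with placeholder inputs:

import datetime


def previous_slot(now_date, now_h):
    """Return the (date, hour) slot used as bottom data when the current hour is missing."""
    if now_h == 0:
        return now_date - datetime.timedelta(days=1), 23
    return now_date, now_h - 1


print(previous_slot(datetime.date(2022, 5, 20), 0))   # (datetime.date(2022, 5, 19), 23)
print(previous_slot(datetime.date(2022, 5, 20), 10))  # (datetime.date(2022, 5, 20), 9)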

rule_rank_h_by_24h.py (+95 -52)

@@ -1,5 +1,6 @@
 import pandas as pd
 import math
+from functools import reduce
 from odps import ODPS
 from threading import Timer
 from datetime import datetime, timedelta
@@ -13,6 +14,7 @@ config_, _ = set_config()
 log_ = Log()
 
 features = [
+    'apptype',
     'videoid',
     'preview人数',  # 过去24h预曝光人数
     'view人数',  # 过去24h曝光人数
@@ -119,7 +121,7 @@ def cal_score2(df, param):
     return df
 
 
-def video_rank_h(df, now_date, now_h, rule_key, param):
+def video_rank_h(df, now_date, now_h, rule_key, param, app_type, data_key):
     """
     获取符合进入召回源条件的视频,与每日更新的rov模型结果视频列表进行合并
     :param df:
@@ -127,15 +129,17 @@ def video_rank_h(df, now_date, now_h, rule_key, param):
     :param now_h:
     :param rule_key: 天级规则数据进入条件
     :param param: 天级规则数据进入条件参数
+    :param app_type:
+    :param data_key: 使用数据标识
     :return:
     """
-    # 获取rov模型结果
     redis_helper = RedisHelper()
-    key_name = get_rov_redis_key(now_date=now_date)
-    initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
-    if initial_data is None:
-        initial_data = []
-    log_.info(f'initial data count = {len(initial_data)}')
+    # 获取rov模型结果
+    # key_name = get_rov_redis_key(now_date=now_date)
+    # initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
+    # if initial_data is None:
+    #     initial_data = []
+    # log_.info(f'initial data count = {len(initial_data)}')
 
     # 获取符合进入召回源条件的视频
     return_count = param.get('return_count')
@@ -166,11 +170,11 @@ def video_rank_h(df, now_date, now_h, rule_key, param):
         day_recall_result[int(video_id)] = float(score)
         day_video_ids.append(int(video_id))
     day_recall_key_name = \
-        f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H}{rule_key}.{now_dt}.{now_h}"
+        f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H}{app_type}.{data_key}.{rule_key}.{now_dt}.{now_h}"
     if len(day_recall_result) > 0:
         redis_helper.add_data_with_zset(key_name=day_recall_key_name, data=day_recall_result, expire_time=23 * 3600)
         # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{rule_key}")
+        redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{app_type}.{data_key}.{rule_key}")
 
     # 去重更新rov模型结果,并另存为redis中
     # initial_data_dup = {}
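With app_type and data_key folded into the key names above, a rough sketch of how the 24h recall zset key and its matching online filter set key are composed (the prefix strings below are placeholders, not the real config_ values):

```python
from datetime import datetime

# Placeholder prefixes -- the real ones come from config_ and are not part of this diff.
RECALL_PREFIX_24H = "recall_24h:"
FILTER_PREFIX_24H = "filter_24h:"

def build_keys(app_type, data_key, rule_key, now_date, now_h):
    """Compose the per-app-type 24h recall zset key and the online filter set key."""
    now_dt = datetime.strftime(now_date, '%Y%m%d')
    recall_key = f"{RECALL_PREFIX_24H}{app_type}.{data_key}.{rule_key}.{now_dt}.{now_h}"
    filter_key = f"{FILTER_PREFIX_24H}{app_type}.{data_key}.{rule_key}"
    return recall_key, filter_key

# build_keys(0, 'data1', 'rule1', datetime(2022, 5, 1), 15)
# -> ('recall_24h:0.data1.rule1.20220501.15', 'filter_24h:0.data1.rule1')
```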
@@ -184,34 +188,69 @@ def video_rank_h(df, now_date, now_h, rule_key, param):
     #     redis_helper.add_data_with_zset(key_name=initial_key_name, data=initial_data_dup, expire_time=23 * 3600)
 
 
+def merge_df(df_left, df_right):
+    """
+    df按照videoid 合并,对应特征求和
+    :param df_left:
+    :param df_right:
+    :return:
+    """
+    df_merged = pd.merge(df_left, df_right, on=['videoid'], how='outer', suffixes=['_x', '_y'])
+    df_merged.fillna(0, inplace=True)
+    feature_list = ['videoid']
+    for feature in features:
+        if feature in ['apptype', 'videoid']:
+            continue
+        df_merged[feature] = df_merged[f'{feature}_x'] + df_merged[f'{feature}_y']
+        feature_list.append(feature)
+    return df_merged[feature_list]
+
+
 def rank_by_h(now_date, now_h, rule_params, project, table):
     # 获取特征数据
     feature_df = get_feature_data(now_date=now_date, now_h=now_h, project=project, table=table)
+    feature_df['apptype'] = feature_df['apptype'].astype(int)
     # rank
-    for key, value in rule_params.items():
-        log_.info(f"rule = {key}, param = {value}")
-        # 计算score
-        cal_score_func = value.get('cal_score_func', 1)
-        if cal_score_func == 2:
-            score_df = cal_score2(df=feature_df, param=value)
-        else:
-            score_df = cal_score1(df=feature_df)
-        video_rank_h(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value)
-        # to-csv
-        score_filename = f"score_by24h_{key}_{datetime.strftime(now_date, '%Y%m%d%H')}.csv"
-        score_df.to_csv(f'./data/{score_filename}')
-        # to-logs
-        log_.info({"date": datetime.strftime(now_date, '%Y%m%d%H'),
-                   "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_BY_24H,
-                   "rule_key": key,
-                   # "score_df": score_df[['videoid', 'score']]
-                   })
-
-
-def h_rank_bottom(now_date, now_h, rule_key):
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            df_list = [feature_df[feature_df['apptype'] == apptype] for apptype in data_param]
+            df_merged = reduce(merge_df, df_list)
+            for rule_key, rule_param in params['rule_params'].items():
+                log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+                # 计算score
+                cal_score_func = rule_param.get('cal_score_func', 1)
+                if cal_score_func == 2:
+                    score_df = cal_score2(df=df_merged, param=rule_param)
+                else:
+                    score_df = cal_score1(df=df_merged)
+                video_rank_h(df=score_df, now_date=now_date, now_h=now_h, rule_key=rule_key, param=rule_param,
+                             app_type=app_type, data_key=data_key)
+
+
+    # for key, value in rule_params.items():
+    #     log_.info(f"rule = {key}, param = {value}")
+    #     # 计算score
+    #     cal_score_func = value.get('cal_score_func', 1)
+    #     if cal_score_func == 2:
+    #         score_df = cal_score2(df=feature_df, param=value)
+    #     else:
+    #         score_df = cal_score1(df=feature_df)
+    #     video_rank_h(df=score_df, now_date=now_date, now_h=now_h, rule_key=key, param=value)
+    #     # to-csv
+    #     score_filename = f"score_by24h_{key}_{datetime.strftime(now_date, '%Y%m%d%H')}.csv"
+    #     score_df.to_csv(f'./data/{score_filename}')
+    #     # to-logs
+    #     log_.info({"date": datetime.strftime(now_date, '%Y%m%d%H'),
+    #                "redis_key_prefix": config_.RECALL_KEY_NAME_PREFIX_BY_24H,
+    #                "rule_key": key,
+    #                # "score_df": score_df[['videoid', 'score']]
+    #                })
+
+
+def h_rank_bottom(now_date, now_h, rule_params):
     """未按时更新数据,用模型召回数据作为当前的数据"""
-    log_.info(f"rule_key = {rule_key}")
-    # 获取rov模型结果
     redis_helper = RedisHelper()
     if now_h == 0:
         redis_dt = datetime.strftime(now_date - timedelta(days=1), '%Y%m%d')
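The `reduce(merge_df, df_list)` step in `rank_by_h` above sums the feature columns of the per-apptype slices after an outer merge on videoid. A self-contained toy version of the same pattern (two columns stand in for the full `features` list):

```python
import pandas as pd
from functools import reduce

# Toy per-apptype feature slices.
df_a = pd.DataFrame({'videoid': [1, 2], 'view人数': [10, 5], 'share人数': [3, 0]})
df_b = pd.DataFrame({'videoid': [2, 3], 'view人数': [7, 4], 'share人数': [1, 2]})

def merge_sum(left, right):
    """Outer-merge on videoid and sum each shared feature column (same idea as merge_df)."""
    merged = pd.merge(left, right, on=['videoid'], how='outer', suffixes=['_x', '_y'])
    merged.fillna(0, inplace=True)
    out = {'videoid': merged['videoid']}
    for col in ['view人数', 'share人数']:
        out[col] = merged[f'{col}_x'] + merged[f'{col}_y']
    return pd.DataFrame(out)

print(reduce(merge_sum, [df_a, df_b]))
# videoid 1 -> view 10.0, share 3.0; videoid 2 -> 12.0, 1.0; videoid 3 -> 4.0, 2.0
```

Videos that only appear under some of the merged apptypes keep their counts (the missing side contributes 0), which is why the merge fills NaN with 0 before summing.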
@@ -220,27 +259,32 @@ def h_rank_bottom(now_date, now_h, rule_key):
         redis_dt = datetime.strftime(now_date, '%Y%m%d')
         redis_h = now_h - 1
     key_prefix_list = [config_.RECALL_KEY_NAME_PREFIX_BY_24H, config_.RECALL_KEY_NAME_PREFIX_DUP_24H]
-    for key_prefix in key_prefix_list:
-        key_name = f"{key_prefix}{rule_key}.{redis_dt}.{redis_h}"
-        initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
-        if initial_data is None:
-            initial_data = []
-        final_data = dict()
-        for video_id, score in initial_data:
-            final_data[video_id] = score
-        # 存入对应的redis
-        final_key_name = \
-            f"{key_prefix}{rule_key}.{datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
-        if len(final_data) > 0:
-            redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
-        # 清空线上过滤应用列表
-        redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{rule_key}")
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            for rule_key, rule_param in params['rule_params'].items():
+                for key_prefix in key_prefix_list:
+                    key_name = f"{key_prefix}{app_type}.{data_key}.{rule_key}.{redis_dt}.{redis_h}"
+                    initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
+                    if initial_data is None:
+                        initial_data = []
+                    final_data = dict()
+                    for video_id, score in initial_data:
+                        final_data[video_id] = score
+                    # 存入对应的redis
+                    final_key_name = \
+                        f"{key_prefix}{app_type}.{data_key}.{rule_key}.{datetime.strftime(now_date, '%Y%m%d')}.{now_h}"
+                    if len(final_data) > 0:
+                        redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=23 * 3600)
+                    # 清空线上过滤应用列表
+                    redis_helper.del_keys(key_name=f"{config_.H_VIDEO_FILER_24H}{app_type}.{data_key}.{rule_key}")
 
 
 def h_timer_check():
-    project = config_.PROJECT_24H
-    table = config_.TABLE_24H
-    rule_params = config_.RULE_PARAMS_24H
+    project = config_.PROJECT_24H_APP_TYPE
+    table = config_.TABLE_24H_APP_TYPE
+    rule_params = config_.RULE_PARAMS_24H_APP_TYPE
     now_date = datetime.today()
     log_.info(f"now_date: {datetime.strftime(now_date, '%Y%m%d%H')}")
     now_min = datetime.now().minute
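The reworked `h_rank_bottom` above falls back to the previous hour's zset and re-stores it under the current date/hour with a 23-hour TTL. The hour-rollback it relies on is plain datetime arithmetic; a standalone sketch:

```python
from datetime import datetime, timedelta

def previous_hour_key_parts(now_date, now_h):
    """Return (redis_dt, redis_h) for the hour before now_h, rolling back one day at hour 0."""
    if now_h == 0:
        return datetime.strftime(now_date - timedelta(days=1), '%Y%m%d'), 23
    return datetime.strftime(now_date, '%Y%m%d'), now_h - 1

# previous_hour_key_parts(datetime(2022, 5, 1), 0)  -> ('20220430', 23)
# previous_hour_key_parts(datetime(2022, 5, 1), 15) -> ('20220501', 14)
```

The (redis_dt, redis_h) pair is then interpolated into `{key_prefix}{app_type}.{data_key}.{rule_key}.{redis_dt}.{redis_h}` to locate the bottom data for every app_type/data_key/rule_key combination.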
@@ -253,8 +297,7 @@ def h_timer_check():
         rank_by_h(now_date=now_date, now_h=now_h, rule_params=rule_params, project=project, table=table)
     elif now_min > 50:
         log_.info('h_by24h_recall data is None!')
-        for key, _ in rule_params.items():
-            h_rank_bottom(now_date=now_date, now_h=now_h, rule_key=key)
+        h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params)
     else:
         # 数据没准备好,1分钟后重新检查
         Timer(60, h_timer_check).start()

+ 74 - 55
videos_filter.py

@@ -1,3 +1,4 @@
+import gevent
 import os
 import time
 import json
@@ -14,6 +15,7 @@ from log import Log
 
 config_, env = set_config()
 log_ = Log()
+redis_helper = RedisHelper()
 
 
 def filter_position_videos():
@@ -566,10 +568,64 @@ def filter_old_videos():
     log_.info("old videos filter end!")
 
 
+def filter_process_with_region(app_type, data_key, rule_key, region, now_date, now_h):
+    log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region}")
+    # 需过滤视频列表
+    key_prefix_list = [
+        config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,
+        config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,
+        # config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_DAY_H,
+        # config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_DAY_H,
+        config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,
+        config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H
+    ]
+    for i, key_prefix in enumerate(key_prefix_list):
+        # 拼接key
+        key_name = f"{key_prefix}{region}.{app_type}.{data_key}.{rule_key}.{now_date}.{now_h}"
+        # 获取视频
+        data = redis_helper.get_all_data_from_zset(key_name=key_name)
+        if data is None:
+            log_.info("data is None")
+            log_.info("filter end!")
+            continue
+        # 过滤
+        video_ids = [int(video_id) for video_id in data]
+        filtered_result = filter_video_status(video_ids=video_ids)
+        # 求差集,获取需要过滤掉的视频,并从redis中移除
+        filter_videos = set(video_ids) - set(filtered_result)
+        log_.info("video_ids size = {}, filtered size = {}, filter sizer = {}".format(len(video_ids),
+                                                                                      len(filtered_result),
+                                                                                      len(filter_videos)))
+        log_.info({'key_name': key_name, 'filter_videos': filter_videos})
+
+        if len(filter_videos) == 0:
+            log_.info("filter end!")
+            continue
+        redis_helper.remove_value_from_zset(key_name=key_name, value=list(filter_videos))
+        if i == 0:
+            # 将小时级的数据需要过滤的视频加入到线上过滤应用列表中
+            redis_helper.add_data_with_set(key_name=f"{config_.REGION_H_VIDEO_FILER}"
+                                                    f"{region}.{app_type}.{data_key}.{rule_key}",
+                                           values=filter_videos, expire_time=2 * 3600)
+        elif i == 1:
+            # 将地域分组24h的数据需要过滤的视频加入到线上过滤应用列表中
+            redis_helper.add_data_with_set(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}"
+                                                    f"{region}.{app_type}.{data_key}.{rule_key}",
+                                           values=filter_videos, expire_time=2 * 3600)
+        elif i == 2:
+            # 将相对24h的数据需要过滤的视频加入到线上过滤应用列表中
+            redis_helper.add_data_with_set(key_name=f"{config_.H_VIDEO_FILER_24H}"
+                                                    f"{region}.{app_type}.{data_key}.{rule_key}",
+                                           values=filter_videos, expire_time=2 * 3600)
+
+    log_.info(f"app_type = {app_type}, data_key = {data_key}, rule_key = {rule_key}, region = {region} "
+              f"videos filter end!")
+
+
 def filter_region_videos():
     """过滤地域分组规则视频"""
     region_code_list = [code for region, code in region_code.items()]
-    rule_params = config_.RULE_PARAMS_REGION
+    rule_params = config_.RULE_PARAMS_REGION_APP_TYPE
     log_.info("region_h videos filter start ...")
     redis_helper = RedisHelper()
     # 获取当前日期
@@ -577,57 +633,20 @@ def filter_region_videos():
     # 获取当前所在小时
     now_h = datetime.now().hour
     log_.info(f'now_date = {now_date}, now_h = {now_h}.')
-    for region in region_code_list:
-        log_.info(f"region = {region}")
-        for key, value in rule_params.items():
-            log_.info(f"rule = {key}, param = {value}")
-            # 需过滤视频列表
-            key_prefix_list = [
-                config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,
-                config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,
-                # config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_DAY_H,
-                # config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_DAY_H,
-                config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,
-                config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H
-            ]
-            for i, key_prefix in enumerate(key_prefix_list):
-                # 拼接key
-                key_name = f"{key_prefix}{region}.{key}.{now_date}.{now_h}"
-                log_.info(f"key_name: {key_name}")
-                # 获取视频
-                data = redis_helper.get_all_data_from_zset(key_name=key_name)
-                if data is None:
-                    log_.info("data is None")
-                    log_.info("filter end!")
-                    continue
-                # 过滤
-                video_ids = [int(video_id) for video_id in data]
-                filtered_result = filter_video_status(video_ids=video_ids)
-                # 求差集,获取需要过滤掉的视频,并从redis中移除
-                filter_videos = set(video_ids) - set(filtered_result)
-                log_.info("video_ids size = {}, filtered size = {}, filter sizer = {}".format(len(video_ids),
-                                                                                              len(filtered_result),
-                                                                                              len(filter_videos)))
-                log_.info({'key_name': key_name, 'filter_videos': filter_videos})
-
-                if len(filter_videos) == 0:
-                    log_.info("filter end!")
-                    continue
-                redis_helper.remove_value_from_zset(key_name=key_name, value=list(filter_videos))
-                if i == 0:
-                    # 将小时级的数据需要过滤的视频加入到线上过滤应用列表中
-                    redis_helper.add_data_with_set(key_name=f"{config_.REGION_H_VIDEO_FILER}{region}.{key}",
-                                                   values=filter_videos, expire_time=2 * 3600)
-                elif i == 1:
-                    # 将地域分组24h的数据需要过滤的视频加入到线上过滤应用列表中
-                    redis_helper.add_data_with_set(key_name=f"{config_.REGION_H_VIDEO_FILER_24H}{region}.{key}",
-                                                   values=filter_videos, expire_time=2 * 3600)
-                elif i == 2:
-                    # 将相对24h的数据需要过滤的视频加入到线上过滤应用列表中
-                    redis_helper.add_data_with_set(key_name=f"{config_.H_VIDEO_FILER_24H}{region}.{key}",
-                                                   values=filter_videos, expire_time=2 * 3600)
-
-        log_.info(f"region = {region} videos filter end!")
+    task_list = []
+    for app_type, params in rule_params.items():
+        log_.info(f"app_type = {app_type}")
+        for data_key, data_param in params['data_params'].items():
+            log_.info(f"data_key = {data_key}, data_param = {data_param}")
+            for rule_key, rule_param in params['rule_params'].items():
+                log_.info(f"rule_key = {rule_key}, rule_param = {rule_param}")
+                task_list.extend(
+                    [
+                        gevent.spawn(filter_process_with_region, app_type, data_key, rule_key, region, now_date, now_h)
+                        for region in region_code_list
+                    ]
+                )
+    gevent.joinall(task_list)
     log_.info("region_h videos filter end!")
 
 
@@ -877,11 +896,11 @@ def main():
         # 过滤地域分组小时级视频
         filter_region_videos()
         # 过滤地域分组天级视频
-        filter_region_videos_by_day()
+        # filter_region_videos_by_day()
         # 过滤小时级更新24h视频
-        filter_rov_h_24h()
+        # filter_rov_h_24h()
         # 过滤地域分组24h规则视频
-        filter_region_videos_24h()
+        # filter_region_videos_24h()
         # 过滤完整电影数据
         filter_whole_movies()
     except Exception as e: