import time
import traceback
import json
import random

from my_config import set_config
from my_utils import request_post, filter_video_status, send_msg_to_feishu, filter_video_status_app, \
    filter_political_videos
from log import Log
from db_helper import RedisHelper
from odps import ODPS

config_, _ = set_config()
log_ = Log()
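
# Endpoints this script calls through request_post, all taken from my_config
# (their concrete URLs live in the project configuration, not in this file):
#   config_.GET_VIDEOS_FROM_POOL_URL             flow-pool video listing
#   config_.GET_REMAIN_VIEW_COUNT_URL            remaining distribute counts
#   config_.GET_FLOW_POOL_RECOMMEND_CONFIG_URL   flow-pool recommend (distribute rate) config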


def get_videos_from_flow_pool(app_type, size=1000):
    """
    Fetch videos from the flow pool, requesting page after page until the service
    stops returning data (each failed or empty response is retried up to 3 times).
    :param app_type: product identifier, type-int
    :param size: number of videos per request, type-int, default 1000
    :return: videos, e.g. [{'videoId': 1111, 'flowPool': ''}, ...]
    """
    # Batch flag: derived from the timestamp of the first request so that every
    # page fetched in this run shares one identifier
    batch_flag = int(time.time()) * 1000 + 111
    log_.info(f"batch_flag: {batch_flag}")
    request_data = {'appType': app_type, 'batchFlag': batch_flag, 'size': size}
    videos = []
    retry = 0
    while True:
        result = request_post(request_url=config_.GET_VIDEOS_FROM_POOL_URL, request_data=request_data)
        if result is None:
            if retry > 2:
                break
            retry += 1
            continue
        if result['code'] != 0:
            log_.info('batch_flag: {}, failed to fetch flow pool videos'.format(batch_flag))
            if retry > 2:
                break
            retry += 1
            continue
        if not result['data']:
            if retry > 2:
                break
            retry += 1
            continue
        videos.extend(result['data'])
    return videos
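
# Shape of a single flow-pool item, inferred from the fields read in update_flow_pool
# below (the real payload may carry additional keys):
#   {'videoId': 1111, 'flowPool': '<poolId>#<tierId>#<level>#<lifecycleId>',
#    'flowPoolId': '3', 'level': 2}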


def update_remain_view_count(video_info_list):
    """
    Fetch each video's remaining distributable count in the flow pool and mirror it
    into the local Redis record.
    :param video_info_list: video info, type-list, [(video_id, flow_pool), ...]
    :return: remain_videos, type-dict, {video_id: distribute_count, ...}
    """
    redis_helper = RedisHelper()
    if not video_info_list:
        return dict()
    remain_videos = dict()
    # Request 10 videos per batch (ceil division, so no empty trailing batch is sent)
    for i in range((len(video_info_list) + 9) // 10):
        remain_st_time = time.time()
        videos = [{'videoId': info[0], 'flowPool': info[1]} for info in video_info_list[i * 10:(i + 1) * 10]]
        request_data = {'videos': videos}
        result = request_post(request_url=config_.GET_REMAIN_VIEW_COUNT_URL,
                              request_data=request_data, timeout=(0.5, 3))
        log_.info(f"i = {i}, expend time = {(time.time() - remain_st_time) * 1000}")
        if result is None:
            continue
        if result['code'] != 0:
            log_.error('Failed to fetch remaining distributable counts from the flow pool')
            continue
        for item in result['data']:
            if item['distributeCount'] is None:
                continue
            distribute_count = int(item['distributeCount'])
            if distribute_count > 0:
                remain_videos[item['videoId']] = distribute_count
                # Update the local record of the remaining distribute count
                key_name = f"{config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}{item['videoId']}:{item['flowPool']}"
                redis_helper.set_data_to_redis(key_name=key_name, value=distribute_count, expire_time=25 * 60)
            else:
                # Remove the local record when nothing is left to distribute
                key_name = f"{config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}{item['videoId']}:{item['flowPool']}"
                redis_helper.del_keys(key_name=key_name)
    log_.info(f"Video count before/after filtering out non-distributable videos: {len(video_info_list)}:{len(remain_videos)}")
    return remain_videos
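
# Local bookkeeping written by update_remain_view_count, as composed above:
#   {config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}{video_id}:{flow_pool} -> remaining distribute count
# The 25-minute TTL lets stale entries expire on their own once a later run stops
# refreshing them.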


def get_flow_pool_recommend_config(flow_pool_id):
    """Fetch the flow pool recommend/distribute configuration."""
    result = request_post(request_url=config_.GET_FLOW_POOL_RECOMMEND_CONFIG_URL)
    if result is None:
        return None
    if result['code'] != 0:
        return None
    flow_pool_distribute_config = result['data'].get('flowPoolDistributeConfig')
    if not flow_pool_distribute_config:
        return None
    # The config arrives as a Python-literal string; evaluate it once and reuse it
    distribute_config = eval(flow_pool_distribute_config)
    if int(distribute_config.get('flowPoolId')) == flow_pool_id:
        return eval(distribute_config.get('distributeRate'))
    return None
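
# Example of the flowPoolDistributeConfig value the function above expects (an
# assumption based on the keys it reads; the actual payload is defined by the
# recommend-config service):
#   "{'flowPoolId': '52', 'distributeRate': '0.25'}"
# Both the outer string and distributeRate are Python literals, hence the eval calls.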


def update_flow_pool(flow_pool_id_list):
    """
    Fetch the distributable videos from the flow pool and upload the result to Redis.
    :param flow_pool_id_list: flow pool IDs to include, type-list
    :return: None
    """
    # All products read the pool with app_type 0
    app_type = 0
    try:
        # Fetch data from the flow pool
        videos = get_videos_from_flow_pool(app_type=app_type)
        if len(videos) <= 0:
            log_.info('No videos in the flow pool need distribution')
            return
        # Map video_id to its flow_pool / level entries
        video_ids = set()
        log_.info('Number of videos in the flow pool: {}'.format(len(videos)))
        mapping = {}
        for video in videos:
            # The split by flow pool ID used for recall happens here; the flow pool
            # service itself does not distinguish pool IDs or levels.
            flow_pool_id = video['flowPoolId']
            if int(flow_pool_id) not in flow_pool_id_list:
                continue
            video_id = video['videoId']
            video_ids.add(video_id)
            item_info = {'flowPool': video['flowPool'], 'level': video['level']}
            if video_id in mapping:
                mapping[video_id].append(item_info)
            else:
                mapping[video_id] = [item_info]
        log_.info(f"Number of flow pool videos to update: {len(video_ids)}")
        # Filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('No flow pool videos are in a distributable status')
            return
        # 1. Refresh distribute counts and drop videos whose remaining count is 0
        video_info_list = []
        for video_id in filtered_videos:
            for item in mapping.get(video_id):
                flow_pool = item['flowPool']
                video_info = (video_id, flow_pool)
                if video_info not in video_info_list:
                    video_info_list.append(video_info)
        log_.info(f"video_info_list count = {len(video_info_list)}")
        remain_videos = update_remain_view_count(video_info_list)
        if not remain_videos:
            log_.info('No flow pool videos have a remaining distribute count')
            return
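
        # At this point:
        #   mapping       - {video_id: [{'flowPool': ..., 'level': ...}, ...]} for the selected pools
        #   remain_videos - {video_id: remaining distribute count}, counts are all > 0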
        # Upload data to Redis
        quick_flow_pool_redis_data = set()
        flow_pool_redis_data = dict()
        level_weight = dict()
        for video_id, distribute_count in remain_videos.items():
            for item in mapping.get(video_id):
                flow_pool = item['flowPool']
                level = item['level']
                # Decide whether the video belongs to the quick-exposure flow pool
                value = '{}-{}'.format(video_id, flow_pool)
                # flowPool format: poolId#tierId#level#lifecycleId
                flow_pool_id = int(flow_pool.split('#')[0])
                if flow_pool_id == config_.QUICK_FLOW_POOL_ID:
                    quick_flow_pool_redis_data.add(value)
                else:
                    if level not in flow_pool_redis_data:
                        flow_pool_redis_data[level] = set()
                        level_weight[level] = 0
                    flow_pool_redis_data[level].add(value)
                    level_weight[level] = level_weight[level] + distribute_count

        # 2. Quick-exposure flow pool
        redis_helper = RedisHelper()
        quick_flow_pool_key_name = f"{config_.QUICK_FLOWPOOL_KEY_NAME_PREFIX_SET}{app_type}:{config_.QUICK_FLOW_POOL_ID}"
        # Delete the key first if it already exists
        if redis_helper.key_exists(quick_flow_pool_key_name):
            redis_helper.del_keys(quick_flow_pool_key_name)
        if quick_flow_pool_redis_data:
            log_.info(f"quick_flow_pool_redis_data = {quick_flow_pool_redis_data}")
            redis_helper.add_data_with_set(key_name=quick_flow_pool_key_name, values=quick_flow_pool_redis_data,
                                           expire_time=24 * 3600)
            # Store the quick flow pool's distribute rate in Redis
            distribute_rate_key_name = f"{config_.QUICK_FLOWPOOL_DISTRIBUTE_RATE_KEY_NAME_PREFIX}{config_.QUICK_FLOW_POOL_ID}"
            distribute_rate = get_flow_pool_recommend_config(flow_pool_id=config_.QUICK_FLOW_POOL_ID)
            if distribute_rate is not None:
                redis_helper.set_data_to_redis(key_name=distribute_rate_key_name, value=distribute_rate,
                                               expire_time=15 * 60)

        # 3. Normal flow pools: one Redis set per level
        for level, videos in flow_pool_redis_data.items():
            log_.info(f"level: {level}, videos_count: {len(videos)}")
            flow_pool_key_name = f"flow:pool:level:item:v2:{app_type}:{level}"
            # Delete the key first if it already exists
            if redis_helper.key_exists(flow_pool_key_name):
                redis_helper.del_keys(flow_pool_key_name)
            # Write to Redis
            if videos:
                redis_helper.add_data_with_set(key_name=flow_pool_key_name, values=videos, expire_time=24 * 3600)
            result = redis_helper.get_data_from_set(flow_pool_key_name)
            if not result:
                result = []
            size = len(result)
            log_.info(f'write to Redis succeeded, key={flow_pool_key_name}: {size}')

        # 4. Write the per-level weights
        weight = json.dumps(level_weight)
        redis_helper.set_data_to_redis(key_name="flow:pool:level:weight:v2", value=weight, expire_time=24 * 3600)
        log_.info(f'level weights: {weight}')
        log_.info('data to redis finished!')
    except Exception as e:
        log_.error('Flow pool update failed, appType: {} exception: {}, traceback: {}'.format(
            app_type, e, traceback.format_exc()))
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text='rov-offline{} - flow pool update failed, appType: {}, exception: {}'.format(
                config_.ENV_TEXT, app_type, e)
        )
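
# Redis keys written by update_flow_pool, as composed above:
#   {QUICK_FLOWPOOL_KEY_NAME_PREFIX_SET}{app_type}:{QUICK_FLOW_POOL_ID}   set of "videoId-flowPool", 24 h TTL
#   {QUICK_FLOWPOOL_DISTRIBUTE_RATE_KEY_NAME_PREFIX}{QUICK_FLOW_POOL_ID}  quick pool distribute rate, 15 min TTL
#   flow:pool:level:item:v2:{app_type}:{level}                            set of "videoId-flowPool" per level, 24 h TTL
#   flow:pool:level:weight:v2                                             JSON of {level: summed distribute count}, 24 h TTL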


if __name__ == '__main__':
    st_time = time.time()
    log_.info('flow pool predict start...')
    # Fetch the list of flow pool IDs to use from the A/B test config in Redis
    redis_helper = RedisHelper()
    flow_pool_abtest_config = redis_helper.get_data_from_redis(key_name=config_.FLOWPOOL_ABTEST_KEY_NAME)
    if flow_pool_abtest_config is not None:
        flow_pool_abtest_config = json.loads(flow_pool_abtest_config)
    else:
        flow_pool_abtest_config = {}
    flow_pool_id_list = flow_pool_abtest_config.get('experimental_flow_set_level', [])
    log_.info('predict start...')
    update_flow_pool(flow_pool_id_list=flow_pool_id_list)
    log_.info('predict end...')
    log_.info(f"expend time = {(time.time() - st_time) * 1000}ms")
    log_.info('flow pool predict end...')
    # python flowpool_data_update_with_level.py
    # In the test environment this script must be run manually for the data to be populated.