import datetime
import random
import time
import os
import traceback

from config import set_config
from utils import request_post, filter_video_status, send_msg_to_feishu
from log import Log
from db_helper import RedisHelper
from odps import ODPS

config_, _ = set_config()
log_ = Log()


def get_videos_from_flow_pool(app_type, size=1000):
    """
    Fetch videos from the flow pool, looping until the interface returns no more data.
    :param app_type: product identifier, type-int
    :param size: number of videos fetched per request, type-int, default 1000
    :return: videos [{'videoId': 1111, 'flowPool': ''}, ...]
    """
    # use the timestamp of the first request as the batch flag
    batch_flag = int(time.time())
    request_data = {'appType': app_type, 'batchFlag': batch_flag, 'size': size}
    videos = []
    while True:
        result = request_post(request_url=config_.GET_VIDEOS_FROM_POOL_URL, request_data=request_data)
        if result is None:
            break
        if result['code'] != 0:
            log_.info('batch_flag: {}, failed to get videos from the flow pool'.format(batch_flag))
            break
        if not result['data']:
            break
        videos.extend(result['data'])
    return videos


def get_videos_remain_view_count(app_type, videos_info):
    """
    Get the remaining distributable view count of each video in the flow pool.
    :param app_type: product identifier, type-int
    :param videos_info: video info (video id, flow pool tag), type-list, [(video_id, flow_pool), ...]
    :return: data, type-list, [(video_id, flow_pool, view_count), ...]
    """
    if not videos_info:
        return []
    videos = [{'videoId': info[0], 'flowPool': info[1]} for info in videos_info]
    request_data = {'appType': app_type, 'videos': videos}
    result = request_post(request_url=config_.GET_REMAIN_VIEW_COUNT_URL, request_data=request_data)
    if result is None:
        return []
    if result['code'] != 0:
        log_.info('failed to get remaining view counts of flow pool videos')
        return []
    data = [(item['videoId'], item['flowPool'], item['viewCount']) for item in result['data']]
    return data


def get_flow_pool_recommend_config(flow_pool_id):
    """Get the recommendation distribute rate configured for the given flow pool."""
    result = request_post(request_url=config_.GET_FLOW_POOL_RECOMMEND_CONFIG_URL)
    if result is None:
        return None
    if result['code'] != 0:
        return None
    # flowPoolDistributeConfig comes back as a python-literal string and is parsed with eval
    flow_pool_distribute_config = result['data'].get('flowPoolDistributeConfig')
    if not flow_pool_distribute_config:
        return None
    distribute_config = eval(flow_pool_distribute_config)
    if int(distribute_config.get('flowPoolId')) == flow_pool_id:
        return eval(distribute_config.get('distributeRate'))
    return None


def get_score(video_ids):
    # use a random float in [0, 100] as the score
    return [random.uniform(0, 100) for _ in range(len(video_ids))]


def predict(app_type):
    """
    Rank flow pool videos and upload the result to Redis.
    :param app_type: product identifier, type-int
    :return: None
    """
    try:
        # fetch videos from the flow pool
        videos = get_videos_from_flow_pool(app_type=app_type)
        if len(videos) <= 0:
            log_.info('no videos to distribute in the flow pool')
            return None
        # build a mapping from video_id to its flow pool tags
        video_ids = set()
        log_.info('number of videos in the flow pool: {}'.format(len(videos)))
        mapping = {}
        for video in videos:
            video_id = video['videoId']
            video_ids.add(video_id)
            if video_id in mapping:
                mapping[video_id].append(video['flowPool'])
            else:
                mapping[video_id] = [video['flowPool']]
        # filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('no flow pool videos are eligible for distribution')
            return None
        # predict
        video_score = get_score(filtered_videos)
        log_.info('predict finished!')
        # build the data to upload to redis
        redis_data = {}
        quick_flow_pool_redis_data = {}
        for i in range(len(video_score)):
            video_id = filtered_videos[i]
            score = video_score[i]
            for flow_pool in mapping.get(video_id):
                # check whether the video belongs to the quick-exposure flow pool
                value = '{}-{}'.format(video_id, flow_pool)
                flow_pool_id = int(flow_pool.split('#')[0])  # flowPool format: <flow pool id>#<grade id>#<level>#<life cycle id>
                if flow_pool_id == config_.QUICK_FLOW_POOL_ID:
                    quick_flow_pool_redis_data[value] = score
                else:
                    redis_data[value] = score

        # write quick-exposure flow pool videos to redis
        redis_helper = RedisHelper()
        quick_flow_pool_key_name = f"{config_.QUICK_FLOWPOOL_KEY_NAME_PREFIX}{app_type}.{config_.QUICK_FLOW_POOL_ID}"
        # delete the key first if it already exists
        if redis_helper.key_exists(quick_flow_pool_key_name):
            redis_helper.del_keys(quick_flow_pool_key_name)
        # write to redis
        if quick_flow_pool_redis_data:
            log_.info(f"quick_flow_pool_redis_data = {quick_flow_pool_redis_data}")
            redis_helper.add_data_with_zset(key_name=quick_flow_pool_key_name,
                                            data=quick_flow_pool_redis_data,
                                            expire_time=24 * 3600)
            # store the quick flow pool distribute rate in redis
            distribute_rate_key_name = f"{config_.QUICK_FLOWPOOL_DISTRIBUTE_RATE_KEY_NAME_PREFIX}{config_.QUICK_FLOW_POOL_ID}"
            distribute_rate = get_flow_pool_recommend_config(flow_pool_id=config_.QUICK_FLOW_POOL_ID)
            if distribute_rate is not None:
                redis_helper.set_data_to_redis(key_name=distribute_rate_key_name,
                                               value=distribute_rate,
                                               expire_time=15 * 60)

        # write ordinary flow pool videos to redis
        flow_pool_key_name = f"{config_.FLOWPOOL_KEY_NAME_PREFIX}{app_type}"
        # delete the key first if it already exists
        if redis_helper.key_exists(flow_pool_key_name):
            redis_helper.del_keys(flow_pool_key_name)
        # write to redis
        if redis_data:
            redis_helper.add_data_with_zset(key_name=flow_pool_key_name, data=redis_data, expire_time=24 * 3600)
        log_.info('data to redis finished!')

    except Exception as e:
        log_.error('flow pool update failed, appType: {}, exception: {}, traceback: {}'.format(
            app_type, e, traceback.format_exc()))
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text='rov-offline{} - flow pool update failed, appType: {}, exception: {}'.format(
                config_.ENV_TEXT, app_type, e)
        )
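
# --- Illustrative sketch (not called by the job) ------------------------------
# A minimal example of how the sorted sets written by predict() could be read
# back for a quick sanity check: members have the form "{video_id}-{flowPool}"
# and the score is the predicted value. The read API of the project's
# RedisHelper is not shown here, so this sketch uses the standard redis-py
# client directly; host/port defaults are assumptions for illustration only.
def _inspect_flow_pool_zset(key_name, top_n=10, host='127.0.0.1', port=6379):
    import redis  # local import keeps the sketch self-contained
    client = redis.Redis(host=host, port=port, decode_responses=True)
    # highest-scored members first
    return client.zrevrange(key_name, 0, top_n - 1, withscores=True)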
def get_data_from_odps(project, sql):
    """Fetch data from ODPS and return it as a pandas DataFrame (None on failure)."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        with odps.execute_sql(sql=sql).open_reader() as reader:
            data_df = reader.to_pandas()
    except Exception as e:
        log_.error('get data from odps failed, exception: {}'.format(e))
        data_df = None
    return data_df


def predict_18_19(app_type):
    log_.info(f'app_type = {app_type}')
    now = datetime.datetime.now()
    log_.info(f"now = {datetime.datetime.strftime(now, '%Y-%m-%d %H:%M:%S')}")
    # create_time = datetime.datetime.strftime(now - datetime.timedelta(hours=24), '%Y-%m-%d %H:%M:%S')
    create_time = '2022-04-22 16:40:00'
    if app_type == config_.APP_TYPE['LAO_HAO_KAN_VIDEO']:
        sql = f"SELECT video_id FROM videoods.movie_store_video_allow_list " \
              f"WHERE allow_list_type=1 AND create_time>='{create_time}'"
    # elif app_type == config_.APP_TYPE['ZUI_JING_QI']:
    #     sql = f"SELECT video_id FROM videoods.movie_store_video_allow_list " \
    #           f"WHERE allow_list_type=0 AND " \
    #           f"video_id NOT IN (" \
    #           f"SELECT video_id FROM videoods.movie_store_video_allow_list WHERE allow_list_type=1" \
    #           f") AND " \
    #           f"create_time>='{create_time}'"
    else:
        sql = ""
    if not sql:
        log_.info(f'no allow-list sql configured for app_type = {app_type}')
        return None
    data_df = get_data_from_odps(project='videoods', sql=sql)
    if data_df is not None:
        video_ids = [int(video_id) for video_id in data_df['video_id'].to_list()]
        log_.info(f'video_ids count = {len(video_ids)}')
        # filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('no flow pool videos are eligible for distribution')
            return None
        # predict
        video_score = get_score(filtered_videos)
        log_.info('predict finished!')
        # build the data to upload to redis
        redis_data = {}
        for i in range(len(video_score)):
            video_id = filtered_videos[i]
            score = video_score[i]
            redis_data[video_id] = score

        key_name = config_.FLOWPOOL_KEY_NAME_PREFIX + str(app_type)
        redis_helper = RedisHelper()
        # delete the key first if it already exists
        if redis_helper.key_exists(key_name):
            redis_helper.del_keys(key_name)
        # write to redis
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=24 * 3600)
        log_.info('data to redis finished!')
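
# --- Illustrative sketch (not called by the job) ------------------------------
# predict_18_19 and predict_19 both rely on get_data_from_odps returning a
# pandas DataFrame with a 'video_id' column. This helper builds such a frame
# locally so the downstream logic can be exercised without an ODPS connection;
# the sample ids and timestamps are made up for illustration only.
def _example_allow_list_frame():
    import pandas as pd  # local import keeps the sketch self-contained
    return pd.DataFrame({
        'video_id': ['1001', '1002', '1003'],
        'create_time': ['2022-04-23 10:00:00', '2022-04-23 09:00:00', '2022-04-22 17:00:00'],
    })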
def get_score_19(video_ids):
    """Assign evenly spaced scores from 100 downwards, preserving the input order."""
    data = {}
    if not video_ids:
        return data
    step = round(100.0 / len(video_ids), 3)
    for i, video_id in enumerate(video_ids):
        score = 100 - i * step
        data[video_id] = score
    return data


def predict_19(app_type):
    log_.info(f'app_type = {app_type}')
    now = datetime.datetime.now()
    log_.info(f"now = {datetime.datetime.strftime(now, '%Y-%m-%d %H:%M:%S')}")
    sql_create_time = datetime.datetime.strftime(now - datetime.timedelta(days=30), '%Y-%m-%d %H:%M:%S')
    if sql_create_time < '2022-04-22 16:40:00':
        sql_create_time = '2022-04-22 16:40:00'
    sql = f"SELECT video_id, create_time FROM videoods.movie_store_video_allow_list_final " \
          f"WHERE create_time>='{sql_create_time}' " \
          f"ORDER BY create_time DESC;"
    data_df = get_data_from_odps(project='videoods', sql=sql)
    if data_df is not None:
        video_ids = [int(video_id) for video_id in data_df['video_id'].to_list()]
        log_.info(f'video_ids count = {len(video_ids)}')
        # predict: newer videos get higher scores
        video_score = get_score_19(video_ids=video_ids)
        # filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('no flow pool videos are eligible for distribution')
            return None
        # build the data to upload to redis
        data = {}
        for video_id in filtered_videos:
            score = video_score[video_id]
            data[video_id] = score
        log_.info('predict finished!')

        key_name = config_.FLOWPOOL_KEY_NAME_PREFIX + str(app_type)
        redis_helper = RedisHelper()
        # delete the key first if it already exists
        if redis_helper.key_exists(key_name):
            redis_helper.del_keys(key_name)
        # write to redis
        redis_helper.add_data_with_zset(key_name=key_name, data=data, expire_time=24 * 3600)
        log_.info('data to redis finished!')


if __name__ == '__main__':
    app_type_list = [config_.APP_TYPE['LAO_HAO_KAN_VIDEO'], config_.APP_TYPE['ZUI_JING_QI']]
    log_.info('flow pool predict start...')
    for app_name, app_type in config_.APP_TYPE.items():
        log_.info('{} predict start...'.format(app_name))
        if app_type == config_.APP_TYPE['LAO_HAO_KAN_VIDEO']:
            predict_18_19(app_type=app_type)
        elif app_type == config_.APP_TYPE['ZUI_JING_QI']:
            predict_19(app_type=app_type)
        else:
            predict(app_type=app_type)
        log_.info('{} predict end...'.format(app_name))
    log_.info('flow pool predict end...')

    # upload logs to oss
    # log_cmd = "ossutil cp -r -f {} oss://{}/{}".format(log_.logname, config_.BUCKET_NAME,
    #                                                    config_.OSS_FOLDER_LOGS + 'flow_pool/')
    # os.system(log_cmd)
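
# --- Illustrative notes (not executed) ----------------------------------------
# get_score_19 assigns evenly spaced scores from 100 downwards, so videos that
# appear earlier in the ORDER BY create_time DESC result rank higher, e.g. with
# four made-up ids:
#     get_score_19([111, 222, 333, 444])
#     -> {111: 100.0, 222: 75.0, 333: 50.0, 444: 25.0}
# For ad hoc debugging of a single product, the entry points can also be called
# directly with the app type keys used in the __main__ block above:
#     predict_18_19(app_type=config_.APP_TYPE['LAO_HAO_KAN_VIDEO'])
#     predict_19(app_type=config_.APP_TYPE['ZUI_JING_QI'])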