import random
import time
import os
import traceback

from config import set_config
from utils import request_post, filter_video_status, send_msg_to_feishu
from log import Log
from db_helper import RedisHelper

config_, _ = set_config()
log_ = Log()


def get_videos_from_flow_pool(app_type, size=1000):
    """
    Fetch videos from the flow pool in batches until the server returns no data.

    :param app_type: product identifier, type-int
    :param size: number of videos requested per call, type-int, default 1000
    :return: videos [{'videoId': 1111, 'flowPool': ''}, ...]
    """
    # Use the timestamp of the first request as the batch identifier so the
    # server can page through the same snapshot across calls.
    batch_flag = int(time.time())
    request_data = {'appType': app_type, 'batchFlag': batch_flag, 'size': size}
    videos = []
    while True:
        result = request_post(request_url=config_.GET_VIDEOS_FROM_POOL_URL,
                              request_data=request_data)
        if result is None:
            break
        if result['code'] != 0:
            log_.info('batch_flag: {}, 获取流量池视频失败'.format(batch_flag))
            break
        if not result['data']:
            # Empty page marks the end of the batch.
            break
        videos.extend(result['data'])
    return videos


def get_videos_remain_view_count(app_type, videos_info):
    """
    Query the remaining distributable view count of videos in the flow pool.

    :param app_type: product identifier, type-int
    :param videos_info: video info (video id, flow pool tag),
        type-list, [(video_id, flow_pool), ...]
    :return: data, type-list, [(video_id, flow_pool, view_count), ...];
        empty list on empty input or request failure
    """
    if not videos_info:
        return []
    videos = [{'videoId': video_id, 'flowPool': flow_pool}
              for video_id, flow_pool in videos_info]
    request_data = {'appType': app_type, 'videos': videos}
    result = request_post(request_url=config_.GET_REMAIN_VIEW_COUNT_URL,
                          request_data=request_data)
    if result is None:
        return []
    if result['code'] != 0:
        log_.info('获取视频在流量池中的剩余可分发数失败')
        return []
    return [(item['videoId'], item['flowPool'], item['viewCount'])
            for item in result['data']]


def get_score(video_ids):
    """
    Return a placeholder score for each video id.

    :param video_ids: iterable of video ids
    :return: list of random floats in [0, 100], one per video id
    """
    # Scores are random floats in [0, 100] — a stand-in ranking model.
    return [random.uniform(0, 100) for _ in video_ids]


def predict(app_type):
    """
    Rank the flow-pool videos of one product and upload the result to Redis.

    :param app_type: product identifier, type-int
    :return: None
    """
    try:
        # Pull the current flow-pool snapshot.
        videos = get_videos_from_flow_pool(app_type=app_type)
        if not videos:
            log_.info('流量池中无需分发的视频')
            return None
        log_.info('流量池中视频数:{}'.format(len(videos)))

        # One video id can sit in several flow pools — collect them per id.
        video_ids = set()
        mapping = {}
        for video in videos:
            video_id = video['videoId']
            video_ids.add(video_id)
            mapping.setdefault(video_id, []).append(video['flowPool'])

        # Drop videos whose status forbids distribution.
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('流量池中视频状态不符合分发')
            return None

        # Score the surviving videos.
        video_score = get_score(filtered_videos)
        log_.info('predict finished!')

        # Build the zset payload: member '{video_id}-{flow_pool}' -> score.
        redis_data = {}
        for video_id, score in zip(filtered_videos, video_score):
            for flow_pool in mapping.get(video_id):
                member = '{}-{}'.format(video_id, flow_pool)
                redis_data[member] = score

        key_name = config_.FLOWPOOL_KEY_NAME_PREFIX + str(app_type)
        redis_helper = RedisHelper()
        # Replace the previous ranking atomically enough for this job:
        # delete the stale key first, then write the fresh zset.
        if redis_helper.key_exists(key_name):
            redis_helper.del_keys(key_name)
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data,
                                        expire_time=24 * 3600)
        log_.info('data to redis finished!')
    except Exception as e:
        log_.error('流量池更新失败, appType: {} exception: {}, traceback: {}'.format(
            app_type, e, traceback.format_exc()))
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text='rov-offline{} - 流量池更新失败, appType: {}, exception: {}'.format(
                config_.ENV_TEXT, app_type, e)
        )


if __name__ == '__main__':
    # res = get_videos_from_pool(app_type=0)
    # res = get_videos_remain_view_count(app_type=0, videos_info=[('12345', '#2#1#111')])
    # print(res)
    log_.info('flow pool predict start...')
    for app_name, app_type in config_.APP_TYPE.items():
        log_.info('{} predict start...'.format(app_name))
        predict(app_type=app_type)
        log_.info('{} predict end...'.format(app_name))
    log_.info('flow pool predict end...')
    # 将日志上传到oss
    # log_cmd = "ossutil cp -r -f {} oss://{}/{}".format(log_.logname, config_.BUCKET_NAME,
    #                                                    config_.OSS_FOLDER_LOGS + 'flow_pool/')
    # os.system(log_cmd)