# pool_predict.py

import datetime
import random
import time
import os
import traceback

from config import set_config
from utils import request_post, filter_video_status, send_msg_to_feishu
from log import Log
from db_helper import RedisHelper
from odps import ODPS

config_, _ = set_config()
log_ = Log()


def get_videos_from_flow_pool(app_type, size=1000):
    """
    Fetch videos from the flow pool in batches, looping until no more data is returned.
    :param app_type: product identifier, type-int
    :param size: number of videos fetched per request, type-int, default 1000
    :return: videos [{'videoId': 1111, 'flowPool': ''}, ...]
    """
    # Use the timestamp of the first request as the batch flag
    batch_flag = int(time.time())
    request_data = {'appType': app_type, 'batchFlag': batch_flag, 'size': size}
    videos = []
    while True:
        result = request_post(request_url=config_.GET_VIDEOS_FROM_POOL_URL, request_data=request_data)
        if result is None:
            break
        if result['code'] != 0:
            log_.info('batch_flag: {}, failed to get videos from the flow pool'.format(batch_flag))
            break
        if not result['data']:
            break
        videos.extend(result['data'])
    return videos
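
# Minimal usage sketch (the app_type value 0 below is illustrative, not taken from config):
# videos = get_videos_from_flow_pool(app_type=0, size=500)
# # -> [{'videoId': 1111, 'flowPool': '...'}, ...]; an empty list means the pool had nothing
# #    to distribute or the request failed.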


def get_videos_remain_view_count(app_type, videos_info):
    """
    Get the remaining distributable view count of each video in the flow pool.
    :param app_type: product identifier, type-int
    :param videos_info: video info (video id, flow pool tag), type-list, [(video_id, flow_pool), ...]
    :return: data, type-list, [(video_id, flow_pool, view_count), ...]
    """
    if not videos_info:
        return []
    videos = [{'videoId': info[0], 'flowPool': info[1]} for info in videos_info]
    request_data = {'appType': app_type, 'videos': videos}
    result = request_post(request_url=config_.GET_REMAIN_VIEW_COUNT_URL, request_data=request_data)
    if result is None:
        return []
    if result['code'] != 0:
        log_.info('Failed to get the remaining distributable view counts from the flow pool')
        return []
    data = [(item['videoId'], item['flowPool'], item['viewCount']) for item in result['data']]
    return data
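
# Usage sketch, reusing the shape of the commented example in __main__ below
# (the video id and pool tag are illustrative only):
# remain = get_videos_remain_view_count(app_type=0, videos_info=[('12345', '#2#1#111')])
# # -> [('12345', '#2#1#111', view_count), ...]; returns [] on empty input or request failure.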


def get_score(video_ids):
    # Use a random float in [0, 100] as each video's score
    return [random.uniform(0, 100) for _ in range(len(video_ids))]
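
# Note: the scores are purely random, so the ranking written by predict() changes on every
# run; e.g. get_score([101, 102, 103]) might return [73.2, 5.8, 41.9] (illustrative values).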


def predict(app_type):
    """
    Rank the flow pool videos and upload the result to Redis.
    :param app_type: product identifier, type-int
    :return: None
    """
    try:
        # Fetch videos from the flow pool
        videos = get_videos_from_flow_pool(app_type=app_type)
        if len(videos) <= 0:
            log_.info('No videos to distribute in the flow pool')
            return None
        # Map each video_id to its flow_pool tags
        video_ids = set()
        log_.info('Number of videos in the flow pool: {}'.format(len(videos)))
        mapping = {}
        for video in videos:
            video_id = video['videoId']
            video_ids.add(video_id)
            if video_id in mapping:
                mapping[video_id].append(video['flowPool'])
            else:
                mapping[video_id] = [video['flowPool']]
        # Filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('No video in the flow pool has a status eligible for distribution')
            return None
        # Predict
        video_score = get_score(filtered_videos)
        log_.info('predict finished!')
        # Upload data to Redis
        redis_data = {}
        quick_flow_pool_redis_data = {}
        for i in range(len(video_score)):
            video_id = filtered_videos[i]
            score = video_score[i]
            for flow_pool in mapping.get(video_id):
                # Check whether the video is in the quick-exposure flow pool
                value = '{}-{}'.format(video_id, flow_pool)
                flow_pool_id = int(flow_pool.split('#')[0])  # flowPool: poolId#gradeId#level#lifecycleId
                if flow_pool_id == config_.QUICK_FLOW_POOL_ID:
                    quick_flow_pool_redis_data[value] = score
                else:
                    redis_data[value] = score
        # Write quick-exposure flow pool videos to Redis
        redis_helper = RedisHelper()
        quick_flow_pool_key_name = f"{config_.QUICK_FLOWPOOL_KEY_NAME_PREFIX}{app_type}.{config_.QUICK_FLOW_POOL_ID}"
        # Delete the key if it already exists
        if redis_helper.key_exists(quick_flow_pool_key_name):
            redis_helper.del_keys(quick_flow_pool_key_name)
        # Write to Redis
        if quick_flow_pool_redis_data:
            log_.info(f"quick_flow_pool_redis_data = {quick_flow_pool_redis_data}")
            redis_helper.add_data_with_zset(key_name=quick_flow_pool_key_name, data=quick_flow_pool_redis_data,
                                            expire_time=24 * 3600)
        # Write regular flow pool videos to Redis
        flow_pool_key_name = f"{config_.FLOWPOOL_KEY_NAME_PREFIX}{app_type}"
        # Delete the key if it already exists
        if redis_helper.key_exists(flow_pool_key_name):
            redis_helper.del_keys(flow_pool_key_name)
        # Write to Redis
        if redis_data:
            redis_helper.add_data_with_zset(key_name=flow_pool_key_name, data=redis_data, expire_time=24 * 3600)
        log_.info('data to redis finished!')
    except Exception as e:
        log_.error('Flow pool update failed, appType: {} exception: {}, traceback: {}'.format(
            app_type, e, traceback.format_exc()))
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text='rov-offline{} - flow pool update failed, appType: {}, exception: {}'.format(
                config_.ENV_TEXT, app_type, e)
        )
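
# Sketch of the resulting Redis layout, assuming the illustrative values app_type = 0 and
# config_.QUICK_FLOW_POOL_ID = 3 (neither is taken from the real config):
# a video {'videoId': 1111, 'flowPool': '3#2#1#56'} becomes member '1111-3#2#1#56' of the zset
# f"{config_.QUICK_FLOWPOOL_KEY_NAME_PREFIX}0.3", while the same video in pool '5#2#1#56'
# becomes member '1111-5#2#1#56' of f"{config_.FLOWPOOL_KEY_NAME_PREFIX}0"; in both cases the
# zset score is the predicted score.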


def get_data_from_odps(project, sql):
    """Run a SQL query on ODPS and return the result as a pandas DataFrame, or None on failure."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        with odps.execute_sql(sql=sql).open_reader() as reader:
            data_df = reader.to_pandas()
    except Exception:
        data_df = None
    return data_df
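
# Usage sketch (the table name is a placeholder; the column access assumes the query selects
# a 'video_id' column, as the callers below do):
# df = get_data_from_odps(project='videoods', sql="SELECT video_id FROM videoods.some_table")
# if df is not None:
#     video_ids = df['video_id'].to_list()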


def predict_18_19(app_type):
    log_.info(f'app_type = {app_type}')
    now = datetime.datetime.now()
    log_.info(f"now = {datetime.datetime.strftime(now, '%Y-%m-%d %H:%M:%S')}")
    # create_time = datetime.datetime.strftime(now - datetime.timedelta(hours=24), '%Y-%m-%d %H:%M:%S')
    create_time = '2022-04-22 16:40:00'
    if app_type == config_.APP_TYPE['LAO_HAO_KAN_VIDEO']:
        sql = f"SELECT video_id FROM videoods.movie_store_video_allow_list " \
              f"WHERE allow_list_type=1 AND create_time>='{create_time}'"
    # elif app_type == config_.APP_TYPE['ZUI_JING_QI']:
    #     sql = f"SELECT video_id FROM videoods.movie_store_video_allow_list " \
    #           f"WHERE allow_list_type=0 AND " \
    #           f"video_id NOT IN (" \
    #           f"SELECT video_id FROM videoods.movie_store_video_allow_list WHERE allow_list_type=1" \
    #           f") AND " \
    #           f"create_time>='{create_time}'"
    else:
        sql = ""
    data_df = get_data_from_odps(project='videoods', sql=sql)
    if data_df is not None:
        video_ids = [int(video_id) for video_id in data_df['video_id'].to_list()]
        log_.info(f'video_ids count = {len(video_ids)}')
        # Filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('No video in the flow pool has a status eligible for distribution')
            return None
        # Predict
        video_score = get_score(filtered_videos)
        log_.info('predict finished!')
        # Upload data to Redis
        redis_data = {}
        for i in range(len(video_score)):
            video_id = filtered_videos[i]
            score = video_score[i]
            redis_data[video_id] = score
        key_name = config_.FLOWPOOL_KEY_NAME_PREFIX + str(app_type)
        redis_helper = RedisHelper()
        # Delete the key if it already exists
        if redis_helper.key_exists(key_name):
            redis_helper.del_keys(key_name)
        # Write to Redis
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=24 * 3600)
        log_.info('data to redis finished!')
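
# Note: for any app_type other than LAO_HAO_KAN_VIDEO the sql above stays empty, the ODPS
# query fails, get_data_from_odps() returns None and predict_18_19() exits without writing
# anything to Redis.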


def get_score_19(video_ids):
    # Assign evenly spaced descending scores: the first id gets 100, each following id loses one step
    data = {}
    step = round(100.0 / len(video_ids), 3)
    for i, video_id in enumerate(video_ids):
        score = 100 - i * step
        data[video_id] = score
    return data
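
# Worked example (illustrative ids): with 4 ids the step is round(100.0 / 4, 3) = 25.0, so
# get_score_19([11, 22, 33, 44]) -> {11: 100.0, 22: 75.0, 33: 50.0, 44: 25.0}; earlier ids
# always score higher, which is why predict_19() orders its query by create_time DESC so
# that newer videos rank first.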


def predict_19(app_type):
    log_.info(f'app_type = {app_type}')
    now = datetime.datetime.now()
    log_.info(f"now = {datetime.datetime.strftime(now, '%Y-%m-%d %H:%M:%S')}")
    sql_create_time = datetime.datetime.strftime(now - datetime.timedelta(days=30), '%Y-%m-%d %H:%M:%S')
    if sql_create_time < '2022-04-22 16:40:00':
        sql_create_time = '2022-04-22 16:40:00'
    sql = f"SELECT video_id, create_time FROM videoods.movie_store_video_allow_list_final " \
          f"WHERE create_time>='{sql_create_time}' " \
          f"ORDER BY create_time DESC;"
    data_df = get_data_from_odps(project='videoods', sql=sql)
    if data_df is not None:
        video_ids = [int(video_id) for video_id in data_df['video_id'].to_list()]
        log_.info(f'video_ids count = {len(video_ids)}')
        # Predict
        video_score = get_score_19(video_ids=video_ids)
        # Filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('No video in the flow pool has a status eligible for distribution')
            return None
        # Upload data to Redis
        data = {}
        for video_id in filtered_videos:
            score = video_score[video_id]
            data[video_id] = score
        log_.info('predict finished!')
        key_name = config_.FLOWPOOL_KEY_NAME_PREFIX + str(app_type)
        redis_helper = RedisHelper()
        # Delete the key if it already exists
        if redis_helper.key_exists(key_name):
            redis_helper.del_keys(key_name)
        # Write to Redis
        redis_helper.add_data_with_zset(key_name=key_name, data=data, expire_time=24 * 3600)
        log_.info('data to redis finished!')


if __name__ == '__main__':
    # res = get_videos_from_flow_pool(app_type=0)
    # res = get_videos_remain_view_count(app_type=0, videos_info=[('12345', '#2#1#111')])
    # print(res)
    app_type_list = [config_.APP_TYPE['LAO_HAO_KAN_VIDEO'], config_.APP_TYPE['ZUI_JING_QI']]
    log_.info('flow pool predict start...')
    for app_name, app_type in config_.APP_TYPE.items():
        log_.info('{} predict start...'.format(app_name))
        if app_type == config_.APP_TYPE['LAO_HAO_KAN_VIDEO']:
            predict_18_19(app_type=app_type)
        elif app_type == config_.APP_TYPE['ZUI_JING_QI']:
            predict_19(app_type=app_type)
        else:
            predict(app_type=app_type)
        log_.info('{} predict end...'.format(app_name))
    log_.info('flow pool predict end...')

    # Upload logs to OSS
    # log_cmd = "ossutil cp -r -f {} oss://{}/{}".format(log_.logname, config_.BUCKET_NAME,
    #                                                    config_.OSS_FOLDER_LOGS + 'flow_pool/')
    # os.system(log_cmd)