# flowpool_data_update_with_level_v2.py

import time
import traceback
import json
import random

from my_config import set_config
from my_utils import request_post, filter_video_status, send_msg_to_feishu, filter_video_status_app, \
    filter_political_videos
from log import Log
from db_helper import RedisHelper
from odps import ODPS

config_, _ = set_config()
log_ = Log()


def get_videos_from_flow_pool(app_type, size=1000):
    """
    Fetch videos from the flow pool, requesting in a loop until no more data is returned.
    :param app_type: product identifier, type-int
    :param size: number of videos to fetch per request, type-int, default 1000
    :return: videos [{'videoId': 1111, 'flowPool': ''}, ...]
    """
    # Batch flag: the timestamp of the first fetch of this run is used as the marker
    batch_flag = int(time.time()) * 1000 + 111
    log_.info(f"batch_flag: {batch_flag}")
    request_data = {'appType': app_type, 'batchFlag': batch_flag, 'size': size}
    videos = []
    retry = 0
    while True:
        result = request_post(request_url=config_.GET_VIDEOS_FROM_POOL_URL, request_data=request_data)
        if result is None:
            if retry > 2:
                break
            retry += 1
            continue
        if result['code'] != 0:
            log_.info('batch_flag: {}, failed to fetch videos from the flow pool'.format(batch_flag))
            if retry > 2:
                break
            retry += 1
            continue
        if not result['data']:
            if retry > 2:
                break
            retry += 1
            continue
        videos.extend(result['data'])
    return videos
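
# Usage sketch (hypothetical values, inferred from how the result is consumed below):
#   videos = get_videos_from_flow_pool(app_type=0, size=1000)
#   -> [{'videoId': 1111, 'flowPool': '126#2#3#1', 'flowPoolId': 126, 'level': 3}, ...]
# The endpoint is assumed to page by batchFlag and to return an empty 'data' list once the batch is drained.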


def update_remain_view_count(video_info_list):
    """
    Fetch each video's remaining distributable count in the flow pool and store it in the corresponding Redis key.
    :param video_info_list: video info, type-list, [(video_id, flow_pool), ...]
    :return: remain_videos, type-dict, {video_id: distribute_count, ...}
    """
    redis_helper = RedisHelper()
    if not video_info_list:
        return dict()
    remain_videos = dict()
    # Request 10 videos at a time
    for i in range(len(video_info_list) // 10 + 1):
        remain_st_time = time.time()
        videos = [{'videoId': info[0], 'flowPool': info[1]} for info in video_info_list[i * 10:(i + 1) * 10]]
        if not videos:
            # The last slice is empty when the list length is a multiple of 10; skip the empty request
            break
        request_data = {'videos': videos}
        result = request_post(request_url=config_.GET_REMAIN_VIEW_COUNT_URL,
                              request_data=request_data, timeout=(0.5, 3))
        log_.info(f"i = {i}, elapsed time = {(time.time() - remain_st_time) * 1000}")
        if result is None:
            continue
        if result['code'] != 0:
            log_.error('Failed to fetch remaining distributable counts from the flow pool')
            continue
        for item in result['data']:
            if item['distributeCount'] is None:
                continue
            distribute_count = int(item['distributeCount'])
            if distribute_count > 0:
                remain_videos[item['videoId']] = distribute_count
                # Update the local record of the distribute count
                key_name = f"{config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}{item['videoId']}:{item['flowPool']}"
                redis_helper.set_data_to_redis(key_name=key_name, value=distribute_count, expire_time=25 * 60)
            else:
                # Remove the local record
                key_name = f"{config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}{item['videoId']}:{item['flowPool']}"
                redis_helper.del_keys(key_name=key_name)
    log_.info(f"Counts before/after filtering out non-distributable videos: {len(video_info_list)}:{len(remain_videos)}")
    return remain_videos
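
# Usage sketch (hypothetical values): for video_info_list = [(1111, '126#2#3#1')], a response item with
# distributeCount=20 yields {1111: 20} and refreshes the local counter key
# f"{config_.LOCAL_DISTRIBUTE_COUNT_PREFIX}1111:126#2#3#1" with a 25-minute expiry; a count of 0 deletes that key.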


def get_flow_pool_recommend_config(flow_pool_id):
    """Fetch the flow pool recommendation/distribution config and return its distribute rate."""
    result = request_post(request_url=config_.GET_FLOW_POOL_RECOMMEND_CONFIG_URL)
    if result is None:
        return None
    if result['code'] != 0:
        return None
    flow_pool_distribute_config = result['data'].get('flowPoolDistributeConfig')
    if not flow_pool_distribute_config:
        return None
    # The config is delivered as a dict-like string, so it is parsed with eval
    distribute_config = eval(flow_pool_distribute_config)
    if int(distribute_config.get('flowPoolId')) == flow_pool_id:
        return eval(distribute_config.get('distributeRate'))
    return None
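
# Usage sketch (hypothetical value): get_flow_pool_recommend_config(flow_pool_id=config_.QUICK_FLOW_POOL_ID)
# returns the evaluated distributeRate (e.g. 0.25) when the server-side config targets that pool, otherwise None.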


def update_flow_pool(flow_pool_id_list):
    """
    Fetch distributable videos from the flow pool and upload the result to Redis.
    :param flow_pool_id_list: flow pool IDs to process, type-list
    :return: None
    """
    # All products fetch data with app_type 0
    app_type = 0
    try:
        # Fetch data from the flow pool
        videos = get_videos_from_flow_pool(app_type=app_type)
        if len(videos) <= 0:
            log_.info('No videos in the flow pool need distribution')
            return

        # Map video_id to its flow_pool and level
        video_ids = set()
        log_.info('Number of videos in the flow pool: {}'.format(len(videos)))
        mapping = {}
        for video in videos:
            # The split by flow pool ID used for recall happens here; videos inside the pool are not
            # separated by ID or by level.
            flow_pool_id = video['flowPoolId']
            if int(flow_pool_id) not in flow_pool_id_list:
                continue
            # print(f"flow_pool_id: {flow_pool_id}")
            video_id = video['videoId']
            video_ids.add(video_id)
            item_info = {'flowPool': video['flowPool'], 'level': video['level']}
            if video_id in mapping:
                mapping[video_id].append(item_info)
            else:
                mapping[video_id] = [item_info]
        log_.info(f"Number of flow pool videos to update: {len(video_ids)}")

        # Filter by video status
        filtered_videos = filter_video_status(list(video_ids))
        log_.info('filter videos status finished, filtered_videos nums={}'.format(len(filtered_videos)))
        if not filtered_videos:
            log_.info('No flow pool videos are eligible for distribution')
            return

        # 1. Update distribute counts and filter out videos whose remaining count is 0
        video_info_list = []
        for video_id in filtered_videos:
            for item in mapping.get(video_id):
                flow_pool = item['flowPool']
                video_info = (video_id, flow_pool)
                if video_info not in video_info_list:
                    video_info_list.append(video_info)
        log_.info(f"video_info_list count = {len(video_info_list)}")
        remain_videos = update_remain_view_count(video_info_list)
        if not remain_videos:
            log_.info('No flow pool videos are eligible for distribution')
            return

        # Upload the data to Redis
        quick_flow_pool_redis_data = set()
        flow_pool_redis_data = dict()
        level_weight = dict()
        for video_id, distribute_count in remain_videos.items():
            for item in mapping.get(video_id):
                flow_pool = item['flowPool']
                level = item['level']
                # Check whether this is a quick-exposure flow pool video
                value = '{}-{}'.format(video_id, flow_pool)
                flow_pool_id = int(flow_pool.split('#')[0])  # flowPool format: flowPoolId#gradeId#level#lifecycleId
                if flow_pool_id == config_.QUICK_FLOW_POOL_ID:
                    quick_flow_pool_redis_data.add(value)
                else:
                    if level not in flow_pool_redis_data:
                        flow_pool_redis_data[level] = set()
                        level_weight[level] = 0
                    flow_pool_redis_data[level].add(value)
                    level_weight[level] += distribute_count

        # 2. Quick-exposure pool
        redis_helper = RedisHelper()
        quick_flow_pool_key_name = f"{config_.QUICK_FLOWPOOL_KEY_NAME_PREFIX_SET}{app_type}:{config_.QUICK_FLOW_POOL_ID}"
        # Delete the key if it already exists
        if redis_helper.key_exists(quick_flow_pool_key_name):
            redis_helper.del_keys(quick_flow_pool_key_name)
        if quick_flow_pool_redis_data:
            log_.info(f"quick_flow_pool_redis_data = {quick_flow_pool_redis_data}")
            redis_helper.add_data_with_set(key_name=quick_flow_pool_key_name, values=quick_flow_pool_redis_data,
                                           expire_time=24 * 3600)
            # Store the quick flow pool distribute rate in Redis
            distribute_rate_key_name = f"{config_.QUICK_FLOWPOOL_DISTRIBUTE_RATE_KEY_NAME_PREFIX}{config_.QUICK_FLOW_POOL_ID}"
            distribute_rate = get_flow_pool_recommend_config(flow_pool_id=config_.QUICK_FLOW_POOL_ID)
            if distribute_rate is not None:
                redis_helper.set_data_to_redis(key_name=distribute_rate_key_name, value=distribute_rate,
                                               expire_time=15 * 60)

        # 3. Regular flow pools: one Redis set per level
        for level, videos in flow_pool_redis_data.items():
            log_.info(f"level: {level}, videos_count: {len(videos)}")
            flow_pool_key_name = f"flow:pool:level:item:v2:{app_type}:{level}"
            # Delete the key if it already exists
            if redis_helper.key_exists(flow_pool_key_name):
                redis_helper.del_keys(flow_pool_key_name)
            # Write to Redis
            if videos:
                redis_helper.add_data_with_set(key_name=flow_pool_key_name, values=videos, expire_time=24 * 3600)
                result = redis_helper.get_data_from_set(flow_pool_key_name)
                if not result:
                    result = []
                size = len(result)
                log_.info(f'Write succeeded, key={flow_pool_key_name}:{size}')

        # 4. Write the per-level weights
        weight = json.dumps(level_weight)
        redis_helper.set_data_to_redis(key_name="flow:pool:level:weight:v2", value=weight, expire_time=24 * 3600)
        log_.info(f'level weights: {weight}')
        log_.info('data to redis finished!')
    except Exception as e:
        log_.error('Flow pool update failed, appType: {} exception: {}, traceback: {}'.format(
            app_type, e, traceback.format_exc()))
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text='rov-offline{} - flow pool update failed, appType: {}, exception: {}'.format(
                config_.ENV_TEXT, app_type, e)
        )
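
# Redis layout written by update_flow_pool (key prefixes come from config_; member values are hypothetical):
#   set  {QUICK_FLOWPOOL_KEY_NAME_PREFIX_SET}{app_type}:{QUICK_FLOW_POOL_ID} -> {'1111-126#2#3#1', ...}, TTL 24h
#   str  {QUICK_FLOWPOOL_DISTRIBUTE_RATE_KEY_NAME_PREFIX}{QUICK_FLOW_POOL_ID} -> distribute rate, TTL 15min
#   set  flow:pool:level:item:v2:{app_type}:{level} -> {'2222-127#1#2#1', ...}, TTL 24h
#   str  flow:pool:level:weight:v2 -> JSON dict of {level: summed remaining distribute count}, TTL 24h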


if __name__ == '__main__':
    st_time = time.time()
    # To avoid incomplete data for the first app_type, wait 1 minute
    log_.info('flow pool predict start...')
    # Get the list of flow pool IDs to process
    redis_helper = RedisHelper()
    flow_pool_abtest_config = redis_helper.get_data_from_redis(key_name=config_.FLOWPOOL_ABTEST_KEY_NAME)
    if flow_pool_abtest_config is not None:
        flow_pool_abtest_config = json.loads(flow_pool_abtest_config)
    else:
        flow_pool_abtest_config = {}
    flow_pool_id_list = flow_pool_abtest_config.get('experimental_flow_set_level', [])
    log_.info('predict start...')
    update_flow_pool(flow_pool_id_list=flow_pool_id_list)
    log_.info('predict end...')
    log_.info(f"elapsed time = {(time.time() - st_time) * 1000}ms")
    log_.info('flow pool predict end...')
# python flowpool_data_update_with_level.py: in the test environment the script must be run manually for data to appear
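
# Example manual run (assumption: executed on a host where my_config and Redis are reachable):
#   python flowpool_data_update_with_level_v2.py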