# ad_video_data_update_with_new_strategy.py
import datetime
import traceback
import multiprocessing
from threading import Timer
from my_utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu, send_msg_to_feishu_new
from my_config import set_config
from log import Log

# Module-level singletons shared by every function in this job.
config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

# Feature columns fetched from the offline video feature table.
# The *_rate / *_share columns feed the Bayes computation
# P(share | ad) = P(ad | share) * P(share) / P(ad) below.
features = [
    'apptype',
    'videoid',
    'ad_type',          # 0: all, 1: self-operated (自营), 2: WeChat (微信)
    'sharerate',        # probability the video is shared
    'no_ad_rate',       # probability no ad is shown
    'no_adrate_share',  # probability no ad is shown, given the video was shared
    'ad_rate',          # probability an ad is shown
    'adrate_share',     # probability an ad is shown, given the video was shared
]
  21. def get_top10_abnormal_videos_return(dt, filter_param):
  22. """获取昨日各端top10中的异常视频(裂变视频)"""
  23. abnormal_video_project = config_.ad_model_data['top10_videos'].get('project')
  24. abnormal_video_table = config_.ad_model_data['top10_videos'].get('table')
  25. abnormal_video_features = [
  26. 'apptype', 'videoid', 'yesterday_return', 'rank', 'multiple'
  27. ]
  28. data_count = data_check(project=abnormal_video_project, table=abnormal_video_table, dt=dt)
  29. top10_abnormal_videos = {}
  30. if data_count > 0:
  31. abnormal_video_df = get_feature_data(project=abnormal_video_project, table=abnormal_video_table,
  32. features=abnormal_video_features, dt=dt)
  33. abnormal_video_df['multiple'].fillna(0, inplace=True)
  34. abnormal_video_df['apptype'] = abnormal_video_df['apptype'].astype(int)
  35. abnormal_video_df['videoid'] = abnormal_video_df['videoid'].astype(int)
  36. abnormal_video_df['yesterday_return'] = abnormal_video_df['yesterday_return'].astype(int)
  37. abnormal_video_df['rank'] = abnormal_video_df['rank'].astype(int)
  38. abnormal_video_df['multiple'] = abnormal_video_df['multiple'].astype(float)
  39. app_type_list = list(set(abnormal_video_df['apptype'].tolist()))
  40. for app_type in app_type_list:
  41. app_type_df = abnormal_video_df[abnormal_video_df['apptype'] == app_type]
  42. app_type_df = app_type_df.sort_values(by=['rank'], ascending=True)
  43. # print(app_type_df)
  44. temp_video_id_list = []
  45. for index, item in app_type_df.iterrows():
  46. # print(item['rank'], item['videoid'], item['multiple'])
  47. if item['multiple'] > filter_param:
  48. # print(item['videoid'], item['multiple'])
  49. abnormal_video_id_list = temp_video_id_list.copy()
  50. abnormal_video_id_list.append(int(item['videoid']))
  51. top10_abnormal_videos[app_type] = abnormal_video_id_list
  52. temp_video_id_list.append(int(item['videoid']))
  53. else:
  54. temp_video_id_list.append(int(item['videoid']))
  55. # print(top10_abnormal_videos)
  56. log_.info(f"top10_abnormal_videos = {top10_abnormal_videos}")
  57. return top10_abnormal_videos
  58. def predict_video_share_rate_with_ad(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
  59. """预估视频有广告时被分享的概率"""
  60. # 获取对应的视频特征
  61. video_df = video_initial_df.copy()
  62. # 获取所有广告类型对应的数据
  63. video_df['ad_type'] = video_df['ad_type'].astype(int)
  64. video_df = video_df[video_df['ad_type'] == 0]
  65. video_df['apptype'] = video_df['apptype'].astype(int)
  66. video_df = video_df[video_df['apptype'] == int(data_param)]
  67. log_.info(f"video_df length: {len(video_df)}")
  68. # print(video_df)
  69. video_df['ad_rate'].fillna(0, inplace=True)
  70. video_df['sharerate'].fillna(0, inplace=True)
  71. video_df['adrate_share'].fillna(0, inplace=True)
  72. video_df['ad_rate'] = video_df['ad_rate'].astype(float)
  73. video_df['sharerate'] = video_df['sharerate'].astype(float)
  74. video_df['adrate_share'] = video_df['adrate_share'].astype(float)
  75. # 计算视频有广告时被分享率
  76. video_df = video_df[video_df['ad_rate'] != 0]
  77. # print(video_df)
  78. video_df['video_ad_share_rate'] = \
  79. video_df['adrate_share'] * video_df['sharerate'] / video_df['ad_rate']
  80. video_df['video_ad_share_rate'].fillna(0, inplace=True)
  81. # log_.info(f"video_df: {video_df}")
  82. # video_df = video_df[video_df['video_ad_share_rate'] != 0]
  83. log_.info(f"video_df filtered 0 length: {len(video_df)}")
  84. # 结果写入redis
  85. key_name = f"{config_.KEY_NAME_PREFIX_VIDEO_WITH_AD}{data_key}:{dt}"
  86. redis_data = {}
  87. for index, item in video_df.iterrows():
  88. redis_data[int(item['videoid'])] = item['video_ad_share_rate']
  89. # 剔除异常视频数据
  90. video_df['videoid'] = video_df['videoid'].astype(int)
  91. top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
  92. if top10_abnormal_video_ids is not None:
  93. video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
  94. group_ad_share_rate_mean = video_df['video_ad_share_rate'].mean()
  95. redis_data[-1] = group_ad_share_rate_mean
  96. log_.info(f"redis_data count: {len(redis_data)}")
  97. if len(redis_data) > 0:
  98. redis_helper = RedisHelper()
  99. redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
  100. return video_df
  101. def predict_video_share_rate_no_ad(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
  102. """预估视频无广告时被分享的概率"""
  103. # 获取对应的视频特征
  104. video_df = video_initial_df.copy()
  105. # 获取所有广告类型对应的数据
  106. video_df['ad_type'] = video_df['ad_type'].astype(int)
  107. video_df = video_df[video_df['ad_type'] == 0]
  108. video_df['apptype'] = video_df['apptype'].astype(int)
  109. video_df = video_df[video_df['apptype'] == int(data_param)]
  110. log_.info(f"video_df length: {len(video_df)}")
  111. video_df['no_ad_rate'].fillna(0, inplace=True)
  112. video_df['sharerate'].fillna(0, inplace=True)
  113. video_df['no_adrate_share'].fillna(0, inplace=True)
  114. video_df['no_ad_rate'] = video_df['no_ad_rate'].astype(float)
  115. video_df['sharerate'] = video_df['sharerate'].astype(float)
  116. video_df['no_adrate_share'] = video_df['no_adrate_share'].astype(float)
  117. # 计算视频无广告时被分享率
  118. video_df = video_df[video_df['no_ad_rate'] != 0]
  119. video_df['video_no_ad_share_rate'] = \
  120. video_df['no_adrate_share'] * video_df['sharerate'] / video_df['no_ad_rate']
  121. video_df['video_no_ad_share_rate'].fillna(0, inplace=True)
  122. # log_.info(f"video_df: {video_df}")
  123. # video_df = video_df[video_df['video_no_ad_share_rate'] != 0]
  124. log_.info(f"video_df filtered 0 length: {len(video_df)}")
  125. # 结果写入redis
  126. key_name = f"{config_.KEY_NAME_PREFIX_VIDEO_NO_AD}{data_key}:{dt}"
  127. redis_data = {}
  128. for index, item in video_df.iterrows():
  129. redis_data[int(item['videoid'])] = item['video_no_ad_share_rate']
  130. # 剔除异常视频数据
  131. video_df['videoid'] = video_df['videoid'].astype(int)
  132. top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
  133. if top10_abnormal_video_ids is not None:
  134. video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
  135. group_ad_share_rate_mean = video_df['video_no_ad_share_rate'].mean()
  136. redis_data[-1] = group_ad_share_rate_mean
  137. log_.info(f"redis_data count: {len(redis_data)}")
  138. if len(redis_data) > 0:
  139. redis_helper = RedisHelper()
  140. redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
  141. return video_df
  142. def update_videos_data(project, table, dt, update_params, top10_abnormal_videos):
  143. """预估视频有广告时分享率"""
  144. # 获取视频特征
  145. video_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
  146. for data_key, data_param in update_params.items():
  147. log_.info(f"data_key = {data_key} update start...")
  148. log_.info(f"predict_video_share_rate_with_ad start...")
  149. predict_video_share_rate_with_ad(video_initial_df=video_initial_df, dt=dt, data_key=data_key,
  150. data_param=data_param, top10_abnormal_videos=top10_abnormal_videos)
  151. log_.info(f"predict_video_share_rate_with_ad end!")
  152. log_.info(f"predict_video_share_rate_no_ad start...")
  153. predict_video_share_rate_no_ad(video_initial_df=video_initial_df, dt=dt, data_key=data_key,
  154. data_param=data_param, top10_abnormal_videos=top10_abnormal_videos)
  155. log_.info(f"predict_video_share_rate_no_ad end!")
  156. log_.info(f"data_key = {data_key} update end!")
  157. def timer_check(dt, video_key, video_params, top10_abnormal_videos):
  158. log_.info(f"video_key = {video_key}")
  159. project = config_.ad_model_data[video_key].get('project')
  160. table = config_.ad_model_data[video_key].get('table')
  161. # 查看当前更新的数据是否已准备好
  162. data_count = data_check(project=project, table=table, dt=dt)
  163. if data_count > 0:
  164. log_.info(f"ad video data count = {data_count}")
  165. # 数据准备好,进行更新
  166. update_videos_data(project=project, table=table, dt=dt, update_params=video_params,
  167. top10_abnormal_videos=top10_abnormal_videos)
  168. log_.info(f"video_key = {video_key} ad video data update end!")
  169. msg_list = [
  170. f"env: rov-offline {config_.ENV_TEXT}",
  171. f"video_key: {video_key}",
  172. f"now_date: {dt}",
  173. f"finished time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
  174. ]
  175. send_msg_to_feishu_new(
  176. webhook=config_.FEISHU_ROBOT['ad_video_update_robot'].get('webhook'),
  177. key_word=config_.FEISHU_ROBOT['ad_video_update_robot'].get('key_word'),
  178. title='新策略 -- 广告模型视频分享率预测数据更新完成',
  179. msg_list=msg_list
  180. )
  181. else:
  182. # 数据没准备好,1分钟后重新检查
  183. Timer(60, timer_check, args=[dt, video_key, video_params, top10_abnormal_videos]).start()
  184. def main():
  185. try:
  186. now_date = datetime.datetime.today()
  187. dt = datetime.datetime.strftime(now_date, '%Y%m%d')
  188. log_.info(f"now_date: {dt}")
  189. # 获取昨天top10中的异常视频(裂变视频)
  190. top10_abnormal_videos = get_top10_abnormal_videos_return(
  191. dt=dt, filter_param=config_.ad_model_data['top10_videos'].get('abnormal_filter_param')
  192. )
  193. update_params = config_.AD_VIDEO_DATA_PARAMS_NEW_STRATEGY
  194. pool = multiprocessing.Pool(processes=len(update_params))
  195. for video_key, video_params in update_params.items():
  196. pool.apply_async(
  197. func=timer_check,
  198. args=(dt, video_key, video_params, top10_abnormal_videos)
  199. )
  200. pool.close()
  201. pool.join()
  202. # for video_key, video_params in update_params.items():
  203. # timer_check(dt, video_key, video_params, top10_abnormal_videos)
  204. except Exception as e:
  205. log_.error(f"新策略 -- 广告模型视频分享率预测数据更新失败, exception: {e}, traceback: {traceback.format_exc()}")
  206. msg_list = [
  207. f"env: rov-offline {config_.ENV_TEXT}",
  208. f"now time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
  209. f"exception: {e}",
  210. f"traceback: {traceback.format_exc()}",
  211. ]
  212. send_msg_to_feishu_new(
  213. webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
  214. key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
  215. title='新策略 -- 广告模型视频分享率预测数据更新失败',
  216. msg_list=msg_list
  217. )
if __name__ == '__main__':
    # timer_check()  # manual single-key run for debugging
    main()