ad_video_data_update.py

import datetime
import traceback
from threading import Timer

from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
from config import set_config
from log import Log

config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

features = [
    'apptype',
    'videoid',
    'sharerate_all',
    'sharerate_ad',
]
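
# Columns pulled from the feature table. Meanings inferred from how the code
# below uses them (an assumption, not documented in the source):
#   apptype        - client/app identifier
#   videoid        - video id; the special row 'allvideos' carries the
#                    aggregate over all videos
#   sharerate_all  - share rate over the last ~30 days, all traffic
#   sharerate_ad   - share rate over the last ~30 days when an ad was shown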


def get_top10_abnormal_videos_return(dt, filter_param):
    """Get yesterday's abnormal (virally shared) videos among each client's top 10."""
    abnormal_video_project = config_.ad_model_data['top10_videos'].get('project')
    abnormal_video_table = config_.ad_model_data['top10_videos'].get('table')
    abnormal_video_features = [
        'apptype', 'videoid', 'yesterday_return', 'rank', 'multiple'
    ]
    data_count = data_check(project=abnormal_video_project, table=abnormal_video_table, dt=dt)
    top10_abnormal_videos = {}
    if data_count > 0:
        abnormal_video_df = get_feature_data(project=abnormal_video_project, table=abnormal_video_table,
                                             features=abnormal_video_features, dt=dt)
        abnormal_video_df['multiple'] = abnormal_video_df['multiple'].fillna(0)
        abnormal_video_df['apptype'] = abnormal_video_df['apptype'].astype(int)
        abnormal_video_df['videoid'] = abnormal_video_df['videoid'].astype(int)
        abnormal_video_df['yesterday_return'] = abnormal_video_df['yesterday_return'].astype(int)
        abnormal_video_df['rank'] = abnormal_video_df['rank'].astype(int)
        abnormal_video_df['multiple'] = abnormal_video_df['multiple'].astype(float)
        app_type_list = list(set(abnormal_video_df['apptype'].tolist()))
        for app_type in app_type_list:
            app_type_df = abnormal_video_df[abnormal_video_df['apptype'] == app_type]
            app_type_df = app_type_df.sort_values(by=['rank'], ascending=True)
            # Walk the ranking from the top; whenever a video's `multiple` exceeds
            # the filter threshold, flag it together with every video ranked above it.
            temp_video_id_list = []
            for index, item in app_type_df.iterrows():
                temp_video_id_list.append(int(item['videoid']))
                if item['multiple'] > filter_param:
                    top10_abnormal_videos[app_type] = temp_video_id_list.copy()
    log_.info(f"top10_abnormal_videos = {top10_abnormal_videos}")
    return top10_abnormal_videos
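
# Shape of the return value (hypothetical ids): a mapping from app type to the
# video ids ranked at or above the last video whose `multiple` exceeded
# filter_param, e.g. {0: [101, 102, 103], 4: [205]}. App types with no video
# over the threshold are absent from the dict.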


def predict_video_share_rate(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Predict each video's share rate when an ad is shown."""
    # Select the feature rows for this app type
    video_df = video_initial_df.copy()
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == int(data_param)]
    video_df['sharerate_all'] = video_df['sharerate_all'].fillna(0)
    video_df['sharerate_ad'] = video_df['sharerate_ad'].fillna(0)
    video_df['sharerate_all'] = video_df['sharerate_all'].astype(float)
    video_df['sharerate_ad'] = video_df['sharerate_ad'].astype(float)
    # 30-day share rate across all videos when an ad is shown
    ad_all_videos_share_rate = video_df[video_df['videoid'] == 'allvideos']['sharerate_ad'].values[0]
    video_df = video_df[video_df['videoid'] != 'allvideos']
    # Drop the abnormal videos
    video_df['videoid'] = video_df['videoid'].astype(int)
    top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
    if top10_abnormal_video_ids is not None:
        video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
    # Predicted ad share rate: scale the video's with-ad share rate by the
    # global with-ad share rate relative to the video's overall share rate
    video_df['video_ad_share_rate'] = \
        video_df['sharerate_ad'] * float(ad_all_videos_share_rate) / video_df['sharerate_all']
    video_df['video_ad_share_rate'] = video_df['video_ad_share_rate'].fillna(0)
    video_df = video_df[video_df['video_ad_share_rate'] != 0]
    # Write the results to redis
    key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{data_key}:{dt}"
    redis_data = {}
    for index, item in video_df.iterrows():
        redis_data[int(item['videoid'])] = item['video_ad_share_rate']
    group_ad_share_rate_mean = video_df['video_ad_share_rate'].mean()
    redis_data[-1] = group_ad_share_rate_mean
    # Abnormal videos get a fixed penalized value: mean / 3
    if top10_abnormal_video_ids is not None:
        for abnormal_video_id in top10_abnormal_video_ids:
            log_.info(f"{abnormal_video_id}, {group_ad_share_rate_mean}, {group_ad_share_rate_mean / 3}")
            redis_data[int(abnormal_video_id)] = group_ad_share_rate_mean / 3
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return video_df
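
# Resulting redis zset layout under key
# f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{data_key}:{dt}" (expires after 2 days):
#   member <videoid>           -> score = predicted ad share rate
#   member -1                  -> score = mean rate of the group (fallback)
#   member <abnormal videoid>  -> score = group mean / 3 (penalized)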


def update_videos_data(project, table, dt, update_params, top10_abnormal_videos):
    """Run the ad share-rate prediction for every (data_key, data_param) pair."""
    # Pull the video features once and reuse them for every data_key
    video_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    for data_key, data_param in update_params.items():
        log_.info(f"data_key = {data_key} update start...")
        predict_video_share_rate(video_initial_df=video_initial_df, dt=dt, data_key=data_key,
                                 data_param=data_param, top10_abnormal_videos=top10_abnormal_videos)
        log_.info(f"data_key = {data_key} update end!")


def timer_check(dt, video_key, video_params, top10_abnormal_videos):
    log_.info(f"video_key = {video_key}")
    project = config_.ad_model_data[video_key].get('project')
    table = config_.ad_model_data[video_key].get('table')
    # Check whether today's partition is ready
    data_count = data_check(project=project, table=table, dt=dt)
    if data_count > 0:
        log_.info(f"ad video data count = {data_count}")
        # Data is ready: run the update
        update_videos_data(project=project, table=table, dt=dt, update_params=video_params,
                           top10_abnormal_videos=top10_abnormal_videos)
        log_.info(f"ad video data update end!")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - video data update finished\n"
                     f"video_key: {video_key}\n"
                     f"now_date: {dt}\n"
                     f"finished time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}"
        )
    else:
        # Data is not ready yet: check again in 1 minute
        Timer(60, timer_check, args=[dt, video_key, video_params, top10_abnormal_videos]).start()
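
# Note: when the partition is missing, timer_check reschedules itself every
# 60 seconds with no retry cap, so each Timer thread keeps polling until the
# data shows up and the update (plus the feishu notification) completes.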


def main():
    try:
        now_date = datetime.datetime.today()
        dt = datetime.datetime.strftime(now_date, '%Y%m%d')
        log_.info(f"now_date: {dt}")
        # Get yesterday's abnormal (virally shared) videos among each client's top 10
        top10_abnormal_videos = get_top10_abnormal_videos_return(
            dt=dt, filter_param=config_.ad_model_data['top10_videos'].get('abnormal_filter_param')
        )
        update_params = config_.AD_VIDEO_DATA_PARAMS
        for video_key, video_params in update_params.items():
            timer_check(dt, video_key, video_params, top10_abnormal_videos)
    except Exception as e:
        log_.error(f"Video share-rate prediction data update failed, exception: {e}, "
                   f"traceback: {traceback.format_exc()}")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - video share-rate prediction data update failed\n"
                     f"exception: {e}\n"
                     f"traceback: {traceback.format_exc()}"
        )


if __name__ == '__main__':
    main()