# ad_user_video_predict.py

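"""
Offline job for updating ad-display thresholds.

For each ab-test configuration, it combines user-group and video predictions (with-ad
share rate and, in the mixed variants, with-ad not-exit rate) into per-group scores,
turns the per-group means into thresholds, and writes the thresholds to Redis. A Feishu
notification is sent when the update succeeds or fails.
"""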
import datetime
import sys
import traceback

import numpy as np
import pandas as pd
from odps import ODPS

from my_utils import data_check, get_feature_data, send_msg_to_feishu_new, RedisHelper
from my_config import set_config
from log import Log

config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()


def predict_user_group_share_rate(dt, app_type):
    """Predict each user group's share rate when ads are shown."""
    # Get user-group features
    project = config_.ad_model_data['users_share_rate'].get('project')
    table = config_.ad_model_data['users_share_rate'].get('table')
    features = [
        'apptype',
        'group',
        'sharerate_all',
        'sharerate_ad'
    ]
    user_group_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    user_group_df['apptype'] = user_group_df['apptype'].astype(int)
    user_group_df = user_group_df[user_group_df['apptype'] == app_type]
    user_group_df['sharerate_all'] = user_group_df['sharerate_all'].astype(float)
    user_group_df['sharerate_ad'] = user_group_df['sharerate_ad'].astype(float)
    # Share rate of all user groups ('allmids') over the last 30 days when ads are shown
    ad_all_group_share_rate = user_group_df[user_group_df['group'] == 'allmids']['sharerate_ad'].values[0]
    user_group_df = user_group_df[user_group_df['group'] != 'allmids']
    # Compute each group's share rate when ads are shown
    user_group_df['group_ad_share_rate'] = \
        user_group_df['sharerate_ad'] * float(ad_all_group_share_rate) / user_group_df['sharerate_all']
    return user_group_df
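
# Illustrative example for predict_user_group_share_rate (hypothetical numbers, not taken
# from real data): if a group shares at 0.05 overall but at 0.04 when ads are shown, and
# the all-group with-ad share rate ('allmids') is 0.03, then
#     group_ad_share_rate = 0.04 * 0.03 / 0.05 = 0.024
# i.e. the group's relative with-ad share propensity (0.04 / 0.05) rescaled by the global
# with-ad share rate. predict_video_share_rate below applies the same rescaling per video.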


def predict_video_share_rate(dt, app_type):
    """Predict each video's share rate when ads are shown."""
    # Get video features
    project = config_.ad_model_data['videos_share_rate'].get('project')
    table = config_.ad_model_data['videos_share_rate'].get('table')
    features = [
        'apptype',
        'videoid',
        'sharerate_all',
        'sharerate_ad'
    ]
    video_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == app_type]
    video_df['sharerate_all'] = video_df['sharerate_all'].astype(float)
    video_df['sharerate_ad'] = video_df['sharerate_ad'].astype(float)
    # Share rate of all videos ('allvideos') over the last 30 days when ads are shown
    ad_all_videos_share_rate = video_df[video_df['videoid'] == 'allvideos']['sharerate_ad'].values[0]
    video_df = video_df[video_df['videoid'] != 'allvideos']
    # Compute each video's share rate when ads are shown
    video_df['video_ad_share_rate'] = \
        video_df['sharerate_ad'] * float(ad_all_videos_share_rate) / video_df['sharerate_all']
    return video_df


def predict_ad_group_video(dt, config_key, config_param, threshold_record):
    """Compute per-user-group ad thresholds for one ab-test configuration and write them to Redis."""
    log_.info(f"config_key = {config_key} update start ...")
    # Get user-group predictions
    user_data_key = config_param['user'].get('data')
    user_rule_key = config_param['user'].get('rule')
    group_key_name = f"{config_.KEY_NAME_PREFIX_AD_GROUP}{user_data_key}:{user_rule_key}:{dt}"
    group_data = redis_helper.get_all_data_from_zset(key_name=group_key_name, with_scores=True)
    if group_data is None:
        log_.info("group data is None!")
    group_df = pd.DataFrame(data=group_data, columns=['group', 'group_ad_share_rate'])
    group_df = group_df[group_df['group'] != 'mean_group']
    log_.info(f"group_df count = {len(group_df)}")
    # Get video predictions
    video_data_key = config_param['video'].get('data')
    video_key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{video_data_key}:{dt}"
    video_data = redis_helper.get_all_data_from_zset(key_name=video_key_name, with_scores=True)
    if video_data is None:
        log_.info("video data is None!")
    video_df = pd.DataFrame(data=video_data, columns=['videoid', 'video_ad_share_rate'])
    video_df = video_df[video_df['videoid'] != -1]
    log_.info(f"video_df count = {len(video_df)}")
    if len(group_df) == 0 or len(video_df) == 0:
        sys.exit(1)
    predict_df = video_df
    all_group_data = []
    for _, item in group_df.iterrows():
        predict_df[item['group']] = predict_df['video_ad_share_rate'] * item['group_ad_share_rate']
        all_group_data.extend(predict_df[item['group']].tolist())
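    # At this point predict_df has one column per user group, whose values are
    # video_ad_share_rate * group_ad_share_rate, i.e. the predicted with-ad share rate for
    # each (user group, video) pair; all_group_data collects every such value so that an
    # overall mean can be used for the 'mean_group' threshold below.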
    # Compute the corresponding thresholds
    # ad_threshold_mappings = config_.AD_ABTEST_THRESHOLD_CONFIG.get(config_key.split('-')[0])
    ad_threshold_mappings = threshold_record.get(config_key.split('-')[0])
    for abtest_group, ad_threshold_mapping in ad_threshold_mappings.items():
        threshold_data = {}
        for _, item in group_df.iterrows():
            # Use the group's mean prediction, scaled by the ab-test factor, as its threshold
            threshold_data[item['group']] = predict_df[item['group']].mean() * ad_threshold_mapping['group']
        threshold_data['mean_group'] = np.mean(all_group_data) * ad_threshold_mapping['mean_group']
        # User groups that should see more ads, and their threshold ratios
        more_ad = config_param.get('more_ad', None)
        if more_ad is not None:
            for group_key, group_threshold_rate in more_ad.items():
                threshold_data[group_key] = threshold_data[group_key] * group_threshold_rate
        log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, threshold_data = {threshold_data}")
        # Write the thresholds to Redis
        abtest_config_list = config_key.split('-')
        abtest_id, abtest_config_tag = abtest_config_list[0], abtest_config_list[1]
        for key, val in threshold_data.items():
            key_name = f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
            redis_helper.set_data_to_redis(key_name=key_name, value=val, expire_time=2 * 24 * 3600)
        # Compute care-mode experiment thresholds and write them to Redis
        care_model = config_param.get('care_model', None)
        threshold_rate = config_param.get('threshold_rate', None)
        if care_model is True:
            care_model_threshold_data = {}
            for key, val in threshold_data.items():
                up_val = val * threshold_rate
                care_model_threshold_data[key] = up_val
                up_key_name = \
                    f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD_CARE_MODEL}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
                redis_helper.set_data_to_redis(key_name=up_key_name, value=up_val, expire_time=2 * 24 * 3600)
            log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, "
                      f"care_model_threshold_data = {care_model_threshold_data}")
    # predict_df.to_csv(f'./data/ad_user_video_predict_{config_key}.csv')
    log_.info(f"config_key = {config_key} update end!")


def predict_ad_group_video_mix_with_add(dt, config_key, config_param, threshold_record):
    """Same as predict_ad_group_video, but fuses the share objective and the not-exit
    objective with a weighted sum before computing thresholds."""
    log_.info(f"config_key = {config_key} update start ...")
    # ###### Data for the share objective
    # Get user-group predictions (probability of sharing after an ad is shown)
    share_user_data_key = config_param['share']['user'].get('data')
    share_user_rule_key = config_param['share']['user'].get('rule')
    share_group_key_name = f"{config_.KEY_NAME_PREFIX_AD_GROUP}{share_user_data_key}:{share_user_rule_key}:{dt}"
    share_group_data = redis_helper.get_all_data_from_zset(key_name=share_group_key_name, with_scores=True)
    if share_group_data is None:
        log_.info("share group data is None!")
    share_group_df = pd.DataFrame(data=share_group_data, columns=['group', 'group_ad_share_rate'])
    share_group_df = share_group_df[share_group_df['group'] != 'mean_group']
    log_.info(f"share_group_df count = {len(share_group_df)}")
    # Get video predictions (probability of sharing after an ad is shown)
    share_video_data_key = config_param['share']['video'].get('data')
    share_video_key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{share_video_data_key}:{dt}"
    share_video_data = redis_helper.get_all_data_from_zset(key_name=share_video_key_name, with_scores=True)
    if share_video_data is None:
        log_.info("share video data is None!")
    share_video_df = pd.DataFrame(data=share_video_data, columns=['videoid', 'video_ad_share_rate'])
    share_video_df = share_video_df[share_video_df['videoid'] != -1]
    log_.info(f"share_video_df count = {len(share_video_df)}")
    if len(share_group_df) == 0 or len(share_video_df) == 0:
        sys.exit(1)
    # ###### Data for the not-exit objective
    # Get user-group predictions (probability of not exiting right after an ad is shown)
    out_user_data_key = config_param['out']['user'].get('data')
    out_user_rule_key = config_param['out']['user'].get('rule')
    out_group_key_name = f"{config_.KEY_NAME_PREFIX_AD_GROUP}{out_user_data_key}:{out_user_rule_key}:{dt}"
    out_group_data = redis_helper.get_all_data_from_zset(key_name=out_group_key_name, with_scores=True)
    if out_group_data is None:
        log_.info("out group data is None!")
    out_group_df = pd.DataFrame(data=out_group_data, columns=['group', 'group_ad_not_out_rate'])
    out_group_df = out_group_df[out_group_df['group'] != 'mean_group']
    log_.info(f"out_group_df count = {len(out_group_df)}")
    # Get video predictions (probability of not exiting right after an ad is shown)
    out_video_data_key = config_param['out']['video'].get('data')
    out_video_key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{out_video_data_key}:{dt}"
    out_video_data = redis_helper.get_all_data_from_zset(key_name=out_video_key_name, with_scores=True)
    if out_video_data is None:
        log_.info("out video data is None!")
    out_video_df = pd.DataFrame(data=out_video_data, columns=['videoid', 'video_ad_not_out_rate'])
    out_video_df = out_video_df[out_video_df['videoid'] != -1]
    log_.info(f"out_video_df count = {len(out_video_df)}")
    if len(out_group_df) == 0 or len(out_video_df) == 0:
        sys.exit(1)
    # Weighted fusion
    share_weight = config_param['mix_param']['share_weight']
    out_weight = config_param['mix_param']['out_weight']
    # User-side data
    group_df = pd.merge(share_group_df, out_group_df, on='group')
    group_df['group_rate'] = \
        share_weight * group_df['group_ad_share_rate'] + out_weight * group_df['group_ad_not_out_rate']
    # Video-side data
    video_df = pd.merge(share_video_df, out_video_df, on='videoid')
    video_df['video_rate'] = \
        share_weight * video_df['video_ad_share_rate'] + out_weight * video_df['video_ad_not_out_rate']
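    # Illustrative example of the weighted fusion (hypothetical weights and rates): with
    # share_weight = 0.6 and out_weight = 0.4, a video with video_ad_share_rate = 0.02 and
    # video_ad_not_out_rate = 0.9 gets video_rate = 0.6 * 0.02 + 0.4 * 0.9 = 0.372;
    # the group-side group_rate above is the same weighted sum of the group rates.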
    predict_df = video_df.copy()
    all_group_data = []
    for _, item in group_df.iterrows():
        predict_df[item['group']] = predict_df['video_rate'] * item['group_rate']
        all_group_data.extend(predict_df[item['group']].tolist())
    # Compute the corresponding thresholds
    ad_threshold_mappings = threshold_record.get(config_key.split('-')[0])
    for abtest_group, ad_threshold_mapping in ad_threshold_mappings.items():
        threshold_data = {}
        for _, item in group_df.iterrows():
            # Use the group's mean prediction, scaled by the ab-test factor, as its threshold
            threshold_data[item['group']] = predict_df[item['group']].mean() * ad_threshold_mapping['group']
        threshold_data['mean_group'] = np.mean(all_group_data) * ad_threshold_mapping['mean_group']
        # User groups that should see more ads, and their threshold ratios
        more_ad = config_param.get('more_ad', None)
        if more_ad is not None:
            for group_key, group_threshold_rate in more_ad.items():
                threshold_data[group_key] = threshold_data[group_key] * group_threshold_rate
        log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, threshold_data = {threshold_data}")
        # Write the thresholds to Redis
        abtest_config_list = config_key.split('-')
        abtest_id, abtest_config_tag = abtest_config_list[0], abtest_config_list[1]
        for key, val in threshold_data.items():
            key_name = f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
            redis_helper.set_data_to_redis(key_name=key_name, value=val, expire_time=2 * 24 * 3600)
        # Compute care-mode experiment thresholds and write them to Redis
        care_model = config_param.get('care_model', None)
        threshold_rate = config_param.get('threshold_rate', None)
        if care_model is True:
            care_model_threshold_data = {}
            for key, val in threshold_data.items():
                up_val = val * threshold_rate
                care_model_threshold_data[key] = up_val
                up_key_name = \
                    f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD_CARE_MODEL}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
                redis_helper.set_data_to_redis(key_name=up_key_name, value=up_val, expire_time=2 * 24 * 3600)
            log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, "
                      f"care_model_threshold_data = {care_model_threshold_data}")
    # predict_df.to_csv(f'./data/ad_user_video_predict_{config_key}.csv')
    log_.info(f"config_key = {config_key} update end!")


def predict_ad_group_video_mix_with_multiply(dt, config_key, config_param, threshold_record):
    """Same as predict_ad_group_video, but fuses the share objective and the not-exit
    objective by multiplication before computing thresholds."""
    log_.info(f"config_key = {config_key} update start ...")
    # ###### Data for the share objective
    # Get user-group predictions (probability of sharing after an ad is shown)
    share_user_data_key = config_param['share']['user'].get('data')
    share_user_rule_key = config_param['share']['user'].get('rule')
    share_group_key_name = f"{config_.KEY_NAME_PREFIX_AD_GROUP}{share_user_data_key}:{share_user_rule_key}:{dt}"
    share_group_data = redis_helper.get_all_data_from_zset(key_name=share_group_key_name, with_scores=True)
    if share_group_data is None:
        log_.info("share group data is None!")
    share_group_df = pd.DataFrame(data=share_group_data, columns=['group', 'group_ad_share_rate'])
    share_group_df = share_group_df[share_group_df['group'] != 'mean_group']
    log_.info(f"share_group_df count = {len(share_group_df)}")
    # Get video predictions (probability of sharing after an ad is shown)
    share_video_data_key = config_param['share']['video'].get('data')
    share_video_key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{share_video_data_key}:{dt}"
    share_video_data = redis_helper.get_all_data_from_zset(key_name=share_video_key_name, with_scores=True)
    if share_video_data is None:
        log_.info("share video data is None!")
    share_video_df = pd.DataFrame(data=share_video_data, columns=['videoid', 'video_ad_share_rate'])
    share_video_df = share_video_df[share_video_df['videoid'] != -1]
    log_.info(f"share_video_df count = {len(share_video_df)}")
    if len(share_group_df) == 0 or len(share_video_df) == 0:
        sys.exit(1)
    # ###### Data for the not-exit objective
    # Get user-group predictions (probability of not exiting right after an ad is shown)
    out_user_data_key = config_param['out']['user'].get('data')
    out_user_rule_key = config_param['out']['user'].get('rule')
    out_group_key_name = f"{config_.KEY_NAME_PREFIX_AD_GROUP}{out_user_data_key}:{out_user_rule_key}:{dt}"
    out_group_data = redis_helper.get_all_data_from_zset(key_name=out_group_key_name, with_scores=True)
    if out_group_data is None:
        log_.info("out group data is None!")
    out_group_df = pd.DataFrame(data=out_group_data, columns=['group', 'group_ad_not_out_rate'])
    out_group_df = out_group_df[out_group_df['group'] != 'mean_group']
    log_.info(f"out_group_df count = {len(out_group_df)}")
    # Get video predictions (probability of not exiting right after an ad is shown)
    out_video_data_key = config_param['out']['video'].get('data')
    out_video_key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{out_video_data_key}:{dt}"
    out_video_data = redis_helper.get_all_data_from_zset(key_name=out_video_key_name, with_scores=True)
    if out_video_data is None:
        log_.info("out video data is None!")
    out_video_df = pd.DataFrame(data=out_video_data, columns=['videoid', 'video_ad_not_out_rate'])
    out_video_df = out_video_df[out_video_df['videoid'] != -1]
    log_.info(f"out_video_df count = {len(out_video_df)}")
    if len(out_group_df) == 0 or len(out_video_df) == 0:
        sys.exit(1)
    # Product fusion
    # User-side data
    group_df = pd.merge(share_group_df, out_group_df, on='group')
    group_df['group_rate'] = group_df['group_ad_share_rate'] * group_df['group_ad_not_out_rate']
    # Video-side data
    video_df = pd.merge(share_video_df, out_video_df, on='videoid')
    video_df['video_rate'] = video_df['video_ad_share_rate'] * video_df['video_ad_not_out_rate']
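    # Illustrative example of the product fusion (hypothetical rates): video_ad_share_rate
    # = 0.02 and video_ad_not_out_rate = 0.9 give video_rate = 0.02 * 0.9 = 0.018, i.e. the
    # two objectives are combined as if they scaled independently, rather than weighted as
    # in predict_ad_group_video_mix_with_add.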
    predict_df = video_df.copy()
    all_group_data = []
    for _, item in group_df.iterrows():
        predict_df[item['group']] = predict_df['video_rate'] * item['group_rate']
        all_group_data.extend(predict_df[item['group']].tolist())
    # Compute the corresponding thresholds
    ad_threshold_mappings = threshold_record.get(config_key.split('-')[0])
    for abtest_group, ad_threshold_mapping in ad_threshold_mappings.items():
        threshold_data = {}
        for _, item in group_df.iterrows():
            # Use the group's mean prediction, scaled by the ab-test factor, as its threshold
            threshold_data[item['group']] = predict_df[item['group']].mean() * ad_threshold_mapping['group']
        threshold_data['mean_group'] = np.mean(all_group_data) * ad_threshold_mapping['mean_group']
        # User groups that should see more ads, and their threshold ratios
        more_ad = config_param.get('more_ad', None)
        if more_ad is not None:
            for group_key, group_threshold_rate in more_ad.items():
                threshold_data[group_key] = threshold_data[group_key] * group_threshold_rate
        log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, threshold_data = {threshold_data}")
        # Write the thresholds to Redis
        abtest_config_list = config_key.split('-')
        abtest_id, abtest_config_tag = abtest_config_list[0], abtest_config_list[1]
        for key, val in threshold_data.items():
            key_name = f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
            redis_helper.set_data_to_redis(key_name=key_name, value=val, expire_time=2 * 24 * 3600)
        # Compute care-mode experiment thresholds and write them to Redis
        care_model = config_param.get('care_model', None)
        threshold_rate = config_param.get('threshold_rate', None)
        if care_model is True:
            care_model_threshold_data = {}
            for key, val in threshold_data.items():
                up_val = val * threshold_rate
                care_model_threshold_data[key] = up_val
                up_key_name = \
                    f"{config_.KEY_NAME_PREFIX_AD_THRESHOLD_CARE_MODEL}{abtest_id}:{abtest_config_tag}:{abtest_group}:{key}"
                redis_helper.set_data_to_redis(key_name=up_key_name, value=up_val, expire_time=2 * 24 * 3600)
            log_.info(f"config_key = {config_key}, abtest_group = {abtest_group}, "
                      f"care_model_threshold_data = {care_model_threshold_data}")
    # predict_df.to_csv(f'./data/ad_user_video_predict_{config_key}.csv')
    log_.info(f"config_key = {config_key} update end!")


def predict():
    """Update ad thresholds for every ab-test configuration and report the result to Feishu."""
    try:
        now_date = datetime.datetime.today()
        dt = datetime.datetime.strftime(now_date, '%Y%m%d')
        log_.info(f"dt = {dt}")
        # Get the recorded threshold parameters
        threshold_record = redis_helper.get_data_from_redis(key_name=config_.KEY_NAME_PREFIX_AD_THRESHOLD_RECORD)
        # print(threshold_record)
        threshold_record = eval(threshold_record)
        log_.info(f"threshold_record = {threshold_record}")
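        # Note: the record is written back below as str(dict), so eval() here simply parses
        # the stringified dict of {abtest_id: {abtest_group: {'group': factor, 'mean_group': factor}}}.
        # Assuming the record only ever contains plain literals, ast.literal_eval would be a
        # stricter way to parse it.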
        params = config_.AD_ABTEST_CONFIG
        for config_key, config_param in params.items():
            if config_param.get('threshold_mix_func') == 'add':
                predict_ad_group_video_mix_with_add(dt=dt,
                                                    config_key=config_key,
                                                    config_param=config_param,
                                                    threshold_record=threshold_record)
            elif config_param.get('threshold_mix_func') == 'multiply':
                predict_ad_group_video_mix_with_multiply(dt=dt,
                                                         config_key=config_key,
                                                         config_param=config_param,
                                                         threshold_record=threshold_record)
            else:
                predict_ad_group_video(dt=dt,
                                       config_key=config_key,
                                       config_param=config_param,
                                       threshold_record=threshold_record)
        # Record the threshold parameters
        # redis_helper.set_data_to_redis(key_name=config_.KEY_NAME_PREFIX_AD_THRESHOLD_RECORD,
        #                                value=str(config_.AD_ABTEST_THRESHOLD_CONFIG),
        #                                expire_time=24*3600)
        redis_helper.set_data_to_redis(key_name=config_.KEY_NAME_PREFIX_AD_THRESHOLD_RECORD,
                                       value=str(threshold_record),
                                       expire_time=2 * 24 * 3600)
        msg_list = [
            f"env: rov-offline {config_.ENV_TEXT}",
            f"finished time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
        ]
        send_msg_to_feishu_new(
            webhook=config_.FEISHU_ROBOT['ad_threshold_update_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['ad_threshold_update_robot'].get('key_word'),
            title='广告模型阈值更新完成',
            msg_list=msg_list
        )
    except Exception as e:
        log_.error(f"广告模型阈值更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        msg_list = [
            f"env: rov-offline {config_.ENV_TEXT}",
            f"now time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
            f"exception: {e}",
            f"traceback: {traceback.format_exc()}",
        ]
        send_msg_to_feishu_new(
            webhook=config_.FEISHU_ROBOT['ad_threshold_update_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['ad_threshold_update_robot'].get('key_word'),
            title='广告模型阈值更新失败',
            msg_list=msg_list
        )


if __name__ == '__main__':
    # predict_ad_group_video()
    predict()