user_group_update.py 3.6 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091
  1. import datetime
  2. import multiprocessing
  3. import traceback
  4. from threading import Timer
  5. from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
  6. from config import set_config
  7. from log import Log
# Module-level singletons: environment config, logger, and Redis client.
config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()
# Columns to pull from the user-group table: 'apptype' plus one column per
# behavior bucket, each holding the mids (user ids) assigned to that group.
features = [
    'apptype',
    'return1mids',
    'return2_3mids',
    'return4_8mids',
    'return9_24mids',
    'return25_nmids',
    'return0share1mids',
    'return0share2_nmids'
]
  21. def update_user_group_to_redis(project, table, dt, app_type):
  22. """更新mid对应分组到redis中"""
  23. # 获取用户分组数据
  24. feature_df = get_feature_data(project=project, table=table, features=features, dt=dt)
  25. feature_df['apptype'] = feature_df['apptype'].astype(int)
  26. feature_df = feature_df[feature_df['apptype'] == app_type]
  27. group_list = features[1:]
  28. for group in group_list:
  29. log_.info(f"group = {group} update redis start ...")
  30. mid_list = feature_df[group].tolist()
  31. mid_list = list(set(mid_list))
  32. mid_list = [mid for mid in mid_list if mid is not None]
  33. log_.info(f"mid count = {len(mid_list)}")
  34. # pool = multiprocessing.Pool(processes=2)
  35. for mid in mid_list:
  36. # print(mid)
  37. key_name = f"{config_.KEY_NAME_PREFIX_MID_GROUP}{mid}"
  38. redis_helper.set_data_to_redis(key_name=key_name, value=group, expire_time=25 * 3600)
  39. # pool.apply_async(
  40. # func=redis_helper.set_data_to_redis,
  41. # args=(key_name, group, 25 * 3600)
  42. # )
  43. # pool.close()
  44. # pool.join()
  45. log_.info(f"group = {group}, mid count = {len(mid_list)}, update redis finished!")
  46. def timer_check():
  47. try:
  48. app_type = config_.APP_TYPE['VLOG']
  49. project = config_.ad_model_data['user_group'].get('project')
  50. table = config_.ad_model_data['user_group'].get('table')
  51. now_date = datetime.datetime.today() - datetime.timedelta(days=1)
  52. dt = datetime.datetime.strftime(now_date, '%Y%m%d')
  53. log_.info(f"now_date: {dt}")
  54. now_min = datetime.datetime.now().minute
  55. # 查看当前更新的数据是否已准备好
  56. data_count = data_check(project=project, table=table, dt=dt)
  57. if data_count > 0:
  58. log_.info(f"user group data count = {data_count}")
  59. # 数据准备好,进行更新
  60. update_user_group_to_redis(project=project, table=table, dt=dt, app_type=app_type)
  61. log_.info(f"user group data update end!")
  62. elif now_min > 45:
  63. log_.info('user group data is None!')
  64. send_msg_to_feishu(
  65. webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
  66. key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
  67. msg_text=f"rov-offline{config_.ENV_TEXT} - 用户分组数据未准备好!\n"
  68. f"traceback: {traceback.format_exc()}"
  69. )
  70. else:
  71. # 数据没准备好,1分钟后重新检查
  72. Timer(60, timer_check).start()
  73. except Exception as e:
  74. log_.error(f"用户分组数据更新失败, exception: {e}, traceback: {traceback.format_exc()}")
  75. send_msg_to_feishu(
  76. webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
  77. key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
  78. msg_text=f"rov-offline{config_.ENV_TEXT} - 用户分组数据更新失败\n"
  79. f"exception: {e}\n"
  80. f"traceback: {traceback.format_exc()}"
  81. )
if __name__ == '__main__':
    # Script entry point: kick off the first data-readiness check.
    timer_check()