alg_recsys_recall_aftermerge.py

# -*- coding: utf-8 -*-
import multiprocessing
import traceback
import gevent
import datetime
from odps import ODPS
from my_utils import RedisHelper, filter_shield_video, check_table_partition_exits, send_msg_to_feishu
from my_config import set_config
from log import Log
from check_video_limit_distribute import update_limit_video_score

# os.environ['NUMEXPR_MAX_THREADS'] = '16'

config_, _ = set_config()
log_ = Log()
region_code = config_.REGION_CODE

RULE_PARAMS = {
    'rule_params': {
        'rule66': {
            'view_type': 'video-show-region', 'platform_return_rate': 0.001,
            'region_24h_rule_key': 'rule66', '24h_rule_key': 'rule66'
        },
        'rule67': {
            'view_type': 'video-show-region', 'platform_return_rate': 0.001,
            'region_24h_rule_key': 'rule66', '24h_rule_key': 'rule66', 'h_rule_key': 'rule66'
        },
        'rule68': {
            'view_type': 'video-show-region', 'platform_return_rate': 0.001,
            'region_24h_rule_key': 'rule66', '24h_rule_key': 'rule66',
            'score_func': 'back_rate_exponential_weighting1'
        },
    },
    'data_params': config_.DATA_PARAMS,
    'params_list': [
        # 532
        # {'data': 'data66', 'rule': 'rule66'},  # 523 -> 523 & 518
        {'data': 'data66', 'rule': 'rule67'},  # 523 -> 510
        # {'data': 'data66', 'rule': 'rule68'},  # 523 -> 514
        # {'data': 'data66', 'rule': 'rule69'},  # 523 -> 518
    ],
}
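
# How a params_list entry resolves at run time (a sketch, assuming
# config_.DATA_PARAMS defines 'data66'): each entry picks one data config and
# one rule config, and the rule config in turn names the hourly/24h pools to
# dedupe against. Roughly:
#   rule_param = RULE_PARAMS['rule_params']['rule67']
#   data_param = RULE_PARAMS['data_params']['data66']
#   rule_param['region_24h_rule_key']  # -> 'rule66': region-24h pool to dedupe against
#   rule_param['h_rule_key']           # -> 'rule66': enables the hourly no-region dedupe step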

features = [
    'apptype',
    'code',
    'videoid',
    'lastonehour_preview',        # pre-exposure users in the past hour - split by region
    'lastonehour_view',           # exposure users in the past hour - split by region
    'lastonehour_play',           # play users in the past hour - split by region
    'lastonehour_share',          # share users in the past hour - split by region
    'lastonehour_return',         # users returning in the past hour from past-hour shares - split by region
    'lastonehour_preview_total',  # pre-exposure count in the past hour - split by region
    'lastonehour_view_total',     # exposure count in the past hour - split by region
    'lastonehour_play_total',     # play count in the past hour - split by region
    'lastonehour_share_total',    # share count in the past hour - split by region
    'platform_return',
    'lastonehour_show',           # not split by region
    'lastonehour_show_region',    # grouped by region
    'lasttwohour_share',          # share users at h-2
    'lasttwohour_return_now',     # returns in the past hour from h-2 shares
    'lasttwohour_return',         # returns at h-2 from h-2 shares
    'lastthreehour_share',        # share users at h-3
    'lastthreehour_return_now',   # returns in the past hour from h-3 shares
    'lastthreehour_return',       # returns at h-3 from h-3 shares
    'lastonehour_return_new',     # returns in the past hour from past-hour shares (returns attributed to the sharing region; shares are region-restricted, returns are not)
    'lasttwohour_return_now_new',    # returns in the past hour from h-2 shares (same attribution rule)
    'lasttwohour_return_new',        # returns at h-2 from h-2 shares (same attribution rule)
    'lastthreehour_return_now_new',  # returns in the past hour from h-3 shares (same attribution rule)
    'lastthreehour_return_new',      # returns at h-3 from h-3 shares (same attribution rule)
    'platform_return_new',           # platform-distribution returns (same attribution rule)
]
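
# NOTE: `features` documents the upstream ODPS feature schema (per-region hourly
# exposure/play/share/return metrics). In this after-merge variant the feature
# dataframe is never loaded, so the list is kept as schema documentation only.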


def h_data_check(project, table, now_date):
    """Check whether this hour's data is ready in ODPS; return the partition's row count (0 if not ready)."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        dt = datetime.datetime.strftime(now_date, '%Y%m%d%H')
        check_res = check_table_partition_exits(date=dt, project=project, table=table)
        if check_res:
            sql = f'select * from {project}.{table} where dt = {dt}'
            with odps.execute_sql(sql=sql).open_reader() as reader:
                data_count = reader.count
        else:
            data_count = 0
    except Exception as e:
        log_.error(f"h_data_check failed: {e}")
        data_count = 0
    return data_count
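
# Usage sketch (hypothetical -- h_data_check is defined but never called in this
# after-merge variant; upstream jobs gate the hourly merge on it roughly like this):
#   if h_data_check(project=project, table=table, now_date=now_date) > 0:
#       rank_by_h(...)   # data ready: run the normal merge
#   else:
#       h_rank_bottom(...)   # data missing: fall back to the previous hour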


def video_rank(df, now_date, now_h, rule_key, param, region, data_key, rule_rank_h_flag,
               add_videos_with_pre_h=False, hour_count=0):
    shield_config = param.get('shield_config', config_.SHIELD_CONFIG)
    political_filter = param.get('political_filter', None)
    h_recall_key_name = \
        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H}{region}:{data_key}:{rule_key}:" \
        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    redis_helper = RedisHelper()
    if redis_helper.key_exists(key_name=h_recall_key_name):
        initial_data = redis_helper.get_all_data_from_zset(key_name=h_recall_key_name, with_scores=True)
        h_video_ids = [int(video_id) for video_id, _ in initial_data]
    else:
        h_video_ids = []
        log_.info("No region-hour data; downstream pools will not be deduped against it.")
    h_rule_key = param.get('h_rule_key', None)
    region_24h_rule_key = param.get('region_24h_rule_key', 'rule1')
    by_24h_rule_key = param.get('24h_rule_key', None)
    by_48h_rule_key = param.get('48h_rule_key', None)
    dup_remove = param.get('dup_remove', True)
    # Dedupe against the other recall video pools and store the results in Redis
    dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key, h_rule_key=h_rule_key,
                 region_24h_rule_key=region_24h_rule_key, by_24h_rule_key=by_24h_rule_key,
                 by_48h_rule_key=by_48h_rule_key, region=region, data_key=data_key,
                 rule_rank_h_flag=rule_rank_h_flag, political_filter=political_filter,
                 shield_config=shield_config, dup_remove=dup_remove)


def dup_data(h_video_ids, initial_key_name, dup_key_name, region, political_filter, shield_config, dup_remove):
    redis_helper = RedisHelper()
    if redis_helper.key_exists(key_name=initial_key_name):
        initial_data = redis_helper.get_all_data_from_zset(key_name=initial_key_name, with_scores=True)
        # Shield-video filtering: initial_video_ids currently holds every id from
        # initial_data, so the membership checks below are a no-op placeholder,
        # apparently left over from the removed shield/political filtering step.
        initial_video_ids = [int(video_id) for video_id, _ in initial_data]
        dup_data = {}
        # Dedupe logic: skip ids already seen; record newly kept ids as seen
        if dup_remove is True:
            for video_id, score in initial_data:
                if int(video_id) not in h_video_ids and int(video_id) in initial_video_ids:
                    dup_data[int(video_id)] = score
                    h_video_ids.append(int(video_id))
        else:
            for video_id, score in initial_data:
                if int(video_id) in initial_video_ids:
                    dup_data[int(video_id)] = score
        if len(dup_data) > 0:
            redis_helper.add_data_with_zset(key_name=dup_key_name, data=dup_data, expire_time=2 * 24 * 3600)
            # Adjust scores of distribution-limited videos
            update_limit_video_score(initial_videos=dup_data, key_name=dup_key_name)
    return h_video_ids
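

# Minimal sketch of the dedupe semantics above (illustration only; the function
# is never called and the ids are made up): ids already in `seen` are skipped,
# kept ids are appended to `seen` in place, mirroring how dup_data mutates and
# returns h_video_ids across successive calls.
def _dup_semantics_sketch():
    seen = [101, 102]
    candidates = [('101', 0.9), ('103', 0.8)]  # zset members come back as strings
    kept = {}
    for vid, score in candidates:
        if int(vid) not in seen:
            kept[int(vid)] = score
            seen.append(int(vid))
    assert kept == {103: 0.8}
    assert seen == [101, 102, 103]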


def dup_to_redis(h_video_ids, now_date, now_h, rule_key, h_rule_key, region_24h_rule_key, by_24h_rule_key,
                 by_48h_rule_key, region, data_key, rule_rank_h_flag, political_filter, shield_config, dup_remove):
    """Dedupe the region-grouped hourly data against the other recall video pools and store the results in Redis."""
    # ##### Dedupe against the hourly no-region list and store separately in Redis
    if h_rule_key is not None:
        h_key_name = \
            f"{config_.RECALL_KEY_NAME_PREFIX_BY_H_H}{data_key}:{h_rule_key}:" \
            f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
        h_dup_key_name = \
            f"{config_.RECALL_KEY_NAME_PREFIX_DUP_H_H}{region}:{data_key}:{rule_key}:" \
            f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
        log_.info("Start dedupe [1h, no region]; destination key prefix: {}".format(h_dup_key_name))
        h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=h_key_name,
                               dup_key_name=h_dup_key_name, region=region, political_filter=political_filter,
                               shield_config=shield_config, dup_remove=dup_remove)

    # ##### Dedupe against the region-grouped relative-24h list and store separately in Redis
    region_24h_key_name = \
        f"{config_.RECALL_KEY_NAME_PREFIX_REGION_BY_24H}{region}:{data_key}:{region_24h_rule_key}:" \
        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    region_24h_dup_key_name = \
        f"{config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H}{region}:{data_key}:{rule_key}:" \
        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    log_.info("Start dedupe [24h, by region]; destination key prefix: {}".format(region_24h_dup_key_name))
    h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=region_24h_key_name,
                           dup_key_name=region_24h_dup_key_name, region=region, political_filter=political_filter,
                           shield_config=shield_config, dup_remove=dup_remove)

    # ##### Dedupe against the mini-program relative-24h results and store separately in Redis
    h_24h_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H}{data_key}:{by_24h_rule_key}:" \
                     f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    h_24h_dup_key_name = \
        f"{config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H}{region}:{data_key}:{rule_key}:" \
        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    log_.info("Start dedupe [24h, no region]; destination key prefix: {}".format(h_24h_dup_key_name))
    h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=h_24h_key_name,
                           dup_key_name=h_24h_dup_key_name, region=region, political_filter=political_filter,
                           shield_config=shield_config, dup_remove=dup_remove)

    # ##### Dedupe against the mini-program relative-24h post-filter remainder and store separately in Redis
    other_h_24h_key_name = f"{config_.RECALL_KEY_NAME_PREFIX_BY_24H_OTHER}{data_key}:" \
                           f"{by_24h_rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    other_h_24h_dup_key_name = \
        f"{config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H}{region}:{data_key}:{rule_key}:" \
        f"{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
    log_.info("Start dedupe [24h, no region, other]; destination key prefix: {}".format(other_h_24h_dup_key_name))
    h_video_ids = dup_data(h_video_ids=h_video_ids, initial_key_name=other_h_24h_key_name,
                           dup_key_name=other_h_24h_dup_key_name, region=region, political_filter=political_filter,
                           shield_config=shield_config, dup_remove=dup_remove)
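

# Dedupe order used above, for reference (each step skips ids already kept by
# an earlier step, then writes its survivors to its own DUP key):
#   1. hourly, no region (DUP_H_H)          -- only when h_rule_key is set
#   2. region relative-24h (DUP1)
#   3. relative-24h, no region (DUP2)
#   4. relative-24h, no region, post-filter remainder (DUP3, "other")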


def process_with_region(region, df_merged, data_key, rule_key, rule_param, now_date, now_h,
                        rule_rank_h_flag, add_videos_with_pre_h, hour_count):
    log_.info(f"greenlet for region = {region} started")
    video_rank(df=None, now_date=now_date, now_h=now_h, rule_key=rule_key, param=rule_param,
               region=region, data_key=data_key, rule_rank_h_flag=rule_rank_h_flag,
               add_videos_with_pre_h=add_videos_with_pre_h, hour_count=hour_count)
    log_.info(f"greenlet for region = {region} finished")


def copy_data_for_city(region, city_code, data_key, rule_key, now_date, now_h, shield_config):
    """Copy the region's data to the city's Redis keys, applying shield-video filtering."""
    log_.info(f"city_code = {city_code} start ...")
    redis_helper = RedisHelper()
    key_prefix_list = [
        config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H,        # region hourly
        config_.RECALL_KEY_NAME_PREFIX_DUP1_REGION_24H_H,  # region relative-24h
        config_.RECALL_KEY_NAME_PREFIX_DUP2_REGION_24H_H,  # relative-24h, no region
        config_.RECALL_KEY_NAME_PREFIX_DUP3_REGION_24H_H,  # relative-24h, no region, post-filter
        config_.RECALL_KEY_NAME_PREFIX_DUP_REGION_H,       # ROV full list
    ]
    for key_prefix in key_prefix_list:
        region_key = f"{key_prefix}{region}:{data_key}:{rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
        city_key = f"{key_prefix}{city_code}:{data_key}:{rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
        if not redis_helper.key_exists(key_name=region_key):
            continue
        region_data = redis_helper.get_all_data_from_zset(key_name=region_key, with_scores=True)
        if not region_data:
            continue
        # Shield-video filtering
        region_video_ids = [int(video_id) for video_id, _ in region_data]
        shield_key_name_list = shield_config.get(city_code, None)
        # shield_key_name_list = config_.SHIELD_CONFIG.get(city_code, None)
        if shield_key_name_list is not None:
            filtered_video_ids = filter_shield_video(video_ids=region_video_ids,
                                                     shield_key_name_list=shield_key_name_list)
        else:
            filtered_video_ids = region_video_ids
        city_data = {}
        for video_id, score in region_data:
            if int(video_id) in filtered_video_ids:
                city_data[int(video_id)] = score
        if len(city_data) > 0:
            redis_helper.add_data_with_zset(key_name=city_key, data=city_data, expire_time=2 * 24 * 3600)
    log_.info(f"city_code = {city_code} end!")
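
# Redis key layout shared by the reads/writes above (a sketch, assuming each
# config_ prefix already carries its own trailing separator):
#   {key_prefix}{region_or_city_code}:{data_key}:{rule_key}:{YYYYMMDD}:{hour}
# copy_data_for_city only swaps the region code for a city code, so each city
# key mirrors exactly one region key for the same data/rule/hour.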


def process_with_param(param, data_params_item, rule_params_item, region_code_list, feature_df,
                       now_date, now_h, rule_rank_h_flag):
    data_key = param.get('data')
    data_param = data_params_item.get(data_key)
    rule_key = param.get('rule')
    rule_param = rule_params_item.get(rule_key)
    merge_func = rule_param.get('merge_func', None)
    log_.info("data key: {}, rule key: {}.".format(data_key, rule_key))
    log_.info("rule detail: {}.".format(rule_param))
    # Whether to add salvaged high-quality videos to the region-hour data
    add_videos_with_pre_h = rule_param.get('add_videos_with_pre_h', False)
    hour_count = rule_param.get('hour_count', 0)
    if merge_func == 2:
        pass
    else:
        task_list = [
            gevent.spawn(process_with_region,
                         region, None, data_key, rule_key, rule_param, now_date, now_h, rule_rank_h_flag,
                         add_videos_with_pre_h, hour_count)
            for region in region_code_list
        ]
        gevent.joinall(task_list)
    log_.info(f"param = {param} finished!")
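
# Concurrency note: each params_list entry runs in its own worker process (see
# rank_by_h below), and within that process one gevent greenlet is spawned per
# region. The per-region work is I/O-bound (Redis round-trips), which is the
# case gevent's cooperative scheduling handles well.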


def rank_by_h(project, table, now_date, now_h, rule_params, region_code_list, rule_rank_h_flag):
    # Resolve the data/rule configs; one worker process per params_list entry
    data_params_item = rule_params.get('data_params')
    rule_params_item = rule_params.get('rule_params')
    params_list = rule_params.get('params_list')
    pool = multiprocessing.Pool(processes=len(params_list))
    for param in params_list:
        pool.apply_async(
            func=process_with_param,
            args=(param, data_params_item, rule_params_item, region_code_list, None, now_date, now_h, rule_rank_h_flag)
        )
    pool.close()
    pool.join()
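
# NOTE: apply_async is fire-and-forget here -- the AsyncResult objects are not
# collected, so an exception raised inside process_with_param is dropped
# silently by the pool. The code relies on the workers' own logging instead.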


def h_bottom_process(param, rule_params_item, region_code_list, key_prefix, redis_dt, redis_h,
                     now_date, now_h, rule_rank_h_flag):
    redis_helper = RedisHelper()
    data_key = param.get('data')
    rule_key = param.get('rule')
    rule_param = rule_params_item.get(rule_key)
    log_.info(f"data_key = {data_key}, rule_key = {rule_key}, rule_param = {rule_param}")
    h_rule_key = rule_param.get('h_rule_key', None)
    region_24h_rule_key = rule_param.get('region_24h_rule_key', 'rule1')
    by_24h_rule_key = rule_param.get('24h_rule_key', None)
    by_48h_rule_key = rule_param.get('48h_rule_key', None)
    # Political-video filter
    political_filter = param.get('political_filter', None)
    # Shield-video filter
    shield_config = param.get('shield_config', config_.SHIELD_CONFIG)
    dup_remove = param.get('dup_remove', True)
    for region in region_code_list:
        log_.info(f"region = {region}")
        key_name = f"{key_prefix}{region}:{data_key}:{rule_key}:{redis_dt}:{redis_h}"
        initial_data = redis_helper.get_all_data_from_zset(key_name=key_name, with_scores=True)
        if initial_data is None:
            initial_data = []
        final_data = dict()
        h_video_ids = []
        for video_id, score in initial_data:
            final_data[video_id] = score
            h_video_ids.append(int(video_id))
        # Store under the current hour's key
        final_key_name = \
            f"{key_prefix}{region}:{data_key}:{rule_key}:{datetime.datetime.strftime(now_date, '%Y%m%d')}:{now_h}"
        if len(final_data) > 0:
            redis_helper.add_data_with_zset(key_name=final_key_name, data=final_data, expire_time=2 * 24 * 3600)
        # Dedupe against the other recall video pools and store the results in Redis
        dup_to_redis(h_video_ids=h_video_ids, now_date=now_date, now_h=now_h, rule_key=rule_key, h_rule_key=h_rule_key,
                     region_24h_rule_key=region_24h_rule_key, region=region,
                     data_key=data_key, by_24h_rule_key=by_24h_rule_key,
                     by_48h_rule_key=by_48h_rule_key, rule_rank_h_flag=rule_rank_h_flag,
                     political_filter=political_filter, shield_config=shield_config, dup_remove=dup_remove)
    # Prepare video data for the special cities
    for region, city_list in config_.REGION_CITY_MAPPING.items():
        t = [
            gevent.spawn(
                copy_data_for_city,
                region, city_code, data_key, rule_key, now_date, now_h, shield_config
            )
            for city_code in city_list
        ]
        gevent.joinall(t)
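
# h_bottom_process is the per-param fallback worker: it replays the previous
# hour's zsets under the current hour's key names, then reruns the same dedupe
# chain, so downstream readers see a complete (if stale) set of keys.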


def h_rank_bottom(now_date, now_h, rule_params, region_code_list, rule_rank_h_flag):
    """Fallback: the data was not updated on time, so reuse the previous hour's result as the current hour's data."""
    if now_h == 0:
        redis_dt = datetime.datetime.strftime(now_date - datetime.timedelta(days=1), '%Y%m%d')
        redis_h = 23
    else:
        redis_dt = datetime.datetime.strftime(now_date, '%Y%m%d')
        redis_h = now_h - 1
    # Use the previous hour's region-grouped data as the current hour's data
    key_prefix = config_.RECALL_KEY_NAME_PREFIX_REGION_BY_H
    rule_params_item = rule_params.get('rule_params')
    params_list = rule_params.get('params_list')
    pool = multiprocessing.Pool(processes=len(params_list))
    for param in params_list:
        pool.apply_async(
            func=h_bottom_process,
            args=(param, rule_params_item, region_code_list, key_prefix, redis_dt, redis_h, now_date, now_h, rule_rank_h_flag)
        )
    pool.close()
    pool.join()


def h_timer_check():
    try:
        rule_rank_h_flag = "24h"
        rule_params = RULE_PARAMS
        project = config_.PROJECT_REGION_APP_TYPE
        table = config_.TABLE_REGION_APP_TYPE
        region_code_list = [code for region, code in region_code.items()]
        now_date = datetime.datetime.today()
        log_.info(f"start: {datetime.datetime.strftime(now_date, '%Y%m%d%H')}")
        now_h = datetime.datetime.now().hour
        now_min = datetime.datetime.now().minute
        if now_h == 0:
            log_.info("Current hour is {}; start merging with bottom data.".format(now_h))
            h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list,
                          rule_rank_h_flag=rule_rank_h_flag)
            log_.info("---------- hour {}: done with bottom data ----------".format(now_h))
            return
        # Check whether this hour still has enough time left for a normal run
        if now_min < 45:
            log_.info('Start normal merge')
            # Data is ready; update
            rank_by_h(now_date=now_date, now_h=now_h, rule_params=rule_params,
                      project=project, table=table, region_code_list=region_code_list,
                      rule_rank_h_flag=rule_rank_h_flag)
            log_.info("merge5 ---------- completed normally ----------")
        else:
            log_.info('Past minute 45; the run is unlikely to finish in time, falling back to bottom data!')
            h_rank_bottom(now_date=now_date, now_h=now_h, rule_params=rule_params, region_code_list=region_code_list,
                          rule_rank_h_flag=rule_rank_h_flag)
            log_.info('---------- past minute 45: done with bottom data ----------')
    except Exception as e:
        log_.error(f"Region-grouped hourly data update failed, exception: {e}, traceback: {traceback.format_exc()}")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - region-grouped hourly data update failed\n"
                     f"exception: {e}\n"
                     f"traceback: {traceback.format_exc()}"
        )
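
# Scheduling sketch (hypothetical -- this job runs h_timer_check once per
# invocation; a re-arming in-process timer would look roughly like this):
#   from threading import Timer
#   def h_timer_loop(interval_s=60):
#       h_timer_check()
#       Timer(interval_s, h_timer_loop, kwargs={'interval_s': interval_s}).start()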


if __name__ == '__main__':
    log_.info("alg_recsys_recall_aftermerge.py: [dedupe merge] starting")
    h_timer_check()