alg_recsys_rank_item_realtime_1h.py

# -*- coding: utf-8 -*-
import os
import sys

root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
if root_dir not in sys.path:
    sys.path.append(root_dir)
print("******** sys.path ********")
print(sys.path)

from multiprocessing import Process
from odps import ODPS
from threading import Timer
import threading
from my_utils import RedisHelper, execute_sql_from_odps
from my_config import set_config
from log import Log
import json
from datetime import datetime, timedelta
from queue import Queue
from tqdm import tqdm
import time

config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

REDIS_PREFIX = "item_rt_fea_1h_"
EXPIRE_TIME = 24 * 3600      # Redis TTL: 24 hours
TIME_LIMIT_TABLE = 1         # minutes to wait for the upstream table before giving up
TIME_LIMIT_TASK = 2          # minutes the whole task may run before being killed
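
# Pipeline overview (summarised from main() below):
#   1. Wait until the upstream hourly table has data for the current dt partition.
#   2. Pull per-video hourly features with an ODPS SQL query.
#   3. Write each video's features to Redis as a JSON string with a 24h TTL.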
def worker(queue, executor):
    """Consume rows from the queue and run executor(row) on each until a stop signal arrives."""
    while True:
        row = queue.get()
        if row is None:  # stop signal
            queue.task_done()
            break
        executor(row)
        queue.task_done()
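
# Note: each worker exits when it dequeues None; records_process_for_list() below
# puts exactly one None per worker thread so every thread terminates.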
def records_process_for_list(records, executor, max_size=50, num_workers=10):
    """Process records concurrently: a bounded queue feeds num_workers threads that each run executor(row)."""
    # Thread-safe bounded queue; maxsize limits memory usage
    queue = Queue(maxsize=max_size)
    # Start the worker threads
    threads = []
    for _ in range(num_workers):
        t = threading.Thread(target=worker, args=(queue, executor))
        t.start()
        threads.append(t)
    # Feed the records into the queue
    for row in tqdm(records):
        queue.put(row)
    # Send one stop signal per worker
    for _ in range(num_workers):
        queue.put(None)
    # Wait until every queued task has been processed
    queue.join()
    # Wait for all worker threads to exit
    for t in threads:
        t.join()
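
# Minimal usage sketch (illustrative only; `rows` and `handle` are hypothetical names):
#
#   rows = [["a", "1"], ["b", "2"]]
#   def handle(row):
#       print(row)
#   records_process_for_list(rows, handle, max_size=10, num_workers=2)
#
# In this script it is called with the [video_id, json_str] pairs returned by
# get_sql() and process_and_store() as the executor.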
def process_and_store(row):
    """Write one [table_key, json_str] pair to Redis under the hourly feature prefix."""
    table_key, json_str = row
    key = REDIS_PREFIX + str(table_key)
    expire_time = EXPIRE_TIME
    redis_helper.set_data_to_redis(key, json_str, expire_time)
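
# Illustrative example of what ends up in Redis (the videoid is made up):
#   key:   item_rt_fea_1h_12345678
#   value: the JSON string built in get_sql(), holding the eight *_list_1h fields
#   TTL:   EXPIRE_TIME (24 hours)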
def check_data(project, table, partition) -> int:
    """Check whether the partition's data is ready and return its row count (0 if missing or on error)."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        t = odps.get_table(name=table)
        log_.info(f"Checking whether partition exists - [ dt={partition} ]")
        check_res = t.exist_partition(partition_spec=f'dt={partition}')
        if check_res:
            sql = f'select * from {project}.{table} where dt = {partition}'
            log_.info(sql)
            with odps.execute_sql(sql=sql).open_reader() as reader:
                data_count = reader.count
        else:
            log_.info("Table {} partition {} does not exist".format(table, partition))
            data_count = 0
    except Exception as e:
        log_.error("table:{}, partition:{} no data, returning data_count=0; error: {}".format(table, partition, e))
        data_count = 0
    return data_count
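
# Example (values taken from the usage note at the bottom of this file):
#   check_data("loghubods", "video_each_hour_update_no_province_apptype", "2024070214")
# returns the row count of the dt=2024070214 partition, or 0 if it is not ready yet.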
def get_sql(date, previous_date_str, project):
    """Query per-video hourly features from ODPS and return a list of [video_id, json_str] pairs."""
    sql = '''
    SELECT videoid
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_uv))) AS view_uv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_pv))) AS view_pv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_uv))) AS play_uv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_pv))) AS play_pv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_uv))) AS share_uv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_pv))) AS share_pv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return_uv))) AS return_uv_list_1h
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_return_uv))) AS p_return_uv_list_1h
    FROM (
        SELECT videoid
            ,dt
            ,SUM(lastonehour_view) AS view_uv
            ,SUM(lastonehour_view_total) AS view_pv
            ,SUM(lastonehour_play) AS play_uv
            ,SUM(lastonehour_play_total) AS play_pv
            ,SUM(lastonehour_share) AS share_uv
            ,SUM(lastonehour_share_total) AS share_pv
            ,SUM(lastonehour_return) AS return_uv
            ,SUM(platform_return) AS p_return_uv
        FROM loghubods.video_each_hour_update_no_province_apptype
        WHERE dt <= '{}23'
        AND dt >= '{}00'
        GROUP BY videoid
            ,dt
    )
    GROUP BY videoid
    '''.format(date, previous_date_str)
    print("sql:" + sql)
    records = execute_sql_from_odps(project=project, sql=sql)
    video_list = []
    with records.open_reader() as reader:
        for record in reader:
            video_id = record['videoid']
            m = dict()
            # Copy each hourly feature column into the dict; a missing column is only logged
            feature_cols = [
                "view_uv_list_1h", "view_pv_list_1h",
                "play_uv_list_1h", "play_pv_list_1h",
                "share_uv_list_1h", "share_pv_list_1h",
                "return_uv_list_1h", "p_return_uv_list_1h",
            ]
            for col in feature_cols:
                try:
                    m[col] = record[col]
                except Exception as e:
                    log_.error(e)
            json_str = json.dumps(m)
            video_list.append([video_id, json_str])
    return video_list
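
# Each *_list_1h value produced by the SQL is a comma-separated "dt:value" string,
# e.g. view_uv_list_1h = "2024070113:3,2024070114:7" (hours without data are simply absent).
# The numbers here are illustrative only.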
def main():
    try:
        date = sys.argv[1]
        hour = sys.argv[2]
    except Exception as e:
        # No arguments given: fall back to the current system time.
        # Use a zero-padded hour so the partition matches the YYYYMMDDHH format (e.g. 2024070205).
        date = datetime.now().strftime('%Y%m%d')
        hour = datetime.now().strftime('%H')
        log_.info("No arguments provided, using system time: {}".format(e))
    log_.info("Time parameters - date: {}, hour: {}".format(date, str(hour)))
    if hour in []:
        log_.info(f"hour={hour} is excluded, returning without running.")
        return
    # 1. Wait for the upstream table partition to be produced
    project = "loghubods"
    table = "video_each_hour_update_no_province_apptype"
    partition = str(date) + str(hour)
    run_flag = True
    begin_ts = int(time.time())
    table_data_cnt = 0
    while run_flag:
        if int(time.time()) - begin_ts >= 60 * TIME_LIMIT_TABLE:
            log_.info("Waited more than {} minute(s) for upstream data, giving up after {} seconds.".format(
                TIME_LIMIT_TABLE, int(time.time()) - begin_ts))
            sys.exit(1)
        table_data_cnt = check_data(project, table, partition)
        if table_data_cnt == 0:
            log_.info("Upstream data {} not ready for {}/{}, waiting...".format(table, date, hour))
            log_.info("Sleeping for 2 minutes")
            time.sleep(60 * 2)
        else:
            run_flag = False
            log_.info("Upstream data ready, count={}, start reading the table".format(table_data_cnt))
    # 2. Read the table and build the features
    previous_date_str = (datetime.strptime(date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
    video_list = get_sql(date, previous_date_str, project)
    # 3. Write to Redis
    records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
    redis_helper.set_data_to_redis(REDIS_PREFIX + "partition", partition, 24 * 3600)
if __name__ == '__main__':
    log_.info("Start: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    # Run main() in a child process so the whole task can be killed on timeout
    process = Process(target=main)
    process.start()
    # Wait for the child process to finish, or kill it after the timeout
    timeout = 60 * TIME_LIMIT_TASK
    process.join(timeout=timeout)
    if process.is_alive():
        print("Script exceeded the {}-second limit, treating as failed.".format(timeout))
        process.terminate()  # kill the child process
        sys.exit(1)  # exit the parent process with status code 1
    log_.info("Finished: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
  212. # cd /root/zhangbo/rov-offline
  213. # python alg_recsys_rank_item_realtime_1h.py 20240702 14