|
@@ -1,16 +1,26 @@
|
|
|
# -*- coding: utf-8 -*-
|
|
|
-import traceback
|
|
|
-import datetime
|
|
|
+import os
|
|
|
+import sys
|
|
|
+root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
|
|
|
+if root_dir not in sys.path:
|
|
|
+ sys.path.append(root_dir)
|
|
|
+ print("******** sys.path ********")
|
|
|
+ print(sys.path)
|
|
|
+
|
|
|
+from multiprocessing import Process
|
|
|
from odps import ODPS
|
|
|
from threading import Timer
|
|
|
-from my_utils import RedisHelper, get_data_from_odps, send_msg_to_feishu
|
|
|
+import threading
|
|
|
+from my_utils import RedisHelper, execute_sql_from_odps
|
|
|
from my_config import set_config
|
|
|
from log import Log
|
|
|
-from alg_recsys_recall_4h_region_trend import records_process_for_list
|
|
|
import json
|
|
|
-from datetime import datetime, timedelta
|
|
|
-import sys
|
|
|
-from my_utils import execute_sql_from_odps
|
|
|
+from datetime import datetime
|
|
|
+from queue import Queue
|
|
|
+from tqdm import tqdm
|
|
|
+import time
|
|
|
+
|
|
|
+
|
|
|
|
|
|
|
|
|
config_, _ = set_config()
|
|
@@ -18,13 +28,43 @@ log_ = Log()
|
|
|
redis_helper = RedisHelper()
|
|
|
|
|
|
REDIS_PREFIX = "item_rt_fea_1h_"
|
|
|
-
|
|
|
+EXPIRE_TIME = 24 * 3600
|
|
|
+
|
|
|
+def worker(queue, executor):
|
|
|
+ while True:
|
|
|
+ row = queue.get()
|
|
|
+ if row is None: # 结束信号
|
|
|
+ queue.task_done()
|
|
|
+ break
|
|
|
+ executor(row)
|
|
|
+ queue.task_done()
|
|
|
+def records_process_for_list(records, executor, max_size=50, num_workers=10):
|
|
|
+ # 创建一个线程安全的队列
|
|
|
+ queue = Queue(maxsize=max_size) # 可以调整 maxsize 以控制内存使用
|
|
|
+ # 设置线程池大小
|
|
|
+ # num_workers: size of the worker thread pool (parameter used as-is; former self-assignment was a no-op)
|
|
|
+ # 启动工作线程
|
|
|
+ threads = []
|
|
|
+ for _ in range(num_workers):
|
|
|
+ t = threading.Thread(target=worker, args=(queue, executor))
|
|
|
+ t.start()
|
|
|
+ threads.append(t)
|
|
|
+ # 读取数据并放入队列
|
|
|
+ for row in tqdm(records):
|
|
|
+ queue.put(row)
|
|
|
+ # 发送结束信号
|
|
|
+ for _ in range(num_workers):
|
|
|
+ queue.put(None)
|
|
|
+ # 等待所有任务完成
|
|
|
+ queue.join()
|
|
|
+ # 等待所有工作线程结束
|
|
|
+ for t in threads:
|
|
|
+ t.join()
|
|
|
def process_and_store(row):
|
|
|
- video_id, json_str = row
|
|
|
- key = REDIS_PREFIX + str(video_id)
|
|
|
- expire_time = 24 * 3600
|
|
|
+ table_key, json_str = row
|
|
|
+ key = REDIS_PREFIX + str(table_key)
|
|
|
+ expire_time = EXPIRE_TIME
|
|
|
redis_helper.set_data_to_redis(key, json_str, expire_time)
|
|
|
- # log_.info("video写入数据key={},value={}".format(key, json_str))
|
|
|
|
|
|
def check_data(project, table, partition) -> int:
|
|
|
"""检查数据是否准备好,输出数据条数"""
|
|
@@ -51,7 +91,7 @@ def check_data(project, table, partition) -> int:
|
|
|
log_.info("表{}分区{}不存在".format(table, partition))
|
|
|
data_count = 0
|
|
|
except Exception as e:
|
|
|
- log_.error("table:{},partition:{} no data. return data_count=0:{}".format(table, partition, e))
|
|
|
+ log_.error("table:{},partition:{} no data. return data_count=0,报错原因是:{}".format(table, partition, e))
|
|
|
data_count = 0
|
|
|
return data_count
|
|
|
|
|
@@ -129,44 +169,61 @@ def get_sql(date, previous_date_str, project):
|
|
|
return video_list
|
|
|
|
|
|
|
|
|
-def h_timer_check():
|
|
|
+def main():
|
|
|
try:
|
|
|
date = sys.argv[1]
|
|
|
hour = sys.argv[2]
|
|
|
except Exception as e:
|
|
|
- now_date = datetime.today()
|
|
|
- date = datetime.strftime(now_date, '%Y%m%d')
|
|
|
+ date = datetime.now().strftime('%Y%m%d')
|
|
|
hour = datetime.now().hour
|
|
|
- log_.info("没有读取到参数,采用系统时间,报错info:{}".format(e))
|
|
|
+ log_.info("没有读取到参数,采用系统时间: {}".format(e))
|
|
|
+ log_.info("使用时间参数-日期:{},小时:{}".format(date, str(hour)))
|
|
|
+ if hour in []:  # skip-hours list is intentionally empty; add hour values here to disable runs for those hours
|
|
|
+ log_.info(f"hour={hour}不执行,直接返回。")
|
|
|
+ return
|
|
|
# 1 判断上游数据表是否生产完成
|
|
|
project = "loghubods"
|
|
|
table = "video_each_hour_update_no_province_apptype"
|
|
|
partition = str(date) + str(hour)
|
|
|
- table_data_cnt = check_data(project, table, partition)
|
|
|
- if table_data_cnt == 0:
|
|
|
- log_.info("上游数据{}未就绪{},等待...".format(table, partition))
|
|
|
- Timer(60, h_timer_check).start()
|
|
|
- else:
|
|
|
- log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
|
|
|
- # 2 读取数据表 处理特征
|
|
|
- previous_date_str = (datetime.strptime(date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
|
|
|
- video_list = get_sql(date, previous_date_str, project)
|
|
|
- # 3 写入redis
|
|
|
- log_.info("video的数据量:{}".format(len(video_list)))
|
|
|
- records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
|
|
|
-
|
|
|
- redis_helper.set_data_to_redis(REDIS_PREFIX + "partition", partition, 24 * 3600)
|
|
|
-
|
|
|
-
|
|
|
+ run_flag = True
|
|
|
+ begin_ts = int(time.time())
|
|
|
+ table_data_cnt = 0
|
|
|
+ while run_flag:
|
|
|
+ if int(time.time()) - begin_ts >= 60 * 40:
|
|
|
+ log_.info("等待上游数据超过40分钟了,认为失败退出:过了{}秒。".format(int(time.time()) - begin_ts))
|
|
|
+ sys.exit(1)
|
|
|
+ table_data_cnt = check_data(project, table, partition)
|
|
|
+ if table_data_cnt == 0:
|
|
|
+ log_.info("上游数据{}未就绪{}/{},等待...".format(table, date, hour))
|
|
|
+ log_.info("等待2分钟")
|
|
|
+ time.sleep(60 * 2)
|
|
|
+ else:
|
|
|
+ run_flag = False
|
|
|
|
|
|
+ log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
|
|
|
+ # 2 读取数据表 处理特征
|
|
|
+ previous_date_str = datetime.fromordinal(datetime.strptime(date, "%Y%m%d").toordinal() - 1).strftime("%Y%m%d")  # NOTE: `datetime` is the class here (from datetime import datetime), so `datetime.timedelta` would raise AttributeError
|
|
|
+ video_list = get_sql(date, previous_date_str, project)
|
|
|
+ # 3 写入redis
|
|
|
+ records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
|
|
|
+ redis_helper.set_data_to_redis(REDIS_PREFIX + "partition", partition, 24 * 3600)
|
|
|
|
|
|
if __name__ == '__main__':
|
|
|
log_.info("开始执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
|
|
|
- h_timer_check()
|
|
|
+ process = Process(target=main)
|
|
|
+ process.start()
|
|
|
+ # 等待子进程完成或超时
|
|
|
+ timeout = 3600
|
|
|
+ process.join(timeout=timeout) # 设置超时为3600秒(1小时)
|
|
|
+ if process.is_alive():
|
|
|
+ print("脚本执行时间超过1小时,执行失败,经过了{}秒。".format(timeout))
|
|
|
+ process.terminate() # 终止子进程
|
|
|
+ sys.exit(1) # exit the main process with status code 1 (comment previously said 999, which did not match the code)
|
|
|
log_.info("完成执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+
|
|
|
# cd /root/zhangbo/rov-offline
|
|
|
# python alg_recsys_rank_item_realtime_1h.py 20240117 20
|