@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+import json
+import sys
+from datetime import datetime, timedelta
+from threading import Timer
+
+from odps import ODPS
+
+from alg_recsys_recall_4h_region_trend import records_process_for_list
+from config import set_config
+from log import Log
+from utils import RedisHelper, execute_sql_from_odps
+
+
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
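+# Key layout: REDIS_PREFIX + <video_id> holds a JSON string of hourly metrics for
+# one video; REDIS_PREFIX + "partition" records the latest partition written.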
+REDIS_PREFIX = "item_rt_fea_1hroot_"
+
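+# Each video's value is the JSON built in get_sql, e.g.
+# {"return": "2024011700:12,2024011701:8,...", "view": "2024011700:100,...", ...}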
+def process_and_store(row):
+    """Write one [video_id, json_str] pair to Redis with a 6-hour TTL."""
+    video_id, json_str = row
+    key = REDIS_PREFIX + str(video_id)
+    expire_time = 6 * 3600
+    redis_helper.set_data_to_redis(key, json_str, expire_time)
+    # log_.info("wrote video data key={}, value={}".format(key, json_str))
+
+def check_data(project, table, partition) -> int:
+    """Check whether the upstream partition is ready; return its row count."""
+    odps = ODPS(
+        access_id=config_.ODPS_CONFIG['ACCESSID'],
+        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
+        project=project,
+        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
+        connect_timeout=3000,
+        read_timeout=500000,
+        pool_maxsize=1000,
+        pool_connections=1000
+    )
+    try:
+        t = odps.get_table(name=table)
+        log_.info(f"checking whether partition exists - [ dt={partition} ]")
+        check_res = t.exist_partition(partition_spec=f'dt={partition}')
+        if check_res:
+            sql = f"select * from {project}.{table} where dt = '{partition}'"
+            log_.info(sql)
+            with odps.execute_sql(sql=sql).open_reader() as reader:
+                data_count = reader.count
+        else:
+            log_.info("table {} partition {} does not exist".format(table, partition))
+            data_count = 0
+    except Exception as e:
+        log_.error("table:{}, partition:{} no data, returning data_count=0: {}".format(table, partition, e))
+        data_count = 0
+    return data_count
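+
+# get_sql: for each videoid, aggregate the hourly rows between '<previous_date>00'
+# and '<date>23' into CSV strings of "dt:value" pairs, one per metric column.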
+def get_sql(date, previous_date_str, project):
+    sql = '''
+    SELECT  videoid
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",`return`))) AS `return`
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",`view`))) AS `view`
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share))) AS share
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return1))) AS return1
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return2))) AS return2
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return3))) AS return3
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return4))) AS return4
+    FROM    (
+        SELECT  videoid
+                ,dt
+                ,return_all AS `return`
+                ,lastonehour_recommend_view AS `view`
+                ,lastonehour_recommend_share AS share
+                ,lastonehour_recommend_return1 AS return1
+                ,lastonehour_recommend_return2n AS return2
+                ,last2nhour_return1 AS return3
+                ,last2nhour_return2n AS return4
+        FROM    loghubods.video_return_composition_1hour
+        WHERE   dt <= '{}23'
+        AND     dt >= '{}00'
+        GROUP BY videoid, dt
+    )
+    GROUP BY videoid
+    '''.format(date, previous_date_str)
+    print("sql:" + sql)
+    records = execute_sql_from_odps(project=project, sql=sql)
+    video_list = []
+    with records.open_reader() as reader:
+        for record in reader:
+            video_id = record['videoid']
+            m = dict()
+            # copy each metric column if present; log and skip on failure
+            for field in ["return", "view", "share", "return1", "return2", "return3", "return4"]:
+                try:
+                    m[field] = record[field]
+                except Exception as e:
+                    log_.error(e)
+            json_str = json.dumps(m)
+            video_list.append([video_id, json_str])
+    return video_list
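+
+# h_timer_check: take date/hour from argv (or fall back to system time), poll the
+# upstream partition once a minute via Timer, then write features and a partition
+# marker to Redis. Note the Timer retry is asynchronous, so __main__'s final log
+# line can appear before a delayed run finishes.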
+def h_timer_check():
+    try:
+        date = sys.argv[1]
+        hour = sys.argv[2]
+    except Exception as e:
+        now_date = datetime.today()
+        date = now_date.strftime('%Y%m%d')
+        # strftime keeps the hour zero-padded ("05"), matching the dt partition format
+        hour = datetime.now().strftime('%H')
+        log_.info("no arguments given, falling back to system time: {}".format(e))
+    # 1. check whether the upstream table has finished producing data
+    project = "loghubods"
+    table = "video_return_composition_1hour"
+    partition = str(date) + str(hour)
+    table_data_cnt = check_data(project, table, partition)
+    if table_data_cnt == 0:
+        log_.info("upstream data {} not ready for partition {}, waiting...".format(table, partition))
+        # re-check every 60 seconds until the partition appears
+        Timer(60, h_timer_check).start()
+    else:
+        log_.info("upstream data ready, count={}, reading the table".format(table_data_cnt))
+        # 2. read the table and build features
+        previous_date_str = (datetime.strptime(date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
+        video_list = get_sql(date, previous_date_str, project)
+        # 3. write to redis
+        log_.info("number of videos: {}".format(len(video_list)))
+        records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
+
+        redis_helper.set_data_to_redis(REDIS_PREFIX + "partition", partition, 24 * 3600)
+
+
+if __name__ == '__main__':
+    log_.info("start: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+    h_timer_check()
+    log_.info("done: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+
+
+# cd /root/zhangbo/rov-offline
+# python alg_recsys_rank_item_realtime_1h.py 20240117 20
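+
+# A serving-side reader might fetch the features back like so (a minimal sketch;
+# the exact RedisHelper getter name is an assumption - adapt to its real API):
+#   raw = redis_helper.get_data_from_redis(REDIS_PREFIX + str(video_id))
+#   features = json.loads(raw) if raw else {}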