# -*- coding: utf-8 -*-
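"""Build 1-day realtime item features and load them into Redis.

Reads the hourly table loghubods.video_data_each_hour_dataset_24h_total_apptype
for the last 24 hours, packs the per-video feature lists into a JSON string,
and writes one Redis key per video (prefix "item_rt_fea_1day_", 24h TTL).
The target partition is polled until the upstream data is ready.

Usage: python alg_recsys_rank_item_realtime_1day.py <yyyymmdd> <hour>
"""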
import json
import sys
from datetime import datetime, timedelta
from threading import Timer

from odps import ODPS

from alg_recsys_recall_4h_region_trend import records_process_for_list
from log import Log
from my_config import set_config
from my_utils import RedisHelper, execute_sql_from_odps

config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

REDIS_PREFIX = "item_rt_fea_1day_"

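# Worker callback for records_process_for_list: writes one [video_id, json_str]
# row into Redis under REDIS_PREFIX + video_id with a 24-hour expiry.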
def process_and_store(row):
    video_id, json_str = row
    key = REDIS_PREFIX + str(video_id)
    expire_time = 24 * 3600
    redis_helper.set_data_to_redis(key, json_str, expire_time)
    # log_.info("write video feature, key={}, value={}".format(key, json_str))

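# Returns the row count of the dt=<partition> partition of the upstream table,
# or 0 if the partition does not exist yet or the query fails; used for polling.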
def check_data(project, table, partition) -> int:
    """Check whether the upstream data is ready and return its row count."""
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        t = odps.get_table(name=table)
        log_.info(f"checking whether partition exists - [ dt={partition} ]")
        check_res = t.exist_partition(partition_spec=f'dt={partition}')
        if check_res:
            sql = f'select * from {project}.{table} where dt = {partition}'
            log_.info(sql)
            with odps.execute_sql(sql=sql).open_reader() as reader:
                data_count = reader.count
        else:
            log_.info("table {} partition {} does not exist".format(table, partition))
            data_count = 0
    except Exception as e:
        log_.error("table:{}, partition:{} no data, return data_count=0: {}".format(
            table, partition, e))
        data_count = 0
    return data_count

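# Aggregates the hourly feature columns over the window [previous_date 00h, date 23h]
# into per-video "dt:value" lists and returns [[video_id, json_str], ...] rows.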
def get_sql(date, previous_date_str, project):
    sql = '''
    SELECT videoid
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_pv))) AS view_pv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_uv))) AS view_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_pv))) AS play_pv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_uv))) AS play_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_pv))) AS share_pv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_uv))) AS share_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return_uv))) AS return_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_uv))) AS p_view_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_pv))) AS p_view_pv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_return_uv))) AS p_return_uv_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_uv))) AS share_uv_list_2day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_pv))) AS share_pv_list_2day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_uv))) AS share_uv_list_3day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_pv))) AS share_pv_list_3day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",1day_sharedepth_max_avg))) AS sharedepth_max_avg_list_1day
        ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",1day_sharewidth_max_avg))) AS sharewidth_max_avg_list_1day
    FROM (
        SELECT videoid
            ,dt
            ,SUM(view次数) AS view_pv
            ,SUM(view人数) AS view_uv
            ,SUM(play次数) AS play_pv
            ,SUM(play人数) AS play_uv
            ,SUM(share次数) AS share_pv
            ,SUM(share人数) AS share_uv
            ,SUM(回流人数) AS return_uv
            ,SUM(platform_view) AS p_view_uv
            ,SUM(platform_view_total) AS p_view_pv
            ,SUM(platform_return) AS p_return_uv
            ,SUM(lasttwodays_share) AS 2day_share_uv
            ,SUM(lasttwodays_share_total) AS 2day_share_pv
            ,SUM(lastthreedays_share) AS 3day_share_uv
            ,SUM(lastthreedays_share_total) AS 3day_share_pv
            ,SUM(sharedepth_max_avg) AS 1day_sharedepth_max_avg
            ,SUM(sharewidth_max_avg) AS 1day_sharewidth_max_avg
        FROM loghubods.video_data_each_hour_dataset_24h_total_apptype
        WHERE dt <= '{}23'
        AND dt >= '{}00'
        GROUP BY videoid
            ,dt
    )
    GROUP BY videoid
    '''.format(date, previous_date_str)
    print("sql:" + sql)
    records = execute_sql_from_odps(project=project, sql=sql)
    # Feature columns to copy from each record; a column can be missing on an
    # individual record, so log the error and keep the remaining features.
    feature_cols = [
        "view_pv_list_1day", "view_uv_list_1day",
        "play_pv_list_1day", "play_uv_list_1day",
        "share_pv_list_1day", "share_uv_list_1day",
        "return_uv_list_1day",
        "p_view_pv_list_1day", "p_view_uv_list_1day",
        "p_return_uv_list_1day",
        "sharedepth_max_avg_list_1day", "sharewidth_max_avg_list_1day",
    ]
    video_list = []
    with records.open_reader() as reader:
        for record in reader:
            video_id = record['videoid']
            m = dict()
            for col in feature_cols:
                try:
                    m[col] = record[col]
                except Exception as e:
                    log_.error(e)
            json_str = json.dumps(m)
            video_list.append([video_id, json_str])
    return video_list

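# Polls the upstream partition every 60 seconds; once the data is ready, pulls
# the 1-day features, writes them to Redis, and records the loaded partition.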
def h_timer_check():
    try:
        date = sys.argv[1]
        hour = sys.argv[2]
    except Exception as e:
        now_date = datetime.today()
        date = datetime.strftime(now_date, '%Y%m%d')
        # Zero-pad the hour so the partition string matches the dt format (e.g. "2024011720").
        hour = datetime.now().strftime('%H')
        log_.info("no arguments provided, falling back to system time; info: {}".format(e))
    # 1. Check whether the upstream table partition has been produced.
    project = "loghubods"
    table = "video_data_each_hour_dataset_24h_total_apptype"
    partition = str(date) + str(hour)
    table_data_cnt = check_data(project, table, partition)
    if table_data_cnt == 0:
        log_.info("upstream data {} not ready for partition {}, waiting...".format(table, partition))
        Timer(60, h_timer_check).start()
    else:
        log_.info("upstream data ready, count={}, start reading the table".format(table_data_cnt))
        # 2. Read the table and build the features.
        previous_date_str = (datetime.strptime(
            date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
        video_list = get_sql(date, previous_date_str, project)
        # 3. Write to Redis.
        log_.info("number of videos: {}".format(len(video_list)))
        records_process_for_list(
            video_list, process_and_store, max_size=50, num_workers=8)
        redis_helper.set_data_to_redis(
            REDIS_PREFIX + "partition", partition, 24 * 3600)

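# Entry point: run one check-and-load cycle (the Timer re-schedules the check
# while the upstream partition is still missing).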
if __name__ == '__main__':
    log_.info("start execution: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
    h_timer_check()
    log_.info("finished execution: " + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

# cd /root/zhangbo/rov-offline
# python alg_recsys_rank_item_realtime_1day.py 20240117 20