zhangbo committed 1 year ago
Commit: d139cb7491
2 files changed, 191 insertions(+), 1 deletion(-)
  1. alg_recsys_rank_item_realtime_1day.py   +190 -0
  2. alg_recsys_rank_item_realtime_1h.py     +1 -1

alg_recsys_rank_item_realtime_1day.py (new file, +190 -0)

@@ -0,0 +1,190 @@
+# -*- coding: utf-8 -*-
+import json
+import sys
+from datetime import datetime, timedelta
+from threading import Timer
+from odps import ODPS
+from config import set_config
+from log import Log
+from utils import RedisHelper, execute_sql_from_odps
+from alg_recsys_recall_4h_region_trend import records_process_for_list
+
+
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
+REDIS_PREFIX = "item_rt_fea_1day_"
+
+def process_and_store(row):
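+    """Write one (video_id, json_str) feature row to Redis; key = REDIS_PREFIX + video_id, TTL = 2 days."""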
+    video_id, json_str = row
+    key = REDIS_PREFIX + str(video_id)
+    expire_time = 24 * 3600 * 2
+    redis_helper.set_data_to_redis(key, json_str, expire_time)
+    # log_.info("write video feature: key={}, value={}".format(key, json_str))
+
+def check_data(project, table, partition) -> int:
+    """检查数据是否准备好,输出数据条数"""
+    odps = ODPS(
+        access_id=config_.ODPS_CONFIG['ACCESSID'],
+        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
+        project=project,
+        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
+        connect_timeout=3000,
+        read_timeout=500000,
+        pool_maxsize=1000,
+        pool_connections=1000
+    )
+    try:
+        t = odps.get_table(name=table)
+        log_.info(f"检查分区是否存在-【 dt={partition} 】")
+        check_res = t.exist_partition(partition_spec=f'dt={partition}')
+        if check_res:
+            sql = f'select * from {project}.{table} where dt = {partition}'
+            log_.info(sql)
+            with odps.execute_sql(sql=sql).open_reader() as reader:
+                data_count = reader.count
+        else:
+            log_.info("表{}分区{}不存在".format(table, partition))
+            data_count = 0
+    except Exception as e:
+        log_.error("table:{},partition:{} no data. return data_count=0:{}".format(table, partition, e))
+        data_count = 0
+    return data_count
+
+def get_sql(date, previous_date_str, project):
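+    """Aggregate hourly per-video stats over the window [previous_date_str 00, date 23]
+    and return a list of [video_id, json_str] pairs ready to be written to Redis."""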
+    sql = '''
+    SELECT  videoid
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_pv))) AS view_pv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",view_uv))) AS view_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_pv))) AS play_pv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",play_uv))) AS play_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_pv))) AS share_pv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",share_uv))) AS share_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",return_uv))) AS return_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_uv))) AS p_view_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_view_pv))) AS p_view_pv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",p_return_uv))) AS p_return_uv_list_1day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_uv))) AS share_uv_list_2day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",2day_share_pv))) AS share_pv_list_2day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_uv))) AS share_uv_list_3day
+            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(dt,":",3day_share_pv))) AS share_pv_list_3day
+    FROM    (
+                SELECT  videoid
+                        ,dt
+                        ,SUM(view次数) AS view_pv
+                        ,SUM(view人数) AS view_uv
+                        ,SUM(play次数) AS play_pv
+                        ,SUM(play人数) AS play_uv
+                        ,SUM(share次数) AS share_pv
+                        ,SUM(share人数) AS share_uv
+                        ,SUM(回流人数) AS return_uv
+                        ,SUM(platform_view) AS p_view_uv
+                        ,SUM(platform_view_total) AS p_view_pv
+                        ,SUM(platform_return) AS p_return_uv
+                        ,SUM(lasttwodays_share) AS 2day_share_uv
+                        ,SUM(lasttwodays_share_total) AS 2day_share_pv
+                        ,SUM(lastthreedays_share) AS 3day_share_uv
+                        ,SUM(lastthreedays_share_total) AS 3day_share_pv
+                FROM    loghubods.video_data_each_hour_dataset_24h_total_apptype
+                WHERE   dt <= '{}23'
+                AND     dt >= '{}00'
+                GROUP BY videoid
+                         ,dt
+            ) 
+    GROUP BY videoid
+    '''.format(date, previous_date_str)
+    print("sql:" + sql)
+    records = execute_sql_from_odps(project=project, sql=sql)
+    video_list = []
+    with records.open_reader() as reader:
+        for record in reader:
+            video_id = record['videoid']
+            m = dict()
+            feature_cols = [
+                "view_pv_list_1day", "view_uv_list_1day",
+                "play_pv_list_1day", "play_uv_list_1day",
+                "share_pv_list_1day", "share_uv_list_1day",
+                "return_uv_list_1day",
+                "p_view_pv_list_1day", "p_view_uv_list_1day", "p_return_uv_list_1day",
+            ]
+            for col in feature_cols:
+                try:
+                    m[col] = record[col]
+                except Exception as e:
+                    log_.error(e)
+            json_str = json.dumps(m)
+            video_list.append([video_id, json_str])
+    return video_list
+
+
+def h_timer_check():
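+    """Wait for the upstream partition to be ready (re-check every 60s via Timer),
+    then build the 1-day features and write them to Redis."""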
+    try:
+        date = sys.argv[1]
+        hour = sys.argv[2]
+    except Exception as e:
+        now_date = datetime.today()
+        date = datetime.strftime(now_date, '%Y%m%d')
+        hour = datetime.now().strftime('%H')  # zero-padded so the partition is always YYYYMMDDHH
+        log_.info("no CLI args given, falling back to system time: {}".format(e))
+    # 1. check whether the upstream table partition has been produced
+    project = "loghubods"
+    table = "video_data_each_hour_dataset_24h_total_apptype"
+    partition = str(date) + str(hour)
+    table_data_cnt = check_data(project, table, partition)
+    if table_data_cnt == 0:
+        log_.info("上游数据{}未就绪{},等待...".format(table, partition))
+        Timer(60, h_timer_check).start()
+    else:
+        log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
+        # 2 读取数据表 处理特征
+        previous_date_str = (datetime.strptime(date, "%Y%m%d") - timedelta(days=1)).strftime("%Y%m%d")
+        video_list = get_sql(date, previous_date_str, project)
+        # 3 写入redis
+        log_.info("video的数据量:{}".format(len(video_list)))
+        records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)
+
+
+
+
+if __name__ == '__main__':
+    log_.info("开始执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+    h_timer_check()
+    log_.info("完成执行:" + datetime.now().strftime("%Y-%m-%d %H:%M:%S"))
+
+
+
+
+# cd /root/zhangbo/rov-offline
+# python alg_recsys_rank_item_realtime_1day.py 20240117 20

alg_recsys_rank_item_realtime_feature.py → alg_recsys_rank_item_realtime_1h.py (renamed, +1 -1)

@@ -167,4 +167,4 @@ if __name__ == '__main__':
 
 
 # cd /root/zhangbo/rov-offline
-# python alg_recsys_rank_item_realtime_feature.py 20240117 20
+# python alg_recsys_rank_item_realtime_1h.py 20240117 20