# -*- coding: utf-8 -*-
import traceback
import datetime
from odps import ODPS
from threading import Timer
from my_utils import RedisHelper, get_data_from_odps, send_msg_to_feishu
from my_config import set_config
from log import Log
from alg_recsys_recall_4h_region_trend import records_process_for_list
import json
from datetime import datetime, timedelta
import sys
from my_utils import execute_sql_from_odps


# Module-level singletons shared by every function below.
config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

# Redis key prefix for per-video realtime (hourly, root-all) feature blobs.
REDIS_PREFIX = "item_rt_fea_1hrootall_"

def process_and_store(row):
    """Write one (video_id, json_str) pair into Redis under the feature prefix.

    The value is stored with a 6-hour TTL; intended as the per-item worker for
    records_process_for_list.
    """
    video_id, payload = row
    redis_key = "{}{}".format(REDIS_PREFIX, video_id)
    ttl_seconds = 6 * 3600
    redis_helper.set_data_to_redis(redis_key, payload, ttl_seconds)

def check_data(project: str, table: str, partition: str) -> int:
    """Check whether the upstream ODPS partition is ready; return its row count.

    Returns 0 when the partition does not exist or any ODPS call fails, so the
    caller can keep polling until the data is produced.
    """
    odps = ODPS(
        access_id=config_.ODPS_CONFIG['ACCESSID'],
        secret_access_key=config_.ODPS_CONFIG['ACCESSKEY'],
        project=project,
        endpoint=config_.ODPS_CONFIG['ENDPOINT'],
        connect_timeout=3000,
        read_timeout=500000,
        pool_maxsize=1000,
        pool_connections=1000
    )
    try:
        t = odps.get_table(name=table)
        log_.info(f"检查分区是否存在-【 dt={partition} 】")
        check_res = t.exist_partition(partition_spec=f'dt={partition}')
        if check_res:
            # NOTE(review): dt is compared unquoted here but quoted in get_sql's
            # SQL — presumably relies on implicit casting; confirm dt column type.
            sql = f'select * from {project}.{table} where dt = {partition}'
            log_.info(sql)
            # reader.count exposes the result-set size without iterating rows.
            with odps.execute_sql(sql=sql).open_reader() as reader:
                data_count = reader.count
        else:
            log_.info("表{}分区{}不存在".format(table, partition))
            data_count = 0
    except Exception as e:
        # Best-effort: any failure is treated as "data not ready" (count 0).
        log_.error("table:{},partition:{} no data. return data_count=0:{}".format(table, partition, e))
        data_count = 0
    return data_count

def get_sql(partition, project):
    """Aggregate per-hour exp/share/return counters per video for one partition.

    Runs the aggregation SQL against ODPS and returns a list of
    [video_id, json_str] pairs, where json_str encodes a dict with keys
    "exp", "share", "return" (a key is simply omitted if reading that
    column fails for a row).
    """
    sql = '''
    SELECT  videoid
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(time,":",exp))) AS exp
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(time,":",share))) AS share
            ,CONCAT_WS(',',COLLECT_LIST(CONCAT(time,":",return))) AS return
    FROM    (
                SELECT  videoid
                        ,time
                        ,SUM(COALESCE(exp,0)) AS exp
                        ,SUM(COALESCE(share,0)) AS share
                        ,SUM(COALESCE(return,0)) AS return
                FROM    loghubods.alg_recsys_rank_root_info_v1
                WHERE   dt = '{}'
                and apptype not in ('12')
                GROUP BY videoid
                         ,time
            ) 
    GROUP BY videoid
    '''.format(partition)
    print("sql:" + sql)
    result = execute_sql_from_odps(project=project, sql=sql)
    rows = []
    with result.open_reader() as reader:
        for rec in reader:
            vid = rec['videoid']
            feature = dict()
            # Best-effort per-field copy: a failure on one field only drops
            # that field; the row itself is still emitted.
            for field in ("exp", "share", "return"):
                try:
                    feature[field] = rec[field]
                except Exception as err:
                    log_.error(err)
            rows.append([vid, json.dumps(feature)])
    return rows


def h_timer_check():
    """Poll until the upstream ODPS partition is ready, then load features to Redis.

    Date/hour come from argv[1]/argv[2] when provided; otherwise the current
    system time is used. While the upstream table has no data for the
    partition, reschedules itself with a 60-second Timer; once ready, reads
    the table, writes per-video feature JSON into Redis, and records the
    processed partition under REDIS_PREFIX + "partition".
    """
    try:
        date = sys.argv[1]
        hour = sys.argv[2]
    except Exception as e:
        # Fall back to system time. Zero-pad the hour so the partition matches
        # the 'YYYYMMDDHH' naming (e.g. '2024040809', not '202404089' — the
        # original int .hour produced an unpadded value and broke the lookup).
        now_date = datetime.today()
        date = datetime.strftime(now_date, '%Y%m%d')
        hour = now_date.strftime('%H')
        log_.info("没有读取到参数,采用系统时间:{}".format(e))
    log_.info("使用时间参数日期:{},小时:{}".format(date, str(hour)))
    # 1. Wait for the upstream table partition to be produced.
    project = "loghubods"
    table = "alg_recsys_rank_root_info_v1"
    partition = str(date) + str(hour)
    table_data_cnt = check_data(project, table, partition)
    if table_data_cnt == 0:
        log_.info("上游数据{}未就绪{},等待...".format(table, partition))
        # Retry the whole check in 60s on a fresh timer thread.
        Timer(60, h_timer_check).start()
    else:
        log_.info("上游数据就绪,count={},开始读取数据表".format(table_data_cnt))
        # 2. Read the table and build per-video feature JSON.
        video_list = get_sql(partition, project)
        # 3. Write the features into Redis with a worker pool.
        log_.info("video的数据量:{}".format(len(video_list)))
        records_process_for_list(video_list, process_and_store, max_size=50, num_workers=8)

        # Record which partition was loaded, same 6h TTL as the data keys.
        redis_helper.set_data_to_redis(REDIS_PREFIX + "partition", partition, 6 * 3600)




if __name__ == '__main__':
    # Entry point: log wall-clock start/end around the polling loop.
    started = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    log_.info("开始执行:" + started)
    h_timer_check()
    finished = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    log_.info("完成执行:" + finished)




# cd /root/zhangbo/rov-offline
# python alg_recsys_recall_01_vid2titletags_redis.py 20240408 14