import datetime
import traceback
import multiprocessing
from threading import Timer
from my_utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu, send_msg_to_feishu_new
from my_config import set_config
from log import Log
# Module-level singletons: project config, logger and Redis connection helper.
config_, _ = set_config()
log_ = Log()
redis_helper = RedisHelper()

# Feature columns of the legacy share-rate table (used by update_videos_data).
features = [
    'apptype',
    'videoid',
    'sharerate_all',
    'sharerate_ad'
]

# Feature columns of the newer share-rate table (used by update_videos_data_new).
features_new = [
    'apptype',
    'videoid',
    'adrate',
    'sharerate',
    'adrate_share'
]

# Feature columns of the exit-rate table (used by update_videos_data_with_out).
features_with_out = [
    'apptype',
    'videoid',
    'adrate',  # probability that an ad is shown
    'outrate',  # probability that the user exits (bounces) directly
    'adrate_out'  # probability that an ad is shown when the user exits directly
]


def get_top10_abnormal_videos_return(dt, filter_param):
    """Collect yesterday's abnormal (viral/fission) videos within each app's top10.

    Args:
        dt: partition date string 'YYYYMMDD'.
        filter_param: threshold on the 'multiple' column; a row above it marks
            every video ranked at or before it as abnormal.
    Returns:
        dict mapping app_type (int) -> list of abnormal video ids (int).
        Empty dict when the source partition is not ready.
    """
    project = config_.ad_model_data['top10_videos'].get('project')
    table = config_.ad_model_data['top10_videos'].get('table')
    columns = ['apptype', 'videoid', 'yesterday_return', 'rank', 'multiple']
    top10_abnormal_videos = {}
    if data_check(project=project, table=table, dt=dt) > 0:
        df = get_feature_data(project=project, table=table, features=columns, dt=dt)
        df['multiple'] = df['multiple'].fillna(0)
        for col in ('apptype', 'videoid', 'yesterday_return', 'rank'):
            df[col] = df[col].astype(int)
        df['multiple'] = df['multiple'].astype(float)
        for app_type in set(df['apptype'].tolist()):
            sub_df = df[df['apptype'] == app_type].sort_values(by=['rank'], ascending=True)
            seen_ids = []
            for _, row in sub_df.iterrows():
                seen_ids.append(int(row['videoid']))
                # A row above the threshold flags all videos up to and including
                # itself; the last qualifying row determines the final list.
                if row['multiple'] > filter_param:
                    top10_abnormal_videos[app_type] = list(seen_ids)
    log_.info(f"top10_abnormal_videos = {top10_abnormal_videos}")
    return top10_abnormal_videos


def predict_video_share_rate(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Estimate each video's share rate when an ad is shown and write it to Redis.

    Args:
        video_initial_df: feature frame with columns apptype/videoid/sharerate_all/sharerate_ad.
        dt: date string 'YYYYMMDD', used in the Redis key name.
        data_key: ab-test data key; also selects the abnormal-video discount factor.
        data_param: app type id used to filter rows.
        top10_abnormal_videos: {app_type: [video_id, ...]} videos to exclude and discount.
    Returns:
        The filtered DataFrame with the computed 'video_ad_share_rate' column.
    """
    # Work on a private copy restricted to the requested app type.
    video_df = video_initial_df.copy()
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == int(data_param)].copy()
    log_.info(f"video_df length: {len(video_df)}")
    # Column assignment (instead of inplace fillna on a filtered frame) avoids
    # pandas SettingWithCopy issues where the fill can silently not apply.
    video_df['sharerate_all'] = video_df['sharerate_all'].fillna(0).astype(float)
    video_df['sharerate_ad'] = video_df['sharerate_ad'].fillna(0).astype(float)
    # 30-day share rate with ads across all videos, stored in the row keyed
    # 'allvideos'. NOTE(review): raises IndexError if that aggregate row is
    # missing — assumed always present in the upstream table; confirm.
    ad_all_videos_share_rate = video_df[video_df['videoid'] == 'allvideos']['sharerate_ad'].values[0]
    video_df = video_df[video_df['videoid'] != 'allvideos']
    video_df['videoid'] = video_df['videoid'].astype(int)
    # Drop yesterday's abnormal (fission) top10 videos for this app type.
    top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
    if top10_abnormal_video_ids is not None:
        video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
    # Filter zero denominators first so the division cannot produce inf values
    # (mirrors the denominator filter used in predict_video_share_rate_new).
    video_df = video_df[video_df['sharerate_all'] != 0].copy()
    video_df['video_ad_share_rate'] = \
        video_df['sharerate_ad'] * float(ad_all_videos_share_rate) / video_df['sharerate_all']
    video_df['video_ad_share_rate'] = video_df['video_ad_share_rate'].fillna(0)
    video_df = video_df[video_df['video_ad_share_rate'] != 0]
    log_.info(f"video_df filtered 0 length: {len(video_df)}")
    # Write per-video rates into a Redis zset; member -1 stores the group mean
    # as the fallback value.
    key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{data_key}:{dt}"
    redis_data = {}
    for _, item in video_df.iterrows():
        redis_data[int(item['videoid'])] = item['video_ad_share_rate']
    group_ad_share_rate_mean = video_df['video_ad_share_rate'].mean()
    redis_data[-1] = group_ad_share_rate_mean
    # Abnormal videos get a discounted value: group mean * configured factor.
    if top10_abnormal_video_ids is not None:
        abnormal_video_param = config_.AD_ABNORMAL_VIDEOS_PARAM.get(data_key, 1)
        log_.info(f"abnormal video param: {data_key}, {data_param}, {abnormal_video_param}")
        for abnormal_video_id in top10_abnormal_video_ids:
            redis_data[int(abnormal_video_id)] = group_ad_share_rate_mean * abnormal_video_param
    log_.info(f"redis_data count: {len(redis_data)}")
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return video_df


def update_videos_data(project, table, dt, update_params, top10_abnormal_videos):
    """Load the legacy share-rate features once and update every configured data_key."""
    video_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    for data_key, data_param in update_params.items():
        log_.info(f"data_key = {data_key} update start...")
        predict_video_share_rate(
            video_initial_df=video_initial_df,
            dt=dt,
            data_key=data_key,
            data_param=data_param,
            top10_abnormal_videos=top10_abnormal_videos,
        )
        log_.info(f"data_key = {data_key} update end!")


def predict_video_share_rate_new(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Estimate each video's share rate when an ad is shown (new table schema) and write to Redis.

    Args:
        video_initial_df: feature frame with columns apptype/videoid/adrate/sharerate/adrate_share.
        dt: date string 'YYYYMMDD', used in the Redis key name.
        data_key: ab-test data key; also selects the abnormal-video discount factor.
        data_param: app type id used to filter rows.
        top10_abnormal_videos: {app_type: [video_id, ...]} videos to exclude and discount.
    Returns:
        The filtered DataFrame with the computed 'video_ad_share_rate' column.
    """
    # Work on a private copy restricted to the requested app type.
    video_df = video_initial_df.copy()
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == int(data_param)].copy()
    log_.info(f"video_df length: {len(video_df)}")
    # Column assignment (instead of inplace fillna on a filtered frame) avoids
    # pandas SettingWithCopy issues where the fill can silently not apply.
    video_df['adrate'] = video_df['adrate'].fillna(0).astype(float)
    video_df['sharerate'] = video_df['sharerate'].fillna(0).astype(float)
    video_df['adrate_share'] = video_df['adrate_share'].fillna(0).astype(float)

    # Drop yesterday's abnormal (fission) top10 videos for this app type.
    video_df['videoid'] = video_df['videoid'].astype(int)
    top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
    if top10_abnormal_video_ids is not None:
        video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
    # Combine the three rates; filter the zero denominator first so the
    # division cannot produce inf values.
    video_df = video_df[video_df['adrate'] != 0].copy()
    video_df['video_ad_share_rate'] = \
        video_df['adrate_share'] * video_df['sharerate'] / video_df['adrate']
    video_df['video_ad_share_rate'] = video_df['video_ad_share_rate'].fillna(0)
    video_df = video_df[video_df['video_ad_share_rate'] != 0]
    log_.info(f"video_df filtered 0 length: {len(video_df)}")
    # Write per-video rates into a Redis zset; member -1 stores the group mean
    # as the fallback value.
    key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{data_key}:{dt}"
    redis_data = {}
    for _, item in video_df.iterrows():
        redis_data[int(item['videoid'])] = item['video_ad_share_rate']
    group_ad_share_rate_mean = video_df['video_ad_share_rate'].mean()
    redis_data[-1] = group_ad_share_rate_mean
    # Abnormal videos get a discounted value: group mean * configured factor.
    if top10_abnormal_video_ids is not None:
        abnormal_video_param = config_.AD_ABNORMAL_VIDEOS_PARAM.get(data_key, 1)
        log_.info(f"abnormal video param: {data_key}, {data_param}, {abnormal_video_param}")
        for abnormal_video_id in top10_abnormal_video_ids:
            redis_data[int(abnormal_video_id)] = group_ad_share_rate_mean * abnormal_video_param
    log_.info(f"redis_data count: {len(redis_data)}")
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return video_df


def update_videos_data_new(project, table, dt, update_params, top10_abnormal_videos):
    """Load the new-schema share-rate features once and update every configured data_key."""
    video_initial_df = get_feature_data(project=project, table=table, features=features_new, dt=dt)
    for data_key, data_param in update_params.items():
        log_.info(f"data_key = {data_key} update start...")
        predict_video_share_rate_new(
            video_initial_df=video_initial_df,
            dt=dt,
            data_key=data_key,
            data_param=data_param,
            top10_abnormal_videos=top10_abnormal_videos,
        )
        log_.info(f"data_key = {data_key} update end!")


def predict_video_out_rate(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Estimate each video's probability of NOT being exited directly when an ad is shown, and write to Redis.

    Args:
        video_initial_df: feature frame with columns apptype/videoid/adrate/outrate/adrate_out.
        dt: date string 'YYYYMMDD', used in the Redis key name.
        data_key: ab-test data key; also selects the abnormal-video discount factor.
        data_param: app type id used to filter rows.
        top10_abnormal_videos: {app_type: [video_id, ...]} videos to exclude and discount.
    Returns:
        The filtered DataFrame with the computed 'video_ad_no_out_rate' column.
    """
    # Work on a private copy restricted to the requested app type.
    video_df = video_initial_df.copy()
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == int(data_param)].copy()
    log_.info(f"video_df length: {len(video_df)}")
    # Column assignment (instead of inplace fillna on a filtered frame) avoids
    # pandas SettingWithCopy issues where the fill can silently not apply.
    video_df['adrate'] = video_df['adrate'].fillna(0).astype(float)
    video_df['outrate'] = video_df['outrate'].fillna(0).astype(float)
    video_df['adrate_out'] = video_df['adrate_out'].fillna(0).astype(float)

    # Drop yesterday's abnormal (fission) top10 videos for this app type.
    video_df['videoid'] = video_df['videoid'].astype(int)
    top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
    if top10_abnormal_video_ids is not None:
        video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
    # Exit-rate-with-ad: filter zero rates first (avoids division by zero and
    # drops rows with no ad-exit signal), then combine the three rates.
    video_df = video_df[video_df['adrate'] != 0]
    video_df = video_df[video_df['adrate_out'] != 0].copy()
    video_df['video_ad_out_rate'] = \
        video_df['adrate_out'] * video_df['outrate'] / video_df['adrate']
    video_df['video_ad_out_rate'] = video_df['video_ad_out_rate'].fillna(0)
    # The score actually published is the complement: probability of staying.
    video_df['video_ad_no_out_rate'] = 1 - video_df['video_ad_out_rate']
    log_.info(f"video_df filtered 0 length: {len(video_df)}")
    # Write per-video rates into a Redis zset; member -1 stores the group mean
    # as the fallback value.
    key_name = f"{config_.KEY_NAME_PREFIX_AD_VIDEO}{data_key}:{dt}"
    redis_data = {}
    for _, item in video_df.iterrows():
        redis_data[int(item['videoid'])] = item['video_ad_no_out_rate']
    group_ad_out_rate_mean = video_df['video_ad_no_out_rate'].mean()
    redis_data[-1] = group_ad_out_rate_mean
    # Abnormal videos get a discounted value: group mean * configured factor.
    if top10_abnormal_video_ids is not None:
        abnormal_video_param = config_.AD_ABNORMAL_VIDEOS_PARAM.get(data_key, 1)
        log_.info(f"abnormal video param: {data_key}, {data_param}, {abnormal_video_param}")
        for abnormal_video_id in top10_abnormal_video_ids:
            redis_data[int(abnormal_video_id)] = group_ad_out_rate_mean * abnormal_video_param
    log_.info(f"redis_data count: {len(redis_data)}")
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return video_df


def update_videos_data_with_out(project, table, dt, update_params, top10_abnormal_videos):
    """Load the exit-rate features once and update every configured data_key."""
    video_initial_df = get_feature_data(project=project, table=table, features=features_with_out, dt=dt)
    for data_key, data_param in update_params.items():
        log_.info(f"data_key = {data_key} update start...")
        predict_video_out_rate(
            video_initial_df=video_initial_df,
            dt=dt,
            data_key=data_key,
            data_param=data_param,
            top10_abnormal_videos=top10_abnormal_videos,
        )
        log_.info(f"data_key = {data_key} update end!")


def timer_check(dt, video_key, video_params, top10_abnormal_videos):
    """Run the update for video_key once its source partition is ready.

    If the partition has no data yet, reschedule this check in 60 seconds.
    On success, post a completion message to the feishu robot.
    """
    log_.info(f"video_key = {video_key}")
    project = config_.ad_model_data[video_key].get('project')
    table = config_.ad_model_data[video_key].get('table')
    data_count = data_check(project=project, table=table, dt=dt)
    if data_count <= 0:
        # Data not ready — re-check in one minute.
        Timer(60, timer_check, args=[dt, video_key, video_params, top10_abnormal_videos]).start()
        return

    log_.info(f"ad video data count = {data_count}")
    # Pick the updater matching this video_key's table schema.
    dispatch = {
        'videos_data_alladtype': update_videos_data_new,
        'videos_data_with_out_alladtype': update_videos_data_with_out,
    }
    update_func = dispatch.get(video_key, update_videos_data)
    update_func(project=project, table=table, dt=dt, update_params=video_params,
                top10_abnormal_videos=top10_abnormal_videos)
    log_.info(f"video_key = {video_key} ad video data update end!")
    msg_list = [
        f"env: rov-offline {config_.ENV_TEXT}",
        f"video_key: {video_key}",
        f"now_date: {dt}",
        f"finished time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
    ]
    send_msg_to_feishu_new(
        webhook=config_.FEISHU_ROBOT['ad_video_update_robot'].get('webhook'),
        key_word=config_.FEISHU_ROBOT['ad_video_update_robot'].get('key_word'),
        title='广告模型视频分享率预测数据更新完成',
        msg_list=msg_list
    )


def main():
    """Entry point: fan out one update task per configured video_key over a process pool.

    On any failure, log the traceback and alert the feishu robot.
    """
    try:
        now_date = datetime.datetime.today()
        dt = datetime.datetime.strftime(now_date, '%Y%m%d')
        log_.info(f"now_date: {dt}")
        # The 1.5x-return abnormal-video strategy is currently paused, so no
        # abnormal videos are excluded (see get_top10_abnormal_videos_return
        # for the original lookup).
        top10_abnormal_videos = {}
        update_params = config_.AD_VIDEO_DATA_PARAMS
        pool = multiprocessing.Pool(processes=len(update_params))
        async_results = []
        for video_key, video_params in update_params.items():
            async_results.append(pool.apply_async(
                func=timer_check,
                args=(dt, video_key, video_params, top10_abnormal_videos)
            ))
        pool.close()
        pool.join()
        # apply_async silently swallows worker exceptions unless .get() is
        # called — surface them here so the alert below actually fires.
        for res in async_results:
            res.get()

    except Exception as e:
        log_.error(f"视频分享率预测数据更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        msg_list = [
            f"env: rov-offline {config_.ENV_TEXT}",
            f"now time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
            f"exception: {e}",
            f"traceback: {traceback.format_exc()}",
        ]
        send_msg_to_feishu_new(
            webhook=config_.FEISHU_ROBOT['ad_video_update_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['ad_video_update_robot'].get('key_word'),
            title='广告模型视频分享率预测数据更新失败',
            msg_list=msg_list
        )


# Script entry point: run the full daily update.
if __name__ == '__main__':
    # timer_check()
    main()