Bläddra i källkod

Merge branch 'feature_2023103115_liqian_ad_abtest'

liqian 1 år sedan
förälder
incheckning
4b29cff88c

+ 99 - 0
ad_arpu_update.py

@@ -0,0 +1,99 @@
+import datetime
+import json
+import traceback
+from threading import Timer
+from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
+from config import set_config
+from log import Log
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
# Columns fetched from the feature table for the ARPU computation.
features = [
    'dt',
    'dau',
    'ad_own_view',  # impression count of self-operated ads
    'ad_vx_view',  # impression count of WeChat ads
]
+
+
def update_ad_arpu(project, table, dt):
    """Compute the last period's ad ARPU and store it in Redis.

    Reads dau and ad impression counts for partition ``dt`` from the feature
    table, combines them with the ecpm values previously cached in Redis, and
    writes the resulting ARPU (ad income per DAU, 4 decimals) to a persistent
    Redis key.

    :param project: ODPS project holding the feature table
    :param table: feature table with dau / ad view counts
    :param dt: data partition, format '%Y%m%d%H'
    :return: None
    """
    user_group_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    if user_group_initial_df.empty:
        # No row for this partition -- positional access below would raise.
        log_.info(f"数据异常,无数据,dt = {dt}")
        return
    # Normalize count columns: missing -> 0, then integer.
    for col in ('dau', 'ad_own_view', 'ad_vx_view'):
        user_group_initial_df[col] = user_group_initial_df[col].fillna(0).astype(int)
    dau = user_group_initial_df['dau'][0]
    if dau == 0:
        log_.info(f"数据异常,dau = {dau}")
        return
    ad_own_view = user_group_initial_df['ad_own_view'][0]
    ad_vx_view = user_group_initial_df['ad_vx_view'][0]
    # Cached ecpm values, JSON dict keyed by channel: {'own': ..., 'weixin': ...}
    ecpm = redis_helper.get_data_from_redis(key_name=config_.KEY_NAME_AD_ECPM)
    if ecpm is None:
        return
    ecpm = json.loads(ecpm)
    own_ecpm = ecpm.get('own', 0)
    vx_ecpm = ecpm.get('weixin', 0)
    if own_ecpm == 0 and vx_ecpm == 0:
        return
    # Ad income of the last period; ecpm is revenue per 1000 impressions.
    income = ad_own_view * float(own_ecpm) / 1000 + ad_vx_view * float(vx_ecpm) / 1000
    # ARPU for the last period.
    arpu = round(income / dau, 4)
    # Write to redis (arpu is non-negative by construction; guard kept for safety).
    if arpu >= 0:
        redis_helper.set_data_to_redis(key_name=config_.KEY_NAME_AD_ARPU, value=arpu)
        # Remove any TTL so the value survives until the next update.
        redis_helper.persist_key(key_name=config_.KEY_NAME_AD_ARPU)
        data = {
            'dau': dau,
            'ad_own_view': ad_own_view,
            'ad_vx_view': ad_vx_view,
            'ecpm': ecpm,
            'own_ecpm': own_ecpm,
            'vx_ecpm': vx_ecpm,
            'income': income,
            'arpu': arpu
        }
        log_.info(f"data = {data}")
        log_.info("update arpu finished!")
+
+
def timer_check():
    """Poll the hourly source table and trigger the ARPU update when ready.

    Re-schedules itself every 60 seconds via a Timer until the data for the
    previous hour shows up; gives up for this cycle once the current minute
    passes 30.
    """
    try:
        project = 'loghubods'
        table = 'dau_ad_view_hour'
        now_min = datetime.datetime.now().minute
        last_hour = datetime.datetime.today() - datetime.timedelta(hours=1)
        dt = datetime.datetime.strftime(last_hour, '%Y%m%d%H')
        log_.info(f"now_date: {dt}")
        # Is the partition for the previous hour available yet?
        data_count = data_check(project=project, table=table, dt=dt)
        if data_count > 0:
            # Data is ready -- run the update.
            update_ad_arpu(project=project, table=table, dt=dt)
            log_.info(f"ad arpu update end!")
        elif now_min > 30:
            # Past the half hour: stop waiting for this cycle.
            log_.info('数据未准备好!')
            return
        else:
            # Not ready yet: check again in one minute.
            Timer(60, timer_check).start()

    except Exception as e:
        log_.error(f"新策略 -- arpu值更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - 新策略 -- arpu值更新失败\n"
                     f"exception: {e}\n"
                     f"traceback: {traceback.format_exc()}"
        )


if __name__ == '__main__':
    timer_check()

+ 9 - 0
ad_arpu_update_task.sh

@@ -0,0 +1,9 @@
# Run the ad ARPU update script from the checkout matching the environment.
source /etc/profile
echo $ROV_OFFLINE_ENV
# 'test' -> staging checkout under /data2; 'pro' -> production under /data.
# Any other value: do nothing.
if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
    cd /data2/rov-offline &&
    /root/anaconda3/bin/python /data2/rov-offline/ad_arpu_update.py
elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
    cd /data/rov-offline &&
    /root/anaconda3/bin/python /data/rov-offline/ad_arpu_update.py
fi

+ 71 - 0
ad_ecpm_update.py

@@ -0,0 +1,71 @@
+import datetime
+import json
+import traceback
+
+from utils import request_get, send_msg_to_feishu
+from db_helper import RedisHelper
+from config import set_config
+from log import Log
+
+config_, _ = set_config()
+log_ = Log()
+
+
def get_ad_ecpm():
    """Fetch the current ad ecpm values from the ad service.

    :return: dict of ecpm values keyed by ad channel (the service's ``data``
        payload); empty dict when the request fails or returns no data.
    """
    ad_ecpm = {}
    result = request_get(request_url=config_.GET_AD_ECPM_URL)
    if result is None:
        log_.info('获取广告ecpm值失败!')
        return ad_ecpm
    # Non-zero (or missing) code marks a server-side failure.
    if result.get('code') != 0:
        log_.info('获取广告ecpm值失败!')
        return ad_ecpm
    if not result.get('data'):
        return ad_ecpm
    return result['data']
+
+
def update_ad_ecpm():
    """Fetch the latest ad ecpm values and persist them to Redis.

    Notifies the feishu robot on both success and failure.
    """
    try:
        now_date = datetime.datetime.today()
        log_.info(f"now_date: {datetime.datetime.strftime(now_date, '%Y%m%d%H')}")

        # Pull the ecpm values from the ad service.
        ad_ecpm = get_ad_ecpm()
        log_.info(f"ad_ecpm: {ad_ecpm}")
        if not ad_ecpm:
            # Nothing fetched -- alert and keep the previously stored values.
            send_msg_to_feishu(
                webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
                key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
                msg_text=f"rov-offline{config_.ENV_TEXT} - 广告ecpm获取失败!"
            )
            return

        # Store as JSON and drop any TTL so the value survives until next run.
        redis_helper = RedisHelper()
        redis_helper.set_data_to_redis(key_name=config_.KEY_NAME_AD_ECPM, value=json.dumps(ad_ecpm))
        redis_helper.persist_key(key_name=config_.KEY_NAME_AD_ECPM)

        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - 广告ecpm更新完成!\nad_ecpm = {ad_ecpm}"

        )
        log_.info(f"ad ecpm update end!")

    except Exception as e:
        log_.error(f"广告ecpm更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - 广告ecpm更新失败\n"
                     f"exception: {e}\n"
                     f"traceback: {traceback.format_exc()}"
        )


if __name__ == '__main__':
    update_ad_ecpm()

+ 9 - 0
ad_ecpm_update_task.sh

@@ -0,0 +1,9 @@
# Run the ad ecpm update script from the checkout matching the environment.
source /etc/profile
echo $ROV_OFFLINE_ENV
# 'test' -> staging checkout under /data2; 'pro' -> production under /data.
# Any other value: do nothing.
if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
    cd /data2/rov-offline &&
    /root/anaconda3/bin/python /data2/rov-offline/ad_ecpm_update.py
elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
    cd /data/rov-offline &&
    /root/anaconda3/bin/python /data/rov-offline/ad_ecpm_update.py
fi

+ 22 - 0
ad_roi_param_update.py

@@ -0,0 +1,22 @@
+import datetime
+from db_helper import RedisHelper
+from config import set_config
+from log import Log
+
+config_, _ = set_config()
+log_ = Log()
+
+
def update_ad_roi_param(param):
    """Persist the ROI calculation parameter to Redis without expiry.

    :param param: numeric factor used downstream when computing ad ROI
    """
    redis_helper = RedisHelper()
    redis_helper.set_data_to_redis(key_name=config_.KEY_NAME_AD_ROI_PARAM, value=param)
    # Drop any TTL so the parameter stays until explicitly changed.
    redis_helper.persist_key(key_name=config_.KEY_NAME_AD_ROI_PARAM)


if __name__ == '__main__':
    now_date = datetime.datetime.today()
    log_.info(f"now_date: {datetime.datetime.strftime(now_date, '%Y%m%d %H:%M:%S')}")
    # Hard-coded ROI parameter; edit here before running to change it.
    param = 5
    log_.info(f"param = {param}")
    update_ad_roi_param(param=param)
    log_.info(f"ad roi param update finished!")

+ 166 - 0
ad_user_data_update_with_new_strategy.py

@@ -0,0 +1,166 @@
+import datetime
+import traceback
+from threading import Timer
+from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu
+from config import set_config
+from log import Log
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
# Columns fetched from the user-group feature table.
features = [
    'apptype',
    'group',
    'ad_type',  # 0: all, 1: self-operated, 2: WeChat
    'sharerate',  # P(share)
    'no_ad_rate',  # P(no ad shown)
    'no_adrate_share',  # P(no ad shown | share)
    'ad_rate',  # P(ad shown)
    'adrate_share',  # P(ad shown | share)
]
+
+
def _predict_group_share_rate(user_group_initial_df, dt, data_params, rule_params, param,
                              rate_col, cond_share_col, out_col, key_prefix):
    """Shared implementation of the per-user-group share-rate estimate.

    Computes, per user group, P(share | ad-state) via Bayes:
        P(share | ad-state) = P(ad-state | share) * P(share) / P(ad-state)
    and writes the per-group rates plus their mean (member 'mean_group')
    to a redis zset with a 2-day expiry.

    :param rate_col: column holding P(ad-state), e.g. 'ad_rate' / 'no_ad_rate'
    :param cond_share_col: column holding P(ad-state | share)
    :param out_col: name of the computed share-rate column
    :param key_prefix: redis key prefix for the result zset
    :return: the filtered dataframe including ``out_col``
    """
    # Resolve the configured app type and user-group rule for this param.
    data_key = param.get('data')
    data_param = data_params.get(data_key)
    rule_key = param.get('rule')
    rule_param = rule_params.get(rule_key)

    user_group_df = user_group_initial_df.copy()
    # Keep only the "all ad types" rows (ad_type == 0) for the target app type.
    user_group_df['ad_type'] = user_group_df['ad_type'].astype(int)
    user_group_df = user_group_df[user_group_df['ad_type'] == 0]
    user_group_df['apptype'] = user_group_df['apptype'].astype(int)
    user_group_df = user_group_df[user_group_df['apptype'] == data_param]
    # Normalize probability columns: missing -> 0, then float.
    for col in (rate_col, 'sharerate', cond_share_col):
        user_group_df[col] = user_group_df[col].fillna(0).astype(float)

    # Restrict to the user groups configured for this rule.
    user_group_list = rule_param.get('group_list')
    user_group_df = user_group_df[user_group_df['group'].isin(user_group_list)]

    # Drop rows with P(ad-state) == 0 to avoid division by zero, then apply Bayes.
    user_group_df = user_group_df[user_group_df[rate_col] != 0]
    user_group_df[out_col] = \
        user_group_df[cond_share_col] * user_group_df['sharerate'] / user_group_df[rate_col]
    user_group_df[out_col] = user_group_df[out_col].fillna(0)

    # Write the per-group rates and their mean to redis.
    # NOTE(review): with an empty dataframe the mean is NaN but is still
    # written under 'mean_group' -- same as the original behavior.
    key_name = f"{key_prefix}{data_key}:{rule_key}:{dt}"
    redis_data = {item['group']: item[out_col] for _, item in user_group_df.iterrows()}
    redis_data['mean_group'] = user_group_df[out_col].mean()
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return user_group_df


def predict_user_group_share_rate_with_ad(user_group_initial_df, dt, data_params, rule_params, param):
    """Estimate each user group's share rate when ads are shown."""
    return _predict_group_share_rate(
        user_group_initial_df, dt, data_params, rule_params, param,
        rate_col='ad_rate', cond_share_col='adrate_share',
        out_col='group_ad_share_rate', key_prefix=config_.KEY_NAME_PREFIX_GROUP_WITH_AD)


def predict_user_group_share_rate_no_ad(user_group_initial_df, dt, data_params, rule_params, param):
    """Estimate each user group's share rate when no ads are shown."""
    return _predict_group_share_rate(
        user_group_initial_df, dt, data_params, rule_params, param,
        rate_col='no_ad_rate', cond_share_col='no_adrate_share',
        out_col='group_no_ad_share_rate', key_prefix=config_.KEY_NAME_PREFIX_GROUP_NO_AD)
+
+
def update_users_data(project, table, dt, update_params):
    """Update predicted user-group share rates (with and without ads).

    Loads the group features once, then runs both predictors for every
    configured (data, rule) parameter pair.
    """
    # Fetch the user-group features for partition dt.
    user_group_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    data_params = update_params.get('data_params')
    rule_params = update_params.get('rule_params')
    for param in update_params.get('params_list'):
        log_.info(f"param = {param} update start...")
        for predictor in (predict_user_group_share_rate_with_ad,
                          predict_user_group_share_rate_no_ad):
            predictor(user_group_initial_df=user_group_initial_df,
                      dt=dt,
                      data_params=data_params,
                      rule_params=rule_params,
                      param=param)
        log_.info(f"param = {param} update end!")
+
+
def timer_check():
    """Poll the daily user-group table and run the update once data is ready.

    Re-schedules itself every 60 seconds via a Timer until data for today's
    partition is available; alerts feishu on any exception.
    """
    try:
        update_params = config_.AD_USER_PARAMS_NEW_STRATEGY
        model_cfg = config_.ad_model_data['users_share_rate_new_strategy']
        project = model_cfg.get('project')
        table = model_cfg.get('table')
        dt = datetime.datetime.strftime(datetime.datetime.today(), '%Y%m%d')
        log_.info(f"now_date: {dt}")
        # Is today's partition available yet?
        data_count = data_check(project=project, table=table, dt=dt)
        if data_count > 0:
            log_.info(f"ad user group data count = {data_count}")
            update_users_data(project=project, table=table, dt=dt, update_params=update_params)
            log_.info(f"ad user group data update end!")
        else:
            # Not ready yet: check again in one minute.
            Timer(60, timer_check).start()

    except Exception as e:
        log_.error(f"新策略 -- 用户组分享率预测数据更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        send_msg_to_feishu(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            msg_text=f"rov-offline{config_.ENV_TEXT} - 新策略 -- 用户组分享率预测数据更新失败\n"
                     f"exception: {e}\n"
                     f"traceback: {traceback.format_exc()}"
        )


if __name__ == '__main__':
    timer_check()

+ 9 - 0
ad_user_data_update_with_new_strategy_task.sh

@@ -0,0 +1,9 @@
# Run the user-group share-rate update from the checkout matching the environment.
source /etc/profile
echo $ROV_OFFLINE_ENV
# 'test' -> staging checkout under /data2; 'pro' -> production under /data.
# Any other value: do nothing.
if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
    cd /data2/rov-offline &&
    /root/anaconda3/bin/python /data2/rov-offline/ad_user_data_update_with_new_strategy.py
elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
    cd /data/rov-offline &&
    /root/anaconda3/bin/python /data/rov-offline/ad_user_data_update_with_new_strategy.py
fi

+ 243 - 0
ad_video_data_update_with_new_strategy.py

@@ -0,0 +1,243 @@
+import datetime
+import traceback
+import multiprocessing
+from threading import Timer
+from utils import RedisHelper, data_check, get_feature_data, send_msg_to_feishu, send_msg_to_feishu_new
+from config import set_config
+from log import Log
+config_, _ = set_config()
+log_ = Log()
+redis_helper = RedisHelper()
+
# Columns fetched from the per-video feature table.
features = [
    'apptype',
    'videoid',
    'ad_type',  # 0: all, 1: self-operated, 2: WeChat
    'sharerate',   # P(video shared)
    'no_ad_rate',  # P(no ad shown)
    'no_adrate_share',  # P(no ad shown | video shared)
    'ad_rate',  # P(ad shown)
    'adrate_share',  # P(ad shown | video shared)
]
+
+
def get_top10_abnormal_videos_return(dt, filter_param):
    """Find yesterday's abnormal (fission) videos within each app's top10.

    A video counts as abnormal when its return multiple exceeds
    ``filter_param``. For each app type the resulting list contains every
    top-ranked video up to and including the last abnormal one.

    :param dt: date partition, format '%Y%m%d'
    :param filter_param: multiple threshold above which a video is abnormal
    :return: {app_type: [video_id, ...]}; empty dict when no data or no
        abnormal videos
    """
    top10_cfg = config_.ad_model_data['top10_videos']
    abnormal_video_project = top10_cfg.get('project')
    abnormal_video_table = top10_cfg.get('table')
    abnormal_video_features = [
        'apptype', 'videoid', 'yesterday_return', 'rank', 'multiple'
    ]
    top10_abnormal_videos = {}
    data_count = data_check(project=abnormal_video_project, table=abnormal_video_table, dt=dt)
    if data_count > 0:
        abnormal_video_df = get_feature_data(project=abnormal_video_project, table=abnormal_video_table,
                                             features=abnormal_video_features, dt=dt)
        abnormal_video_df['multiple'].fillna(0, inplace=True)
        for col in ('apptype', 'videoid', 'yesterday_return', 'rank'):
            abnormal_video_df[col] = abnormal_video_df[col].astype(int)
        abnormal_video_df['multiple'] = abnormal_video_df['multiple'].astype(float)
        for app_type in set(abnormal_video_df['apptype'].tolist()):
            ranked_df = abnormal_video_df[abnormal_video_df['apptype'] == app_type]
            ranked_df = ranked_df.sort_values(by=['rank'], ascending=True)
            seen_video_ids = []
            for _, item in ranked_df.iterrows():
                seen_video_ids.append(int(item['videoid']))
                if item['multiple'] > filter_param:
                    # Flag every video ranked at or above this abnormal one.
                    top10_abnormal_videos[app_type] = seen_video_ids.copy()
    log_.info(f"top10_abnormal_videos = {top10_abnormal_videos}")
    return top10_abnormal_videos
+
+
def _predict_video_share_rate(video_initial_df, dt, data_key, data_param, top10_abnormal_videos,
                              rate_col, cond_share_col, out_col, key_prefix):
    """Shared implementation of the per-video share-rate estimate.

    Computes, per video, P(share | ad-state) via Bayes:
        P(share | ad-state) = P(ad-state | share) * P(share) / P(ad-state)
    Per-video rates are written to a redis zset keyed by video id; the mean
    over non-abnormal videos is stored under the special member -1. The zset
    expires after 2 days.

    :param rate_col: column holding P(ad-state), e.g. 'ad_rate' / 'no_ad_rate'
    :param cond_share_col: column holding P(ad-state | share)
    :param out_col: name of the computed share-rate column
    :param key_prefix: redis key prefix for the result zset
    :return: dataframe with abnormal videos removed, including ``out_col``
    """
    video_df = video_initial_df.copy()
    # Keep only the "all ad types" rows (ad_type == 0) for the target app type.
    video_df['ad_type'] = video_df['ad_type'].astype(int)
    video_df = video_df[video_df['ad_type'] == 0]
    video_df['apptype'] = video_df['apptype'].astype(int)
    video_df = video_df[video_df['apptype'] == int(data_param)]
    log_.info(f"video_df length: {len(video_df)}")
    # Normalize probability columns: missing -> 0, then float.
    for col in (rate_col, 'sharerate', cond_share_col):
        video_df[col] = video_df[col].fillna(0).astype(float)

    # Drop rows with P(ad-state) == 0 to avoid division by zero, then apply Bayes.
    video_df = video_df[video_df[rate_col] != 0]
    video_df[out_col] = video_df[cond_share_col] * video_df['sharerate'] / video_df[rate_col]
    video_df[out_col] = video_df[out_col].fillna(0)
    log_.info(f"video_df filtered 0 length: {len(video_df)}")

    # Per-video entries keep every video; only the mean excludes abnormal ones.
    key_name = f"{key_prefix}{data_key}:{dt}"
    redis_data = {int(item['videoid']): item[out_col] for _, item in video_df.iterrows()}

    # Exclude yesterday's abnormal top10 videos before computing the mean (-1).
    video_df['videoid'] = video_df['videoid'].astype(int)
    top10_abnormal_video_ids = top10_abnormal_videos.get(int(data_param), None)
    if top10_abnormal_video_ids is not None:
        video_df = video_df[~video_df['videoid'].isin(top10_abnormal_video_ids)]
    redis_data[-1] = video_df[out_col].mean()
    log_.info(f"redis_data count: {len(redis_data)}")
    if len(redis_data) > 0:
        redis_helper = RedisHelper()
        redis_helper.add_data_with_zset(key_name=key_name, data=redis_data, expire_time=2 * 24 * 3600)
    return video_df


def predict_video_share_rate_with_ad(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Estimate each video's probability of being shared when ads are shown."""
    return _predict_video_share_rate(
        video_initial_df, dt, data_key, data_param, top10_abnormal_videos,
        rate_col='ad_rate', cond_share_col='adrate_share',
        out_col='video_ad_share_rate', key_prefix=config_.KEY_NAME_PREFIX_VIDEO_WITH_AD)


def predict_video_share_rate_no_ad(video_initial_df, dt, data_key, data_param, top10_abnormal_videos):
    """Estimate each video's probability of being shared when no ads are shown."""
    return _predict_video_share_rate(
        video_initial_df, dt, data_key, data_param, top10_abnormal_videos,
        rate_col='no_ad_rate', cond_share_col='no_adrate_share',
        out_col='video_no_ad_share_rate', key_prefix=config_.KEY_NAME_PREFIX_VIDEO_NO_AD)
+
+
def update_videos_data(project, table, dt, update_params, top10_abnormal_videos):
    """Update predicted per-video share rates (with and without ads).

    Loads the video features once, then runs both predictors for every
    configured data key.
    """
    # Fetch the per-video features for partition dt.
    video_initial_df = get_feature_data(project=project, table=table, features=features, dt=dt)
    for data_key, data_param in update_params.items():
        log_.info(f"data_key = {data_key} update start...")
        log_.info(f"predict_video_share_rate_with_ad start...")
        predict_video_share_rate_with_ad(video_initial_df=video_initial_df,
                                         dt=dt,
                                         data_key=data_key,
                                         data_param=data_param,
                                         top10_abnormal_videos=top10_abnormal_videos)
        log_.info(f"predict_video_share_rate_with_ad end!")

        log_.info(f"predict_video_share_rate_no_ad start...")
        predict_video_share_rate_no_ad(video_initial_df=video_initial_df,
                                       dt=dt,
                                       data_key=data_key,
                                       data_param=data_param,
                                       top10_abnormal_videos=top10_abnormal_videos)
        log_.info(f"predict_video_share_rate_no_ad end!")

        log_.info(f"data_key = {data_key} update end!")
+
+
def timer_check(dt, video_key, video_params, top10_abnormal_videos):
    """Wait until the source table for ``video_key`` is ready, then update.

    Re-schedules itself every 60 seconds via a Timer until data for ``dt``
    shows up; notifies feishu when the update finishes.
    """
    log_.info(f"video_key = {video_key}")
    model_cfg = config_.ad_model_data[video_key]
    project = model_cfg.get('project')
    table = model_cfg.get('table')
    # Is the partition for dt available yet?
    data_count = data_check(project=project, table=table, dt=dt)
    if data_count > 0:
        log_.info(f"ad video data count = {data_count}")
        # Data is ready -- run the update.
        update_videos_data(project=project, table=table, dt=dt, update_params=video_params,
                           top10_abnormal_videos=top10_abnormal_videos)
        log_.info(f"video_key = {video_key} ad video data update end!")
        finished_at = datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')
        send_msg_to_feishu_new(
            webhook=config_.FEISHU_ROBOT['ad_video_update_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['ad_video_update_robot'].get('key_word'),
            title='新策略 -- 广告模型视频分享率预测数据更新完成',
            msg_list=[
                f"env: rov-offline {config_.ENV_TEXT}",
                f"video_key: {video_key}",
                f"now_date: {dt}",
                f"finished time: {finished_at}",
            ]
        )

    else:
        # Not ready yet: check again in one minute.
        Timer(60, timer_check, args=[dt, video_key, video_params, top10_abnormal_videos]).start()
+
+
def main():
    """Refresh video share-rate predictions for every configured data set.

    Spawns one worker process per data set; each polls its own source table
    until ready. Alerts feishu on any exception.
    """
    try:
        dt = datetime.datetime.strftime(datetime.datetime.today(), '%Y%m%d')
        log_.info(f"now_date: {dt}")
        # Abnormal (fission) videos from yesterday's per-app top10 are
        # excluded from the mean share rates.
        top10_abnormal_videos = get_top10_abnormal_videos_return(
            dt=dt, filter_param=config_.ad_model_data['top10_videos'].get('abnormal_filter_param')
        )
        update_params = config_.AD_VIDEO_DATA_PARAMS_NEW_STRATEGY
        # One process per video data set.
        pool = multiprocessing.Pool(processes=len(update_params))
        for video_key, video_params in update_params.items():
            pool.apply_async(func=timer_check,
                             args=(dt, video_key, video_params, top10_abnormal_videos))
        pool.close()
        pool.join()

    except Exception as e:
        log_.error(f"新策略 -- 广告模型视频分享率预测数据更新失败, exception: {e}, traceback: {traceback.format_exc()}")
        send_msg_to_feishu_new(
            webhook=config_.FEISHU_ROBOT['server_robot'].get('webhook'),
            key_word=config_.FEISHU_ROBOT['server_robot'].get('key_word'),
            title='新策略 -- 广告模型视频分享率预测数据更新失败',
            msg_list=[
                f"env: rov-offline {config_.ENV_TEXT}",
                f"now time: {datetime.datetime.strftime(datetime.datetime.now(), '%Y%m%d %H:%M:%S')}",
                f"exception: {e}",
                f"traceback: {traceback.format_exc()}",
            ]
        )


if __name__ == '__main__':
    main()

+ 9 - 0
ad_video_data_update_with_new_strategy_task.sh

@@ -0,0 +1,9 @@
# Run the video share-rate update from the checkout matching the environment.
source /etc/profile
echo $ROV_OFFLINE_ENV
# 'test' -> staging checkout under /data2; 'pro' -> production under /data.
# Any other value: do nothing.
if [[ $ROV_OFFLINE_ENV == 'test' ]]; then
    cd /data2/rov-offline &&
    /root/anaconda3/bin/python /data2/rov-offline/ad_video_data_update_with_new_strategy.py
elif [[ $ROV_OFFLINE_ENV == 'pro' ]]; then
    cd /data/rov-offline &&
    /root/anaconda3/bin/python /data/rov-offline/ad_video_data_update_with_new_strategy.py
fi

+ 74 - 0
config.py

@@ -821,6 +821,15 @@ class BaseConfig(object):
             'project': 'loghubods',
             'table': 'video_data_with_out_admodel_alladtype'
         },  # 以是否直接跳出为目标的视频侧数据:所有广告类型数据,按照videoId统计(各视频出广告的概率,各视频被直接跳出的概率,各视频被直接跳出的情况下出广告的概率)
+
+        'videos_share_rate_new_strategy': {
+            'project': 'loghubods',
+            'table': 'video_data_with_ad_sharerate_adtype'
+        },  # 新策略使用视频侧数据:所有广告类型数据,按照videoId统计(各视频被分享的概率,各视频出广告的概率,各视频被分享的情况下出广告的概率,各视频不出广告的概率,各视频被分享的情况下不出广告的概率)
+        'users_share_rate_new_strategy': {
+            'project': 'loghubods',
+            'table': 'usergroup_data_with_ad_sharerate_adtype'
+        },  # 新策略使用用户侧数据:按照用户分组统计(各用户组的分享率,各用户组出广告的概率,各用户组有分享的情况下出广告的概率,各用户组不出广告的概率,各用户组有分享的情况下不出广告的概率)
     }
 
     # 自动调整广告模型阈值数据
@@ -896,6 +905,21 @@ class BaseConfig(object):
         },
     }
 
+    AD_VIDEO_DATA_PARAMS_NEW_STRATEGY = {
+        # 所有广告类型视频数据
+        'videos_share_rate_new_strategy': {
+            'videos0': APP_TYPE['VLOG'],  # vlog
+            'videos4': APP_TYPE['LOVE_LIVE'],  # 票圈视频
+            'videos6': APP_TYPE['SHORT_VIDEO'],  # 票圈短视频
+            'videos5': APP_TYPE['LONG_VIDEO'],  # 内容精选
+            'videos21': APP_TYPE['PIAO_QUAN_VIDEO_PLUS'],  # 票圈视频+
+            'videos3': APP_TYPE['BLESSING_YEAR'],  # 票圈福年
+            'videos22': APP_TYPE['JOURNEY'],  # 票圈足迹
+            'videos18': APP_TYPE['LAO_HAO_KAN_VIDEO'],  # 老好看视频
+            'videos19': APP_TYPE['ZUI_JING_QI'],  # 票圈最惊奇
+        },
+    }
+
     # 广告模型异常视频数据处理参数
     AD_ABNORMAL_VIDEOS_PARAM = {
         'data1': 17/48,  # vlog
@@ -1085,6 +1109,31 @@ class BaseConfig(object):
         ]
     }
 
+    # 新策略使用 - 广告模型用户数据
+    AD_USER_PARAMS_NEW_STRATEGY = {
+        'data_params': {
+            'user0': APP_TYPE['VLOG'],  # vlog
+            'user3': APP_TYPE['BLESSING_YEAR'],  # 票圈福年
+            'user4': APP_TYPE['LOVE_LIVE'],  # 票圈视频
+            'user5': APP_TYPE['LONG_VIDEO'],  # 内容精选
+            'user6': APP_TYPE['SHORT_VIDEO'],  # 票圈短视频
+            'user18': APP_TYPE['LAO_HAO_KAN_VIDEO'],  # 老好看视频
+            'user19': APP_TYPE['ZUI_JING_QI'],  # 票圈最惊奇
+            'user21': APP_TYPE['PIAO_QUAN_VIDEO_PLUS'],  # 票圈视频+
+            'user22': APP_TYPE['JOURNEY'],  # 票圈足迹
+        },
+        'rule_params': {
+            'rule1': {
+                'group_list': AD_MID_GROUP['class1'],
+            },
+        },
+        'params_list': [
+            {'data': 'user0', 'rule': 'rule1'},  # 票圈vlog + 优化阈值计算方式
+            {'data': 'user4', 'rule': 'rule1'},  # 票圈视频 + 优化阈值计算方式
+            {'data': 'user5', 'rule': 'rule1'},  # 内容精选 + 优化阈值计算方式
+        ]
+    }
+
     # 广告模型abtest配置
     AD_ABTEST_CONFIG = {
         # 票圈vlog
@@ -2224,6 +2273,23 @@ class BaseConfig(object):
     # 广告推荐自动调整阈值参数记录存放 redis key,完整格式:ad:threshold:param:record
     KEY_NAME_PREFIX_AD_THRESHOLD_PARAM_RECORD = 'ad:threshold:param:record'
 
+    # 新策略使用
+    # 视频有广告时的分享率预测结果存放 redis key 前缀,完整格式:video:predict:share:rate:with:ad:{video_data_key}:{date}
+    KEY_NAME_PREFIX_VIDEO_WITH_AD = 'video:predict:share:rate:with:ad:'
+    # 视频无广告时的分享率预测结果存放 redis key 前缀,完整格式:video:predict:share:rate:no:ad:{video_data_key}:{date}
+    KEY_NAME_PREFIX_VIDEO_NO_AD = 'video:predict:share:rate:no:ad:'
+    # 用户组有广告时的分享率预测结果存放 redis key 前缀,完整格式:users:group:predict:share:rate:with:ad:{user_data_key}:{user_rule_key}:{date}
+    KEY_NAME_PREFIX_GROUP_WITH_AD = 'users:group:predict:share:rate:with:ad:'
+    # 用户组无广告时的分享率预测结果存放 redis key 前缀,完整格式:users:group:predict:share:rate:no:ad:{user_data_key}:{user_rule_key}:{date}
+    KEY_NAME_PREFIX_GROUP_NO_AD = 'users:group:predict:share:rate:no:ad:'
+
+    # 自营广告及微信广告ecpm值存放 redis key
+    KEY_NAME_AD_ECPM = 'ad:ecpm'
+    # 上一周期arpu值结果存放 redis key
+    KEY_NAME_AD_ARPU = 'ad:arpu'
+    # 计算roi使用参数存放 redis key
+    KEY_NAME_AD_ROI_PARAM = 'ad:roi:param'
+
 
 class DevelopmentConfig(BaseConfig):
     """开发环境配置"""
@@ -2300,6 +2366,8 @@ class DevelopmentConfig(BaseConfig):
     GET_VIDEO_LIMIT_LIST_URL = 'http://videotest-internal.yishihui.com/longvideoapi/openapi/recommend/getVideoLimitList'
     # 获取管理后台设置的广告目标uv值接口地址
     GET_AD_TARGET_UV_URL = 'https://testadmin.piaoquantv.com/manager/ad/algo/threshold/productUvTargetList'
+    # 获取广告ecpm值接口地址
+    GET_AD_ECPM_URL = 'https://testapi.piaoquantv.com/ad/getAdEcpmInfo'
 
     # # logs 上传oss 目标Bucket指定目录
     # OSS_FOLDER_LOGS = 'rov-offline/dev/logs/'
@@ -2382,6 +2450,8 @@ class TestConfig(BaseConfig):
     GET_VIDEO_LIMIT_LIST_URL = 'http://videotest-internal.yishihui.com/longvideoapi/openapi/recommend/getVideoLimitList'
     # 获取管理后台设置的广告目标uv值接口地址
     GET_AD_TARGET_UV_URL = 'https://testadmin.piaoquantv.com/manager/ad/algo/threshold/productUvTargetList'
+    # 获取广告ecpm值接口地址
+    GET_AD_ECPM_URL = 'https://testapi.piaoquantv.com/ad/getAdEcpmInfo'
 
     # # logs 上传oss 目标Bucket指定目录
     # OSS_FOLDER_LOGS = 'rov-offline/test/logs/'
@@ -2464,6 +2534,8 @@ class PreProductionConfig(BaseConfig):
     GET_VIDEO_LIMIT_LIST_URL = 'http://prespeed-internal.piaoquantv.com/longvideoapi/openapi/recommend/getVideoLimitList'
     # 获取管理后台设置的广告目标uv值接口地址
     GET_AD_TARGET_UV_URL = 'https://preadmin.piaoquantv.com/manager/ad/algo/threshold/productUvTargetList'
+    # 获取广告ecpm值接口地址
+    GET_AD_ECPM_URL = 'https://preapi.piaoquantv.com/ad/getAdEcpmInfo'
 
     # # logs 上传oss 目标Bucket指定目录
     # OSS_FOLDER_LOGS = 'rov-offline/pre/logs/'
@@ -2546,6 +2618,8 @@ class ProductionConfig(BaseConfig):
     GET_VIDEO_LIMIT_LIST_URL = 'http://recommend-common-internal.piaoquantv.com/longvideoapi/openapi/recommend/getVideoLimitList'
     # 获取管理后台设置的广告目标uv值接口地址
     GET_AD_TARGET_UV_URL = 'https://admin.piaoquantv.com/manager/ad/algo/threshold/productUvTargetList'
+    # 获取广告ecpm值接口地址
+    GET_AD_ECPM_URL = 'https://api.piaoquantv.com/ad/getAdEcpmInfo'
 
     # # logs 上传oss 目标Bucket指定目录
     # OSS_FOLDER_LOGS = 'rov-offline/pro/logs/'