123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194 |
- #! /usr/bin/env python
- # -*- coding: utf-8 -*-
- # vim:fenc=utf-8
- #
- # Copyright © 2024 StrayWarrior <i@straywarrior.com>
- import sys
- import os
- sys.path.append(os.path.join(os.path.dirname(__file__), 'src'))
- import time
- import json
- from datetime import datetime, timedelta
- import pandas as pd
- from argparse import ArgumentParser
- from long_articles.category_models import CategoryRegressionV1
- from long_articles.consts import category_feature_v2, category_name_map_v2, reverse_category_name_map_v2
- from common.database import MySQLManager
- from common import db_operation
- from common.logging import LOG
- from config.dev import Config
# Accounts exempt from the "published before 15:00" filter in prepare_raw_data():
# their articles are kept in the training sample regardless of publish hour.
# NOTE(review): presumably these are accounts that publish in the evening — confirm with owner.
NIGHT_ACCOUNTS = ('gh_12523d39d809','gh_df4a630c04db','gh_f67df16f4670','gh_ca44517edda9','gh_a66c1316fd5e','gh_4242c478bbba','gh_60b0c23fcc7c','gh_33b3470784fc','gh_ec1bcb283daf','gh_234ab9ff490d','gh_7715a626a4c6','gh_1bfe1d257728','gh_9db5e3ac2c93','gh_9d1ae5f9ceac','gh_7208b813f16d','gh_e56ddf195d91','gh_a43aecffe81b','gh_d4a7d2ce54fd','gh_c2b458818b09','gh_349a57ef1c44','gh_89bfe54ad90f','gh_b929ed680b62','gh_f8e8a931ff56','gh_916f4fad5ce0','gh_0d7c5f4c38a9','gh_bceef3f747c2','gh_706456719017','gh_fd51a5e33fc6','gh_5372093f5fb0','gh_957ff8e08e1b','gh_64fc629d3ec2','gh_c8b69797912a','gh_6909b38ad95f','gh_1e69a1b4dc1a','gh_0763523103e4','gh_9b83a9ad7da0','gh_82b416f27698','gh_a60647e98cd9','gh_3ce2fa1956ea','gh_44127c197525','gh_06834aba13a5','gh_c33809af68bc','gh_82cf39ef616e','gh_a342ef23c48e','gh_c9cc1471af7d','gh_291ec369f017','gh_810a439f320a','gh_00f942061a0d','gh_7662653b0e77','gh_d192d757b606','gh_391702d26b3b','gh_3e90f421c974','gh_30d189fe56c7','gh_7ebfbbf675ee','gh_3f84c2b9a1a2','gh_bccbe3681e22','gh_005fc1cb4b73','gh_21d120007b64','gh_3d5f24fd3311','gh_3621aaa6c4a0','gh_aee2dca32701','gh_c25c6040c4b2','gh_641019d44876','gh_95ba63e5cf18','gh_efd90dcf48ac','gh_5e1464b76ff6','gh_5765f834684c','gh_81bec2f4f577','gh_401396006e13','gh_7c33726c5147','gh_bbd8a52ba98b','gh_f74ca3104604'
)
def prepare_raw_data(dt_begin, dt_end):
    """Load per-article score rows from `datastat_score` for [dt_begin, dt_end].

    Filters applied in SQL: positive similarity, non-null category,
    read_avg > 500, read_avg_rate in [0.3, 3], positive view_count_rate,
    index 1 or 2, published before 15:00 (or account in NIGHT_ACCOUNTS),
    and dt outside the excluded 20250105-20250215 window.

    Returns a DataFrame deduplicated on (dt, gh_id, title), with the
    backtick-quoted `index` column renamed to plain 'index'.
    """
    columns = ['dt', 'gh_id', 'account_name', 'title', 'similarity',
               'view_count_rate', 'category', 'read_avg',
               'read_avg_rate', 'first_pub_interval', '`index`']
    select_list = ','.join(columns)
    # str() on the tuple renders a valid SQL IN-list of quoted gh_ids
    night_accounts_condition = str(NIGHT_ACCOUNTS)
    query = f"""
        SELECT {select_list} FROM datastat_score WHERE dt BETWEEN {dt_begin} AND {dt_end}
        AND similarity > 0 AND category IS NOT NULL AND read_avg > 500
        AND read_avg_rate BETWEEN 0.3 AND 3 AND view_count_rate > 0
        AND `index` in (1, 2)
        AND (FROM_UNIXTIME(coalesce(publish_timestamp, 0), '%H') < '15'
        OR gh_id in {night_accounts_condition})
        AND dt NOT BETWEEN 20250105 AND 20250215
    """
    manager = MySQLManager(Config().MYSQL_LONG_ARTICLES)
    records = manager.select(query)
    frame = pd.DataFrame(records, columns=columns)
    frame = frame.rename(columns={'`index`': 'index'})
    return frame.drop_duplicates(['dt', 'gh_id', 'title'])
def clear_old_version(db_manager, dt):
    """Deactivate all version-2 `account_category` rows older than *dt*.

    Sets status = 0 and refreshes update_timestamp on every active (status=1)
    version-2 row with dt < *dt*, then prints the affected-row count.
    """
    now_ts = int(time.time())
    statement = f"""
        UPDATE account_category
        SET status = 0, update_timestamp = {now_ts}
        WHERE dt < {dt} and status = 1 and version = 2
    """
    affected = db_manager.execute(statement)
    print(f"updated rows for clear: {affected}")
def get_last_version(db_manager, dt):
    """Return (gh_id, category_map) rows of the latest active snapshot before *dt*.

    The snapshot is the maximum dt strictly less than *dt* that still has
    status = 1 rows in `account_category`.
    """
    query = f"""
        SELECT gh_id, category_map
        FROM account_category
        WHERE dt = (SELECT max(dt) FROM account_category WHERE dt < {dt} AND
        status = 1)
    """
    return db_manager.select(query)
def compare_version(db_manager, dt_version, new_version, account_id_map):
    """Print a human-readable diff between the new records and the last DB snapshot.

    Reports accounts that appear only in the new version, accounts that
    disappeared, and per-account categories that were added or removed.
    *new_version* is the list of record dicts about to be saved.
    """
    previous = {
        row[0]: json.loads(row[1])
        for row in get_last_version(db_manager, dt_version)
    }
    current = {
        rec['gh_id']: json.loads(rec['category_map'])
        for rec in new_version
    }
    for gh_id in set(current) | set(previous):
        account_name = account_id_map.get(gh_id, None)
        if gh_id not in previous:
            print(f"new account {account_name}: {current[gh_id]}")
            continue
        if gh_id not in current:
            print(f"old account {account_name}: {previous[gh_id]}")
            continue
        fresh, stale = current[gh_id], previous[gh_id]
        for cate, weight in fresh.items():
            if cate not in stale:
                print(f"account {account_name} new cate: {cate} {weight}")
        for cate, weight in stale.items():
            if cate not in fresh:
                print(f"account {account_name} old cate: {cate} {weight}")
def main():
    """Fit a per-account OLS category regression and persist category weights.

    Pipeline:
      1. Pull scored article rows (prepare_raw_data) for a fixed start date
         through yesterday (or --run-at minus one day).
      2. Preprocess and, per account with >= 5 samples, run OLS; keep
         categories whose |coef| > 0.1 and p-value < 0.15, scaled and
         truncated to [-0.25, 0.25].
      3. Dry-run: diff against the last DB snapshot and exit.
         Otherwise insert-ignore into `account_category`, update rows whose
         category_map changed, and deactivate older versions.

    CLI flags: -n/--dry-run, --run-at YYYYMMDD, --print-matrix,
    --print-residual.
    """
    parser = ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true', help='do not update database')
    parser.add_argument('--run-at', help='dt, also for version')
    parser.add_argument('--print-matrix', action='store_true')
    parser.add_argument('--print-residual', action='store_true')
    args = parser.parse_args()

    run_date = datetime.today()
    if args.run_at:
        run_date = datetime.strptime(args.run_at, "%Y%m%d")
    # Fixed start of the training window -- TODO confirm why 20240914.
    begin_dt = 20240914
    end_dt = (run_date - timedelta(1)).strftime("%Y%m%d")
    dt_version = end_dt
    LOG.info(f"data range: {begin_dt} - {end_dt}")

    raw_df = prepare_raw_data(begin_dt, end_dt)
    cate_model = CategoryRegressionV1(category_feature_v2, category_name_map_v2)
    df = cate_model.preprocess_data(raw_df)

    if args.dry_run and args.print_matrix:
        cate_model.build_and_print_matrix(df)
        return

    create_timestamp = int(time.time())
    update_timestamp = create_timestamp
    records_to_save = []
    param_to_category_map = reverse_category_name_map_v2
    account_ids = df['gh_id'].unique()
    account_id_map = df[['account_name', 'gh_id']].drop_duplicates() \
        .set_index('gh_id')['account_name'].to_dict()
    account_negative_cates = {k: [] for k in account_ids}
    P_VALUE_THRESHOLD = 0.15
    for account_id in account_ids:
        sub_df = df[df['gh_id'] == account_id]
        account_name = account_id_map[account_id]
        sample_count = len(sub_df)
        if sample_count < 5:
            # too few articles for a meaningful regression
            continue
        params, t_stats, p_values = cate_model.run_ols_linear_regression(
            sub_df, args.print_residual, P_VALUE_THRESHOLD)
        current_record = {}
        current_record['dt'] = dt_version
        current_record['gh_id'] = account_id
        current_record['category_map'] = {}
        param_names = cate_model.get_param_names()
        for name, param, p_value in zip(param_names, params, p_values):
            cate_name = param_to_category_map.get(name, None)
            # Category relevance used for ranking: keep significant, non-trivial
            # coefficients, damped by p-value and clipped to [-0.25, 0.25].
            if abs(param) > 0.1 and p_value < P_VALUE_THRESHOLD and cate_name is not None:
                scale_factor = min(0.1 / p_value, 1)
                print(f"{account_id} {account_name} {cate_name} {param:.3f} {p_value:.3f}")
                truncate_param = round(max(min(param, 0.25), -0.25) * scale_factor, 6)
                current_record['category_map'][cate_name] = truncate_param
            # Negative categories, collected for cold-start article allocation
            # (currently only computed, not persisted -- see end of function).
            if param < -0.1 and cate_name is not None and p_value < P_VALUE_THRESHOLD:
                account_negative_cates[account_id].append(cate_name)
        if not current_record['category_map']:
            # no significant category: skip this account entirely
            continue
        current_record['category_map'] = json.dumps(current_record['category_map'], ensure_ascii=False)
        current_record['status'] = 1
        current_record['version'] = 2
        current_record['create_timestamp'] = create_timestamp
        current_record['update_timestamp'] = update_timestamp
        records_to_save.append(current_record)

    db_manager = MySQLManager(Config().MYSQL_LONG_ARTICLES)
    if args.dry_run:
        compare_version(db_manager, dt_version, records_to_save, account_id_map)
        return

    rows = db_manager.batch_insert('account_category', records_to_save, ignore=True)
    if rows != len(records_to_save):
        # Some rows already existed (insert-ignore skipped them); refresh any
        # whose category_map actually changed.
        for record in records_to_save:
            # BUGFIX: double up single quotes so a category name containing "'"
            # cannot break out of the SQL string literal (syntax error/injection).
            category_map_sql = record['category_map'].replace("'", "''")
            sql = f"""
                UPDATE account_category
                SET category_map = '{category_map_sql}',
                update_timestamp = {record['update_timestamp']}
                WHERE dt = {record['dt']} AND gh_id = '{record['gh_id']}'
                AND category_map != '{category_map_sql}'
                AND version = 2
            """
            update_rows = db_manager.execute(sql)
            print(f"updated rows: {update_rows}, {record['gh_id']}")

    clear_old_version(db_manager, dt_version)

    # Drop accounts with no negative categories (iterate over a snapshot of
    # the keys since we mutate the dict while looping).
    for account_id in [*account_negative_cates.keys()]:
        if not account_negative_cates[account_id]:
            account_negative_cates.pop(account_id)
    # print(json.dumps(account_negative_cates, ensure_ascii=False, indent=2))
if __name__ == '__main__':
    # Show full DataFrames in any debug printing done downstream.
    pd.set_option('display.max_columns', None)
    pd.set_option('display.max_rows', None)
    main()
|