# account_position_info.py
# Per-account, per-position statistics jobs: average read rate, read average
# (with confidence-interval upper bound), and average open rate.
  1. import asyncio
  2. import traceback
  3. import numpy as np
  4. from collections import defaultdict
  5. from typing import Dict, List, Set
  6. from pandas import DataFrame
  7. from scipy import stats
  8. from tqdm.asyncio import tqdm
  9. from datetime import datetime, timedelta
  10. class AccountPositionInfoConst:
  11. # 阅读率统计周期(秒)
  12. STATISTICS_PERIOD = 31 * 24 * 60 * 60
  13. # 一天的秒数
  14. ONE_DAY_IN_SECONDS = 60 * 60 * 24
  15. # 相对变化率阈值
  16. RELATIVE_VALUE_THRESHOLD = 0.1
  17. # 发文类型
  18. UNLIMITED_PUBLISH_TYPE = 10002
  19. BULK_PUBLISH_TYPE = 9
  20. # 文章位置
  21. ARTICLE_INDEX_LIST = [1, 2, 3, 4, 5, 6, 7, 8]
  22. # 默认粉丝
  23. DEFAULT_FANS = 0
  24. # 最低粉丝量
  25. MIN_FANS = 1000
  26. ARTICLES_DAILY = 1
  27. TOULIU = 2
  28. # 统计周期(天)
  29. STAT_PERIOD = 30
  30. # 默认点赞
  31. DEFAULT_LIKE = 0
  32. # 状态
  33. USING_STATUS = 1
  34. NOT_USING_STATUS = 0
  35. PUBLISH_SUCCESS_STATUS = 2
  36. # 不再使用的服务号
  37. NOT_USED_SERVER_ACCOUNT = {"gh_84e744b16b3a", "gh_5855bed97938", "gh_61a72b720de3"}
  38. # 违禁账号 or 迁移账号
  39. FORBIDDEN_GH_IDS = {
  40. "gh_4c058673c07e",
  41. "gh_de9f9ebc976b",
  42. "gh_7b4a5f86d68c",
  43. "gh_f902cea89e48",
  44. "gh_789a40fe7935",
  45. "gh_cd041ed721e6",
  46. "gh_62d7f423f382",
  47. "gh_043223059726",
  48. "gh_6cfd1132df94",
  49. "gh_7f5075624a50",
  50. "gh_d4dffc34ac39",
  51. "gh_c69776baf2cd",
  52. "gh_9877c8541764",
  53. "gh_ac43e43b253b",
  54. "gh_93e00e187787",
  55. "gh_080bb43aa0dc",
  56. "gh_b1c71a0e7a85",
  57. "gh_d5f935d0d1f2",
  58. "gh_6b7c2a257263",
  59. "gh_bfe5b705324a",
  60. "gh_7e5818b2dd83",
  61. "gh_a2901d34f75b",
  62. "gh_5ae65db96cb7",
  63. "gh_72bace6b3059",
  64. "gh_dd4c857bbb36",
  65. }
  66. # 投流账号
  67. TOULIU_ACCOUNTS = {
  68. "小阳看天下",
  69. "趣味生活方式",
  70. "趣味生活漫时光",
  71. "史趣探秘",
  72. "暖心一隅",
  73. "趣味生活漫谈",
  74. "历史长河流淌",
  75. "美好意义时光",
  76. "银发生活畅谈",
  77. "美好时光阅读汇",
  78. "时光趣味生活",
  79. "生活慢时光",
  80. }
  81. class AccountPositionReadRateAvg(AccountPositionInfoConst):
  82. """计算账号每个位置评价阅读率"""
  83. def __init__(self, pool, log_client, trace_id):
  84. self.pool = pool
  85. self.log_client = log_client
  86. self.trace_id = trace_id
  87. # 生成统计周期
  88. def generate_stat_duration(self, end_date: str) -> str:
  89. end_date_dt = datetime.strptime(end_date, "%Y-%m-%d")
  90. start_date_dt = end_date_dt - timedelta(seconds=self.STATISTICS_PERIOD)
  91. return start_date_dt.strftime("%Y-%m-%d")
  92. # 获取发文账号
  93. async def get_publishing_accounts(self):
  94. query = """
  95. select distinct
  96. t3.name as account_name,
  97. t3.gh_id as gh_id,
  98. group_concat(distinct t4.remark) as account_remark,
  99. t6.account_source_name as account_source,
  100. t6.mode_type as mode_type,
  101. t6.account_type as account_type,
  102. t6.`status` as status
  103. from
  104. publish_plan t1
  105. join publish_plan_account t2 on t1.id = t2.plan_id
  106. join publish_account t3 on t2.account_id = t3.id
  107. left join publish_account_remark t4 on t3.id = t4.publish_account_id
  108. left join wx_statistics_group_source_account t5 on t3.id = t5.account_id
  109. left join wx_statistics_group_source t6 on t5.group_source_name = t6.account_source_name
  110. where t1.plan_status = 1 and t1.content_modal = 3 and t3.channel = 5
  111. group by t3.id;
  112. """
  113. account_list = await self.pool.async_fetch(query, db_name="aigc")
  114. return [i for i in account_list if "自动回复" not in str(i["account_remark"])]
  115. # 获取服务号分组发文信息
  116. async def get_server_group_publish_accounts(self) -> Set[str]:
  117. query = """
  118. select gzh_id from article_gzh_developer;
  119. """
  120. fetch_response = await self.pool.async_fetch(
  121. query=query, db_name="piaoquan_crawler"
  122. )
  123. gh_id_list = [
  124. i["gzh_id"]
  125. for i in fetch_response
  126. if i["gzh_id"] not in self.NOT_USED_SERVER_ACCOUNT
  127. ]
  128. return set(gh_id_list)
  129. # 获取统计周期内,每个账号的粉丝量
  130. async def get_fans_for_each_date(self, start_date: str):
  131. # 获取订阅号粉丝量
  132. query = """
  133. SELECT t1.date_str as dt,
  134. CASE
  135. WHEN t1.fans_count IS NULL OR t1.fans_count = 0 THEN t2.follower_count
  136. ELSE t1.fans_count
  137. END AS fans,
  138. t2.gh_id as gh_id
  139. FROM datastat_wx t1 JOIN publish_account t2 ON t1.account_id = t2.id
  140. WHERE t2.channel = 5 AND t2.status = 1 AND t1.date_str >= %s;
  141. """
  142. task1 = self.pool.async_fetch(query=query, db_name="aigc", params=(start_date,))
  143. group_account_set = await self.get_server_group_publish_accounts()
  144. if group_account_set:
  145. query_group = f"""
  146. select publish_date as dt, gh_id, account_name, CAST(SUM(total_sent_fans) AS SIGNED) AS fans
  147. from (
  148. select publish_date, account_name, gh_id, push_id, avg(sent_count) as 'total_sent_fans'
  149. from long_articles_group_send_result
  150. where publish_date >= %s and status = %s
  151. group by publish_date, account_name, push_id
  152. ) as lagsr
  153. group by lagsr.publish_date, gh_id;
  154. """
  155. params_group = (start_date, self.PUBLISH_SUCCESS_STATUS)
  156. task2 = self.pool.async_fetch(query=query_group, params=params_group)
  157. else:
  158. # 没有 group 账号,返回空列表
  159. task2 = asyncio.sleep(0, result=[])
  160. account_with_fans, group_account_with_fans = await asyncio.gather(task1, task2)
  161. # 合并粉丝数据
  162. account_dt_fans_mapper: Dict[str, Dict[str, int]] = defaultdict(dict)
  163. # 订阅号
  164. for item in account_with_fans or []:
  165. gh_id = item["gh_id"]
  166. dt = item["dt"]
  167. fans = int(item.get("fans") or 0)
  168. account_dt_fans_mapper[gh_id][dt] = fans
  169. # 服务号(覆盖相同 gh_id + dt)
  170. for item in group_account_with_fans or []:
  171. gh_id = item["gh_id"]
  172. dt = item["dt"]
  173. fans = int(item.get("fans") or 0)
  174. account_dt_fans_mapper[gh_id][dt] = fans
  175. return account_dt_fans_mapper
  176. # 从数据库获取账号群发文章 && 群发数据
  177. async def get_single_account_published_articles(
  178. self, gh_id: str, start_timestamp: int
  179. ):
  180. query = """
  181. SELECT
  182. ghId as gh_id, accountName as account_name,
  183. ItemIndex as position,
  184. CAST(AVG(show_view_count) AS SIGNED) as read_count,
  185. FROM_UNIXTIME(publish_timestamp, '%%Y-%%m-%%d') AS pub_dt
  186. FROM
  187. official_articles_v2
  188. WHERE
  189. ghId = %s and Type = %s and publish_timestamp >= %s
  190. GROUP BY ghId, accountName, ItemIndex, pub_dt;
  191. """
  192. return await self.pool.async_fetch(
  193. query=query,
  194. db_name="piaoquan_crawler",
  195. params=(gh_id, self.BULK_PUBLISH_TYPE, start_timestamp),
  196. )
  197. # 计算单个账号的每篇文章的阅读率
  198. async def cal_read_rate_for_single_account(
  199. self,
  200. publish_details: List[Dict],
  201. gh_id: str,
  202. fans_mapper: Dict[str, Dict[str, int]],
  203. ) -> DataFrame | None:
  204. if not publish_details:
  205. return None
  206. article_list_with_fans = []
  207. for article in publish_details:
  208. fans = fans_mapper.get(gh_id, {}).get(article["pub_dt"], self.DEFAULT_FANS)
  209. if not fans:
  210. print(
  211. f"账号 {article['account_name']} 在 {article['pub_dt']} 没有粉丝数据"
  212. )
  213. continue
  214. article["fans"] = fans
  215. if fans > self.MIN_FANS:
  216. article["read_rate"] = article["read_count"] / fans if fans else 0
  217. article_list_with_fans.append(article)
  218. # 转化为 DataFrame 方便后续处理
  219. return DataFrame(
  220. article_list_with_fans,
  221. columns=[
  222. "gh_id",
  223. "account_name",
  224. "position",
  225. "read_count",
  226. "pub_dt",
  227. "fans",
  228. "read_rate",
  229. ],
  230. )
  231. # 更新账号阅读率均值并且更新数据库
  232. async def update_read_rate_avg_for_each_account(
  233. self,
  234. account: dict,
  235. start_date: str,
  236. end_dt: str,
  237. df: DataFrame,
  238. fans_dict: Dict[str, int],
  239. ):
  240. avg_date = (datetime.strptime(end_dt, "%Y-%m-%d") - timedelta(days=1)).strftime(
  241. "%Y-%m-%d"
  242. )
  243. insert_error_list = []
  244. for index in self.ARTICLE_INDEX_LIST:
  245. # 过滤
  246. filter_df = df[
  247. (df["position"] == index)
  248. & (df["pub_dt"] < end_dt)
  249. & (df["pub_dt"] >= start_date)
  250. ]
  251. read_average = filter_df["read_count"].mean()
  252. read_std = filter_df["read_count"].std()
  253. output_df = filter_df[
  254. (filter_df["read_count"] > read_average - 2 * read_std)
  255. & (filter_df["read_count"] < read_average + 2 * read_std)
  256. ]
  257. records = len(output_df)
  258. if records:
  259. # todo: 需要检查波动
  260. # if index <= 2:
  261. # print("position need to be checked")
  262. # insert
  263. try:
  264. insert_query = """
  265. INSERT INTO long_articles_read_rate
  266. (account_name, gh_id, position, read_rate_avg, remark, articles_count, earliest_publish_time, latest_publish_time, dt_version, is_delete, fans)
  267. VALUES
  268. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
  269. """
  270. await self.pool.async_save(
  271. query=insert_query,
  272. params=(
  273. account["account_name"],
  274. account["gh_id"],
  275. index,
  276. output_df["read_rate"].mean(),
  277. "从 {} 开始往前计算 31 天".format(start_date),
  278. records,
  279. output_df["pub_dt"].min(),
  280. output_df["pub_dt"].max(),
  281. avg_date.replace("-", ""),
  282. 0,
  283. fans_dict.get(avg_date, 0),
  284. ),
  285. )
  286. except Exception as e:
  287. insert_error_list.append(str(e))
  288. # 入口函数
  289. async def deal(self, end_date: str | None):
  290. if not end_date:
  291. end_date = datetime.now().strftime("%Y-%m-%d")
  292. start_dt = self.generate_stat_duration(end_date)
  293. fans_mapper = await self.get_fans_for_each_date(start_date=start_dt)
  294. accounts = await self.get_publishing_accounts()
  295. for account in tqdm(accounts, desc="计算单个账号阅读率均值"):
  296. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  297. continue
  298. published_articles = await self.get_single_account_published_articles(
  299. gh_id=account["gh_id"],
  300. start_timestamp=int(
  301. datetime.strptime(start_dt, "%Y-%m-%d").timestamp()
  302. ),
  303. )
  304. article_dataframe = await self.cal_read_rate_for_single_account(
  305. publish_details=published_articles,
  306. gh_id=account["gh_id"],
  307. fans_mapper=fans_mapper,
  308. )
  309. if article_dataframe is None:
  310. continue
  311. if article_dataframe.empty:
  312. continue
  313. await self.update_read_rate_avg_for_each_account(
  314. account=account,
  315. start_date=start_dt,
  316. end_dt=end_date,
  317. df=article_dataframe,
  318. fans_dict=fans_mapper.get(account["gh_id"], {}),
  319. )
  320. class AccountPositionReadAvg(AccountPositionReadRateAvg):
  321. # 计算阅读均值置信区间上限
  322. async def cal_read_avg_ci_upper(self, gh_id: str, index: int):
  323. fetch_query = f"""
  324. select read_avg, update_time
  325. from account_avg_info_v3
  326. where gh_id = %s and position = %s
  327. order by update_time desc limit 30;
  328. """
  329. fetch_response_list = await self.pool.async_fetch(
  330. query=fetch_query, db_name="piaoquan_crawler", params=(gh_id, index)
  331. )
  332. read_avg_list = [
  333. i["read_avg"] for i in fetch_response_list if i["read_avg"] is not None
  334. ]
  335. n = len(read_avg_list)
  336. mean = np.mean(read_avg_list)
  337. std = np.std(read_avg_list, ddof=1)
  338. se = std / np.sqrt(n)
  339. t = stats.t.ppf(0.975, df=n - 1)
  340. upper_t = mean + t * se
  341. return upper_t
  342. # 获取账号的阅读率均值信息
  343. async def get_accounts_read_avg(self, dt):
  344. query = """
  345. select gh_id, position, fans, read_rate_avg, fans * read_rate_avg as read_avg
  346. from long_articles_read_rate
  347. where dt_version = %s
  348. """
  349. fetch_result = await self.pool.async_fetch(
  350. query=query, params=(dt.replace("-", ""),)
  351. )
  352. response = {}
  353. for item in fetch_result:
  354. key = f"{item['gh_id']}_{item['position']}"
  355. response[key] = {
  356. "read_rate_avg": item["read_rate_avg"],
  357. "read_avg": item["read_avg"],
  358. "fans": item["fans"],
  359. }
  360. return response
  361. # 计算阅读均值置信区间上限
  362. async def cal_read_avg_detail(
  363. self, account: Dict, dt: str, account_with_read_rate_avg: Dict
  364. ):
  365. for index in self.ARTICLE_INDEX_LIST:
  366. key = f"{account['gh_id']}_{index}"
  367. print(key)
  368. if account_with_read_rate_avg.get(key) is None:
  369. continue
  370. read_avg = account_with_read_rate_avg[key]["read_avg"]
  371. # 计算阅读均值置信区间上限
  372. read_avg_ci_upper = await self.cal_read_avg_ci_upper(
  373. gh_id=account["gh_id"], index=index
  374. )
  375. await self.process_each_record(
  376. account=account,
  377. index=index,
  378. fans=account_with_read_rate_avg[key]["fans"],
  379. read_rate_avg=account_with_read_rate_avg[key]["read_rate_avg"],
  380. read_avg=read_avg,
  381. read_avg_ci_upper=read_avg_ci_upper,
  382. dt=dt,
  383. )
  384. async def process_each_record(
  385. self, account, index, fans, read_rate_avg, read_avg, read_avg_ci_upper, dt
  386. ):
  387. gh_id = account["gh_id"]
  388. account_name = account["account_name"]
  389. business_type = (
  390. self.TOULIU if account_name in self.TOULIU_ACCOUNTS else self.ARTICLES_DAILY
  391. )
  392. # insert into database
  393. insert_sql = f"""
  394. insert into account_avg_info_v3
  395. (gh_id, position, update_time, account_name, fans, read_avg, like_avg, status, account_type,
  396. account_mode, account_source, account_status, business_type, read_rate_avg, read_avg_ci_upper)
  397. values
  398. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
  399. """
  400. try:
  401. await self.pool.async_save(
  402. query=insert_sql,
  403. db_name="piaoquan_crawler",
  404. params=(
  405. gh_id,
  406. index,
  407. dt,
  408. account["account_name"],
  409. fans,
  410. read_avg,
  411. self.DEFAULT_LIKE,
  412. self.USING_STATUS,
  413. account["account_type"],
  414. account["mode_type"],
  415. account["account_source"],
  416. account["status"],
  417. business_type,
  418. read_rate_avg,
  419. read_avg_ci_upper,
  420. ),
  421. )
  422. except Exception as e:
  423. print(e)
  424. update_sql = f"""
  425. update account_avg_info_v3
  426. set fans = %s, read_avg = %s, read_rate_avg = %s, read_avg_ci_upper = %s
  427. where gh_id = %s and position = %s and update_time = %s
  428. """
  429. try:
  430. await self.pool.async_save(
  431. query=update_sql,
  432. db_name="piaoquan_crawler",
  433. params=(
  434. fans,
  435. read_avg,
  436. read_rate_avg,
  437. read_avg_ci_upper,
  438. account["gh_id"],
  439. index,
  440. dt,
  441. ),
  442. )
  443. except Exception as e:
  444. print(e)
  445. # 修改前一天的状态为 0
  446. update_status_sql = f"""
  447. UPDATE account_avg_info_v3
  448. SET status = %s
  449. WHERE update_time != %s AND gh_id = %s AND position = %s;
  450. """
  451. await self.pool.async_save(
  452. query=update_status_sql,
  453. db_name="piaoquan_crawler",
  454. params=(self.NOT_USING_STATUS, dt, gh_id, index),
  455. )
  456. async def deal(self, end_date: str | None):
  457. if not end_date:
  458. end_date = datetime.now().strftime("%Y-%m-%d")
  459. dt = (datetime.strptime(end_date, "%Y-%m-%d") - timedelta(days=1)).strftime(
  460. "%Y-%m-%d"
  461. )
  462. account_with_read_rate_avg = await self.get_accounts_read_avg(dt)
  463. accounts = await self.get_publishing_accounts()
  464. for account in tqdm(accounts, desc="计算单个账号的阅读均值"):
  465. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  466. continue
  467. try:
  468. await self.cal_read_avg_detail(
  469. account=account,
  470. dt=dt,
  471. account_with_read_rate_avg=account_with_read_rate_avg,
  472. )
  473. except Exception as e:
  474. print(f"计算账号 {account['account_name']} 阅读均值失败 : {e}")
  475. print(traceback.format_exc())
  476. class AccountPositionOpenRateAvg(AccountPositionReadRateAvg):
  477. async def get_account_open_rate(self, gh_id: str, date_string: str) -> float:
  478. fetch_query = f"""
  479. select
  480. sum(view_count) as 'total_read',
  481. sum(first_level) as 'total_first_level',
  482. sum(first_level) / sum(view_count) as 'avg_open_rate'
  483. from datastat_sort_strategy
  484. where gh_id = '{gh_id}' and date_str between date_sub(str_to_date('{date_string}', '%Y%m%d'), interval {self.STAT_PERIOD} day)
  485. and str_to_date('{date_string}', '%Y%m%d');
  486. """
  487. res = await self.pool.async_fetch(query=fetch_query)
  488. return float(res[0]["avg_open_rate"]) if res else 0.0
  489. async def set_avg_open_rate_for_each_account(
  490. self, gh_id: str, date_string: str, avg_read_rate: float
  491. ) -> int:
  492. update_query = """
  493. update account_avg_info_v3
  494. set open_rate_avg = %s
  495. where gh_id = %s and update_time = %s;
  496. """
  497. return await self.pool.async_save(
  498. query=update_query,
  499. db_name="piaoquan_crawler",
  500. params=(avg_read_rate, gh_id, date_string),
  501. )
  502. async def deal(self, date_string: str | None):
  503. if not date_string:
  504. date_string = datetime.now().strftime("%Y-%m-%d")
  505. dt = (datetime.strptime(date_string, "%Y-%m-%d") - timedelta(days=1)).strftime(
  506. "%Y-%m-%d"
  507. )
  508. account_list = await self.get_publishing_accounts()
  509. for account in tqdm(account_list, desc="计算单个账号的打开率均值"):
  510. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  511. continue
  512. try:
  513. avg_open_rate = await self.get_account_open_rate(
  514. gh_id=account["gh_id"], date_string=dt.replace("-", "")
  515. )
  516. await self.set_avg_open_rate_for_each_account(
  517. gh_id=account["gh_id"],
  518. date_string=dt,
  519. avg_read_rate=avg_open_rate,
  520. )
  521. except Exception as e:
  522. print(f"计算账号 {account['account_name']} 打开率均值失败 : {e}")
  523. print(traceback.format_exc())
  524. continue