# account_position_info.py
  1. import asyncio
  2. import traceback
  3. import numpy as np
  4. from collections import defaultdict
  5. from typing import Dict, List
  6. from pandas import DataFrame
  7. from scipy import stats
  8. from tqdm.asyncio import tqdm
  9. from datetime import datetime, timedelta
class AccountPositionInfoConst:
    """Shared constants for account/position read-rate statistics jobs."""

    # Read-rate statistics window, in seconds (31 days)
    STATISTICS_PERIOD = 31 * 24 * 60 * 60
    # Seconds in one day
    ONE_DAY_IN_SECONDS = 60 * 60 * 24
    # Relative change-rate threshold
    RELATIVE_VALUE_THRESHOLD = 0.1
    # Publish types
    UNLIMITED_PUBLISH_TYPE = 10002
    BULK_PUBLISH_TYPE = 9
    # Article positions within one bulk publish (1..8)
    ARTICLE_INDEX_LIST = [1, 2, 3, 4, 5, 6, 7, 8]
    # Default fan count when no data is available
    DEFAULT_FANS = 0
    # Minimum fan count for an article to be counted in read-rate stats
    MIN_FANS = 1000
    # Business types (see process_each_record)
    ARTICLES_DAILY = 1
    TOULIU = 2
    # Statistics period in days (used by the open-rate query)
    STAT_PERIOD = 30
    # Default like count
    DEFAULT_LIKE = 0
    # Row status flags in account_avg_info_v3
    USING_STATUS = 1
    NOT_USING_STATUS = 0
    # Service (group-send) accounts: fans are estimated from send counts
    GROUP_ACCOUNT_SET = {
        "gh_9cf3b7ff486b",
        "gh_ecb21c0453af",
        "gh_45beb952dc74",
        # "gh_84e744b16b3a",
        "gh_b3ffc1ca3a04",
        "gh_b8baac4296cb",
        "gh_efaf7da157f5",
        # "gh_5855bed97938",
        "gh_b32125c73861",
        "gh_761976bb98a6",
        "gh_5e543853d8f0",
        # "gh_61a72b720de3",
    }
    # Banned accounts, skipped by every job
    FORBIDDEN_GH_IDS = {
        "gh_4c058673c07e",
        "gh_de9f9ebc976b",
        "gh_7b4a5f86d68c",
        "gh_f902cea89e48",
        "gh_789a40fe7935",
        "gh_cd041ed721e6",
        "gh_62d7f423f382",
        "gh_043223059726",
        "gh_6cfd1132df94",
        "gh_7f5075624a50",
        "gh_d4dffc34ac39",
        "gh_c69776baf2cd",
        "gh_9877c8541764",
        "gh_ac43e43b253b",
        "gh_93e00e187787",
        "gh_080bb43aa0dc",
        "gh_b1c71a0e7a85",
        "gh_d5f935d0d1f2",
    }
    # Paid-traffic ("touliu") account names, matched by account_name
    TOULIU_ACCOUNTS = {
        "小阳看天下",
        "趣味生活方式",
        "趣味生活漫时光",
        "史趣探秘",
        "暖心一隅",
        "趣味生活漫谈",
        "历史长河流淌",
        "美好意义时光",
        "银发生活畅谈",
        "美好时光阅读汇",
        "时光趣味生活",
        "生活慢时光",
    }
class AccountPositionReadRateAvg(AccountPositionInfoConst):
    """Compute, per account and per article position, the average read rate
    (read count / fan count) over the 31-day statistics window and persist
    the results into `long_articles_read_rate`.
    """

    def __init__(self, pool, log_client, trace_id):
        # pool: async MySQL pool exposing async_fetch / async_save.
        # log_client / trace_id: logging context carried through the run.
        self.pool = pool
        self.log_client = log_client
        self.trace_id = trace_id

    # Derive the start of the statistics window from the end date.
    def generate_stat_duration(self, end_date: str) -> str:
        """Return the date STATISTICS_PERIOD seconds (31 days) before
        `end_date`; both dates use the "%Y-%m-%d" format."""
        end_date_dt = datetime.strptime(end_date, "%Y-%m-%d")
        start_date_dt = end_date_dt - timedelta(seconds=self.STATISTICS_PERIOD)
        return start_date_dt.strftime("%Y-%m-%d")

    # Fetch all actively publishing accounts.
    async def get_publishing_accounts(self):
        """Return accounts on active plans (plan_status = 1, content_modal = 3,
        channel = 5), excluding accounts whose remark marks them as
        auto-reply ("自动回复") accounts."""
        query = """
            select distinct
                t3.name as account_name,
                t3.gh_id as gh_id,
                group_concat(distinct t4.remark) as account_remark,
                t6.account_source_name as account_source,
                t6.mode_type as mode_type,
                t6.account_type as account_type,
                t6.`status` as status
            from
                publish_plan t1
                join publish_plan_account t2 on t1.id = t2.plan_id
                join publish_account t3 on t2.account_id = t3.id
                left join publish_account_remark t4 on t3.id = t4.publish_account_id
                left join wx_statistics_group_source_account t5 on t3.id = t5.account_id
                left join wx_statistics_group_source t6 on t5.group_source_name = t6.account_source_name
            where t1.plan_status = 1 and t1.content_modal = 3 and t3.channel = 5
            group by t3.id;
        """
        account_list = await self.pool.async_fetch(query, db_name="aigc")
        # str() guards against a NULL (None) account_remark.
        return [i for i in account_list if "自动回复" not in str(i["account_remark"])]

    # Per-date fan counts for every account in the statistics window.
    async def get_fans_for_each_date(self, start_date: str):
        """Build a {gh_id: {date_str: fans}} mapping from `start_date` on.

        Subscription accounts come from datastat_wx (falling back to
        follower_count when fans_count is NULL/0). For service accounts in
        GROUP_ACCOUNT_SET, fans are estimated as SUM(sent_count) / 8 per
        publish date and override the subscription numbers.
        """
        # Subscription-account fans per day.
        query = """
            SELECT t1.date_str as dt,
                CASE
                    WHEN t1.fans_count IS NULL OR t1.fans_count = 0 THEN t2.follower_count
                    ELSE t1.fans_count
                END AS fans,
                t2.gh_id as gh_id
            FROM datastat_wx t1 JOIN publish_account t2 ON t1.account_id = t2.id
            WHERE t2.channel = 5 AND t2.status = 1 AND t1.date_str >= %s;
        """
        task1 = self.pool.async_fetch(query=query, db_name="aigc", params=(start_date,))
        if self.GROUP_ACCOUNT_SET:
            gh_ids = tuple(self.GROUP_ACCOUNT_SET)
            placeholders = ",".join(["%s"] * len(gh_ids))
            # Service accounts: fans estimated as daily sent_count / 8
            # (presumably 8 article positions per group send — TODO confirm).
            query_group = f"""
                SELECT gh_id, publish_date AS dt, CAST(SUM(sent_count) / 8 AS SIGNED) AS fans
                FROM long_articles_group_send_result
                WHERE publish_date >= %s AND gh_id IN ({placeholders})
                GROUP BY publish_date, gh_id;
            """
            params_group = (start_date, *gh_ids)
            task2 = self.pool.async_fetch(query=query_group, params=params_group)
        else:
            # No group accounts: resolve to an empty list so gather() still works.
            task2 = asyncio.sleep(0, result=[])
        account_with_fans, group_account_with_fans = await asyncio.gather(task1, task2)
        # Merge both sources into {gh_id: {dt: fans}}.
        account_dt_fans_mapper: Dict[str, Dict[str, int]] = defaultdict(dict)
        # Subscription accounts first ...
        for item in account_with_fans or []:
            gh_id = item["gh_id"]
            dt = item["dt"]
            fans = int(item.get("fans") or 0)
            account_dt_fans_mapper[gh_id][dt] = fans
        # ... then service accounts, overriding the same (gh_id, dt) keys.
        for item in group_account_with_fans or []:
            gh_id = item["gh_id"]
            dt = item["dt"]
            fans = int(item.get("fans") or 0)
            account_dt_fans_mapper[gh_id][dt] = fans
        return account_dt_fans_mapper

    # Bulk-published articles and their average daily read counts for one account.
    async def get_single_account_published_articles(
        self, gh_id: str, start_timestamp: int
    ):
        """Return per-(position, publish-date) average read counts for the
        account's bulk publishes (Type = BULK_PUBLISH_TYPE) since
        `start_timestamp` (unix seconds)."""
        # %% escapes the literal % for the parameterized driver.
        query = """
            SELECT
                ghId as gh_id, accountName as account_name,
                ItemIndex as position,
                CAST(AVG(show_view_count) AS SIGNED) as read_count,
                FROM_UNIXTIME(publish_timestamp, '%%Y-%%m-%%d') AS pub_dt
            FROM
                official_articles_v2
            WHERE
                ghId = %s and Type = %s and publish_timestamp >= %s
            GROUP BY ghId, accountName, ItemIndex, pub_dt;
        """
        return await self.pool.async_fetch(
            query=query,
            db_name="piaoquan_crawler",
            params=(gh_id, self.BULK_PUBLISH_TYPE, start_timestamp),
        )

    # Compute the read rate for each of one account's articles.
    async def cal_read_rate_for_single_account(
        self,
        publish_details: List[Dict],
        gh_id: str,
        fans_mapper: Dict[str, Dict[str, int]],
    ) -> DataFrame | None:
        """Attach fans and read_rate to each publish record and return a
        DataFrame; returns None when there are no publish records.

        Records without fan data for their publish date are skipped.
        """
        if not publish_details:
            return None
        article_list_with_fans = []
        for article in publish_details:
            fans = fans_mapper.get(gh_id, {}).get(article["pub_dt"], self.DEFAULT_FANS)
            if not fans:
                # No fan data for this date — skip the record.
                print(
                    f"账号 {article['account_name']} 在 {article['pub_dt']} 没有粉丝数据"
                )
                continue
            article["fans"] = fans
            # NOTE(review): records at or below MIN_FANS are dropped entirely —
            # confirm that low-fan articles are meant to be excluded, not kept
            # without a read_rate.
            if fans > self.MIN_FANS:
                # `if fans else 0` is redundant here (fans > MIN_FANS > 0).
                article["read_rate"] = article["read_count"] / fans if fans else 0
                article_list_with_fans.append(article)
        # Convert to a DataFrame for the downstream filtering/averaging.
        return DataFrame(
            article_list_with_fans,
            columns=[
                "gh_id",
                "account_name",
                "position",
                "read_count",
                "pub_dt",
                "fans",
                "read_rate",
            ],
        )

    # Compute and persist the per-position read-rate average for one account.
    async def update_read_rate_avg_for_each_account(
        self,
        account: dict,
        start_date: str,
        end_dt: str,
        df: DataFrame,
        fans_dict: Dict[str, int],
    ):
        """For each article position, trim read-count outliers outside
        mean ± 2·std and insert the averaged read rate into
        `long_articles_read_rate` with dt_version = day before `end_dt`
        (yyyymmdd)."""
        avg_date = (datetime.strptime(end_dt, "%Y-%m-%d") - timedelta(days=1)).strftime(
            "%Y-%m-%d"
        )
        # NOTE(review): collected insert errors are never logged or returned.
        insert_error_list = []
        for index in self.ARTICLE_INDEX_LIST:
            # Restrict to this position inside [start_date, end_dt).
            filter_df = df[
                (df["position"] == index)
                & (df["pub_dt"] < end_dt)
                & (df["pub_dt"] >= start_date)
            ]
            read_average = filter_df["read_count"].mean()
            read_std = filter_df["read_count"].std()
            # Drop outliers outside mean ± 2 standard deviations.
            output_df = filter_df[
                (filter_df["read_count"] > read_average - 2 * read_std)
                & (filter_df["read_count"] < read_average + 2 * read_std)
            ]
            records = len(output_df)
            if records:
                # todo: volatility still needs to be checked
                # if index <= 2:
                #     print("position need to be checked")
                # insert
                try:
                    insert_query = """
                        INSERT INTO long_articles_read_rate
                        (account_name, gh_id, position, read_rate_avg, remark, articles_count, earliest_publish_time, latest_publish_time, dt_version, is_delete, fans)
                        VALUES
                        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
                    """
                    await self.pool.async_save(
                        query=insert_query,
                        params=(
                            account["account_name"],
                            account["gh_id"],
                            index,
                            output_df["read_rate"].mean(),
                            "从 {} 开始往前计算 31 天".format(start_date),
                            records,
                            output_df["pub_dt"].min(),
                            output_df["pub_dt"].max(),
                            avg_date.replace("-", ""),  # dt_version as yyyymmdd
                            0,  # is_delete
                            fans_dict.get(avg_date, 0),
                        ),
                    )
                except Exception as e:
                    insert_error_list.append(str(e))

    # Entry point.
    async def deal(self, end_date: str | None):
        """Run the read-rate computation for every publishing account.

        end_date defaults to today ("%Y-%m-%d") when not given.
        """
        if not end_date:
            end_date = datetime.now().strftime("%Y-%m-%d")
        start_dt = self.generate_stat_duration(end_date)
        fans_mapper = await self.get_fans_for_each_date(start_date=start_dt)
        accounts = await self.get_publishing_accounts()
        for account in tqdm(accounts, desc="计算单个账号阅读率均值"):
            if account["gh_id"] in self.FORBIDDEN_GH_IDS:
                continue
            published_articles = await self.get_single_account_published_articles(
                gh_id=account["gh_id"],
                start_timestamp=int(
                    datetime.strptime(start_dt, "%Y-%m-%d").timestamp()
                ),
            )
            article_dataframe = await self.cal_read_rate_for_single_account(
                publish_details=published_articles,
                gh_id=account["gh_id"],
                fans_mapper=fans_mapper,
            )
            if article_dataframe is None:
                continue
            if article_dataframe.empty:
                continue
            await self.update_read_rate_avg_for_each_account(
                account=account,
                start_date=start_dt,
                end_dt=end_date,
                df=article_dataframe,
                fans_dict=fans_mapper.get(account["gh_id"], {}),
            )
  308. class AccountPositionReadAvg(AccountPositionReadRateAvg):
  309. # 计算阅读均值置信区间上限
  310. async def cal_read_avg_ci_upper(self, gh_id: str, index: int):
  311. fetch_query = f"""
  312. select read_avg, update_time
  313. from account_avg_info_v3
  314. where gh_id = %s and position = %s
  315. order by update_time desc limit 30;
  316. """
  317. fetch_response_list = await self.pool.async_fetch(
  318. query=fetch_query, db_name="piaoquan_crawler", params=(gh_id, index)
  319. )
  320. read_avg_list = [
  321. i["read_avg"] for i in fetch_response_list if i["read_avg"] is not None
  322. ]
  323. n = len(read_avg_list)
  324. mean = np.mean(read_avg_list)
  325. std = np.std(read_avg_list, ddof=1)
  326. se = std / np.sqrt(n)
  327. t = stats.t.ppf(0.975, df=n - 1)
  328. upper_t = mean + t * se
  329. return upper_t
  330. # 获取账号的阅读率均值信息
  331. async def get_accounts_read_avg(self, dt):
  332. query = """
  333. select gh_id, position, fans, read_rate_avg, fans * read_rate_avg as read_avg
  334. from long_articles_read_rate
  335. where dt_version = %s
  336. """
  337. fetch_result = await self.pool.async_fetch(
  338. query=query, params=(dt.replace("-", ""),)
  339. )
  340. response = {}
  341. for item in fetch_result:
  342. key = f"{item['gh_id']}_{item['position']}"
  343. response[key] = {
  344. "read_rate_avg": item["read_rate_avg"],
  345. "read_avg": item["read_avg"],
  346. "fans": item["fans"],
  347. }
  348. return response
  349. # 计算阅读均值置信区间上限
  350. async def cal_read_avg_detail(
  351. self, account: Dict, dt: str, account_with_read_rate_avg: Dict
  352. ):
  353. for index in self.ARTICLE_INDEX_LIST:
  354. key = f"{account['gh_id']}_{index}"
  355. print(key)
  356. if account_with_read_rate_avg.get(key) is None:
  357. continue
  358. read_avg = account_with_read_rate_avg[key]["read_avg"]
  359. # 计算阅读均值置信区间上限
  360. read_avg_ci_upper = await self.cal_read_avg_ci_upper(
  361. gh_id=account["gh_id"], index=index
  362. )
  363. await self.process_each_record(
  364. account=account,
  365. index=index,
  366. fans=account_with_read_rate_avg[key]["fans"],
  367. read_rate_avg=account_with_read_rate_avg[key]["read_rate_avg"],
  368. read_avg=read_avg,
  369. read_avg_ci_upper=read_avg_ci_upper,
  370. dt=dt,
  371. )
  372. async def process_each_record(
  373. self, account, index, fans, read_rate_avg, read_avg, read_avg_ci_upper, dt
  374. ):
  375. gh_id = account["gh_id"]
  376. account_name = account["account_name"]
  377. business_type = (
  378. self.TOULIU if account_name in self.TOULIU_ACCOUNTS else self.ARTICLES_DAILY
  379. )
  380. # insert into database
  381. insert_sql = f"""
  382. insert into account_avg_info_v3
  383. (gh_id, position, update_time, account_name, fans, read_avg, like_avg, status, account_type,
  384. account_mode, account_source, account_status, business_type, read_rate_avg, read_avg_ci_upper)
  385. values
  386. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
  387. """
  388. try:
  389. await self.pool.async_save(
  390. query=insert_sql,
  391. db_name="piaoquan_crawler",
  392. params=(
  393. gh_id,
  394. index,
  395. dt,
  396. account["account_name"],
  397. fans,
  398. read_avg,
  399. self.DEFAULT_LIKE,
  400. self.USING_STATUS,
  401. account["account_type"],
  402. account["mode_type"],
  403. account["account_source"],
  404. account["status"],
  405. business_type,
  406. read_rate_avg,
  407. read_avg_ci_upper,
  408. ),
  409. )
  410. except Exception as e:
  411. print(e)
  412. update_sql = f"""
  413. update account_avg_info_v3
  414. set fans = %s, read_avg = %s, read_rate_avg = %s, read_avg_ci_upper = %s
  415. where gh_id = %s and position = %s and update_time = %s
  416. """
  417. try:
  418. await self.pool.async_save(
  419. query=update_sql,
  420. db_name="piaoquan_crawler",
  421. params=(
  422. fans,
  423. read_avg,
  424. read_rate_avg,
  425. read_avg_ci_upper,
  426. account["gh_id"],
  427. index,
  428. dt,
  429. ),
  430. )
  431. except Exception as e:
  432. print(e)
  433. # 修改前一天的状态为 0
  434. update_status_sql = f"""
  435. UPDATE account_avg_info_v3
  436. SET status = %s
  437. WHERE update_time != %s AND gh_id = %s AND position = %s;
  438. """
  439. await self.pool.async_save(
  440. query=update_status_sql,
  441. db_name="piaoquan_crawler",
  442. params=(self.NOT_USING_STATUS, dt, gh_id, index),
  443. )
  444. async def deal(self, end_date: str | None):
  445. if not end_date:
  446. end_date = datetime.now().strftime("%Y-%m-%d")
  447. dt = (datetime.strptime(end_date, "%Y-%m-%d") - timedelta(days=1)).strftime(
  448. "%Y-%m-%d"
  449. )
  450. account_with_read_rate_avg = await self.get_accounts_read_avg(dt)
  451. accounts = await self.get_publishing_accounts()
  452. for account in tqdm(accounts, desc="计算单个账号的阅读均值"):
  453. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  454. continue
  455. try:
  456. await self.cal_read_avg_detail(
  457. account=account,
  458. dt=dt,
  459. account_with_read_rate_avg=account_with_read_rate_avg,
  460. )
  461. except Exception as e:
  462. print(f"计算账号 {account['account_name']} 阅读均值失败 : {e}")
  463. print(traceback.format_exc())
  464. class AccountPositionOpenRateAvg(AccountPositionReadRateAvg):
  465. async def get_account_open_rate(self, gh_id: str, date_string: str) -> float:
  466. fetch_query = f"""
  467. select
  468. sum(view_count) as 'total_read',
  469. sum(first_level) as 'total_first_level',
  470. sum(first_level) / sum(view_count) as 'avg_open_rate'
  471. from datastat_sort_strategy
  472. where gh_id = '{gh_id}' and date_str between date_sub(str_to_date('{date_string}', '%Y%m%d'), interval {self.STAT_PERIOD} day)
  473. and str_to_date('{date_string}', '%Y%m%d');
  474. """
  475. res = await self.pool.async_fetch(query=fetch_query)
  476. return float(res[0]["avg_open_rate"]) if res else 0.0
  477. async def set_avg_open_rate_for_each_account(
  478. self, gh_id: str, date_string: str, avg_read_rate: float
  479. ) -> int:
  480. update_query = """
  481. update account_avg_info_v3
  482. set open_rate_avg = %s
  483. where gh_id = %s and update_time = %s;
  484. """
  485. return await self.pool.async_save(
  486. query=update_query,
  487. db_name="piaoquan_crawler",
  488. params=(avg_read_rate, gh_id, date_string),
  489. )
  490. async def deal(self, date_string: str | None):
  491. if not date_string:
  492. date_string = datetime.now().strftime("%Y-%m-%d")
  493. dt = (datetime.strptime(date_string, "%Y-%m-%d") - timedelta(days=1)).strftime(
  494. "%Y-%m-%d"
  495. )
  496. account_list = await self.get_publishing_accounts()
  497. for account in tqdm(account_list, desc="计算单个账号的打开率均值"):
  498. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  499. continue
  500. try:
  501. avg_open_rate = await self.get_account_open_rate(
  502. gh_id=account["gh_id"], date_string=dt.replace("-", "")
  503. )
  504. await self.set_avg_open_rate_for_each_account(
  505. gh_id=account["gh_id"],
  506. date_string=dt,
  507. avg_read_rate=avg_open_rate,
  508. )
  509. except Exception as e:
  510. print(f"计算账号 {account['account_name']} 打开率均值失败 : {e}")
  511. print(traceback.format_exc())
  512. continue