# account_position_info.py
import asyncio
import traceback
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, List, Set

import numpy as np
from pandas import DataFrame
from scipy import stats
from tqdm.asyncio import tqdm
  10. class AccountPositionInfoConst:
  11. # 阅读率统计周期(秒)
  12. STATISTICS_PERIOD = 31 * 24 * 60 * 60
  13. # 一天的秒数
  14. ONE_DAY_IN_SECONDS = 60 * 60 * 24
  15. # 相对变化率阈值
  16. RELATIVE_VALUE_THRESHOLD = 0.1
  17. # 发文类型
  18. UNLIMITED_PUBLISH_TYPE = 10002
  19. BULK_PUBLISH_TYPE = 9
  20. # 文章位置
  21. ARTICLE_INDEX_LIST = [1, 2, 3, 4, 5, 6, 7, 8]
  22. # 默认粉丝
  23. DEFAULT_FANS = 0
  24. # 最低粉丝量
  25. MIN_FANS = 1000
  26. ARTICLES_DAILY = 1
  27. TOULIU = 2
  28. # 统计周期(天)
  29. STAT_PERIOD = 30
  30. # 默认点赞
  31. DEFAULT_LIKE = 0
  32. # 状态
  33. USING_STATUS = 1
  34. NOT_USING_STATUS = 0
  35. PUBLISH_SUCCESS_STATUS = 2
  36. # 不再使用的服务号
  37. NOT_USED_SERVER_ACCOUNT = {"gh_84e744b16b3a", "gh_5855bed97938", "gh_61a72b720de3"}
  38. # 违禁账号
  39. FORBIDDEN_GH_IDS = {
  40. "gh_4c058673c07e",
  41. "gh_de9f9ebc976b",
  42. "gh_7b4a5f86d68c",
  43. "gh_f902cea89e48",
  44. "gh_789a40fe7935",
  45. "gh_cd041ed721e6",
  46. "gh_62d7f423f382",
  47. "gh_043223059726",
  48. "gh_6cfd1132df94",
  49. "gh_7f5075624a50",
  50. "gh_d4dffc34ac39",
  51. "gh_c69776baf2cd",
  52. "gh_9877c8541764",
  53. "gh_ac43e43b253b",
  54. "gh_93e00e187787",
  55. "gh_080bb43aa0dc",
  56. "gh_b1c71a0e7a85",
  57. "gh_d5f935d0d1f2",
  58. }
  59. # 投流账号
  60. TOULIU_ACCOUNTS = {
  61. "小阳看天下",
  62. "趣味生活方式",
  63. "趣味生活漫时光",
  64. "史趣探秘",
  65. "暖心一隅",
  66. "趣味生活漫谈",
  67. "历史长河流淌",
  68. "美好意义时光",
  69. "银发生活畅谈",
  70. "美好时光阅读汇",
  71. "时光趣味生活",
  72. "生活慢时光",
  73. }
class AccountPositionReadRateAvg(AccountPositionInfoConst):
    """Compute, for every publishing account and article position (1-8),
    the 31-day average read rate (read_count / fans) and persist it into
    ``long_articles_read_rate``.
    """

    def __init__(self, pool, log_client, trace_id):
        # pool: async DB helper exposing async_fetch / async_save.
        # log_client / trace_id: logging context; not referenced in this
        # class — presumably used by callers or subclasses (TODO confirm).
        self.pool = pool
        self.log_client = log_client
        self.trace_id = trace_id

    def generate_stat_duration(self, end_date: str) -> str:
        """Return the start date of the statistics window: ``end_date``
        minus STATISTICS_PERIOD (31 days), formatted as YYYY-MM-DD."""
        end_date_dt = datetime.strptime(end_date, "%Y-%m-%d")
        start_date_dt = end_date_dt - timedelta(seconds=self.STATISTICS_PERIOD)
        return start_date_dt.strftime("%Y-%m-%d")

    async def get_publishing_accounts(self):
        """Fetch accounts attached to active long-article publish plans
        (plan_status = 1, content_modal = 3, channel = 5), excluding
        accounts whose remark contains "自动回复" (auto-reply)."""
        query = """
            select distinct
                t3.name as account_name,
                t3.gh_id as gh_id,
                group_concat(distinct t4.remark) as account_remark,
                t6.account_source_name as account_source,
                t6.mode_type as mode_type,
                t6.account_type as account_type,
                t6.`status` as status
            from
                publish_plan t1
                join publish_plan_account t2 on t1.id = t2.plan_id
                join publish_account t3 on t2.account_id = t3.id
                left join publish_account_remark t4 on t3.id = t4.publish_account_id
                left join wx_statistics_group_source_account t5 on t3.id = t5.account_id
                left join wx_statistics_group_source t6 on t5.group_source_name = t6.account_source_name
            where t1.plan_status = 1 and t1.content_modal = 3 and t3.channel = 5
            group by t3.id;
        """
        account_list = await self.pool.async_fetch(query, db_name="aigc")
        return [i for i in account_list if "自动回复" not in str(i["account_remark"])]

    async def get_server_group_publish_accounts(self) -> Set[str]:
        """Return gh_ids of service accounts used for group publishing,
        excluding the retired ids in NOT_USED_SERVER_ACCOUNT."""
        query = """
            select gzh_id from article_gzh_developer;
        """
        fetch_response = await self.pool.async_fetch(
            query=query, db_name="piaoquan_crawler"
        )
        gh_id_list = [
            i["gzh_id"]
            for i in fetch_response
            if i["gzh_id"] not in self.NOT_USED_SERVER_ACCOUNT
        ]
        return set(gh_id_list)

    async def get_fans_for_each_date(self, start_date: str):
        """Build a {gh_id: {date: fans}} mapping for every account since
        ``start_date``, combining subscription-account daily fan counts
        with service-account group-send reach."""
        # Subscription accounts: daily fans from datastat_wx, falling back
        # to publish_account.follower_count when fans_count is NULL or 0.
        query = """
            SELECT t1.date_str as dt,
                CASE
                    WHEN t1.fans_count IS NULL OR t1.fans_count = 0 THEN t2.follower_count
                    ELSE t1.fans_count
                END AS fans,
                t2.gh_id as gh_id
            FROM datastat_wx t1 JOIN publish_account t2 ON t1.account_id = t2.id
            WHERE t2.channel = 5 AND t2.status = 1 AND t1.date_str >= %s;
        """
        task1 = self.pool.async_fetch(query=query, db_name="aigc", params=(start_date,))
        group_account_set = await self.get_server_group_publish_accounts()
        if group_account_set:
            # Service accounts: "fans" per day is the summed per-push
            # average reach of successful sends (status = PUBLISH_SUCCESS_STATUS).
            # NOTE(review): group_account_set only gates whether this query
            # runs; its members are not used to filter the rows — confirm
            # that is intended.
            query_group = f"""
                select publish_date as dt, gh_id, account_name, CAST(SUM(total_sent_fans) AS SIGNED) AS fans
                from (
                    select publish_date, account_name, gh_id, push_id, avg(sent_count) as 'total_sent_fans'
                    from long_articles_group_send_result
                    where publish_date >= %s and status = %s
                    group by publish_date, account_name, push_id
                ) as lagsr
                group by lagsr.publish_date, gh_id;
            """
            params_group = (start_date, self.PUBLISH_SUCCESS_STATUS)
            task2 = self.pool.async_fetch(query=query_group, params=params_group)
        else:
            # No group accounts: resolve immediately to an empty list so
            # the gather below stays uniform.
            task2 = asyncio.sleep(0, result=[])
        account_with_fans, group_account_with_fans = await asyncio.gather(task1, task2)
        # Merge both sources into one mapping.
        account_dt_fans_mapper: Dict[str, Dict[str, int]] = defaultdict(dict)
        # Subscription accounts first.
        for item in account_with_fans or []:
            gh_id = item["gh_id"]
            dt = item["dt"]
            fans = int(item.get("fans") or 0)
            account_dt_fans_mapper[gh_id][dt] = fans
        # Service accounts second: overwrite entries with the same
        # gh_id + dt, so group-send reach wins.
        for item in group_account_with_fans or []:
            gh_id = item["gh_id"]
            dt = item["dt"]
            fans = int(item.get("fans") or 0)
            account_dt_fans_mapper[gh_id][dt] = fans
        return account_dt_fans_mapper

    async def get_single_account_published_articles(
        self, gh_id: str, start_timestamp: int
    ):
        """Fetch one account's bulk-published articles since
        ``start_timestamp``, with read counts averaged per
        (position, publish day)."""
        # %% escapes the date-format percent signs because the query is
        # executed with driver-side parameter substitution.
        query = """
            SELECT
                ghId as gh_id, accountName as account_name,
                ItemIndex as position,
                CAST(AVG(show_view_count) AS SIGNED) as read_count,
                FROM_UNIXTIME(publish_timestamp, '%%Y-%%m-%%d') AS pub_dt
            FROM
                official_articles_v2
            WHERE
                ghId = %s and Type = %s and publish_timestamp >= %s
            GROUP BY ghId, accountName, ItemIndex, pub_dt;
        """
        return await self.pool.async_fetch(
            query=query,
            db_name="piaoquan_crawler",
            params=(gh_id, self.BULK_PUBLISH_TYPE, start_timestamp),
        )

    async def cal_read_rate_for_single_account(
        self,
        publish_details: List[Dict],
        gh_id: str,
        fans_mapper: Dict[str, Dict[str, int]],
    ) -> DataFrame | None:
        """Attach fans and read_rate to each published article and return
        the result as a DataFrame, or None when there is nothing to
        process."""
        if not publish_details:
            return None
        article_list_with_fans = []
        for article in publish_details:
            fans = fans_mapper.get(gh_id, {}).get(article["pub_dt"], self.DEFAULT_FANS)
            if not fans:
                # No fan data recorded for this account/date — skip.
                print(
                    f"账号 {article['account_name']} 在 {article['pub_dt']} 没有粉丝数据"
                )
                continue
            article["fans"] = fans
            # read_rate is only computed above the MIN_FANS threshold.
            # NOTE(review): smaller accounts are still appended below
            # WITHOUT a "read_rate" key, so that column is NaN for them in
            # the DataFrame — confirm this is intended.
            if fans > self.MIN_FANS:
                article["read_rate"] = article["read_count"] / fans if fans else 0
            article_list_with_fans.append(article)
        # Convert to a DataFrame for the downstream filtering/aggregation.
        return DataFrame(
            article_list_with_fans,
            columns=[
                "gh_id",
                "account_name",
                "position",
                "read_count",
                "pub_dt",
                "fans",
                "read_rate",
            ],
        )

    async def update_read_rate_avg_for_each_account(
        self,
        account: dict,
        start_date: str,
        end_dt: str,
        df: DataFrame,
        fans_dict: Dict[str, int],
    ):
        """For each article position, drop read-count outliers beyond two
        standard deviations and insert the remaining average read rate
        into ``long_articles_read_rate``."""
        # dt_version is the day before end_dt, later stored as YYYYMMDD.
        avg_date = (datetime.strptime(end_dt, "%Y-%m-%d") - timedelta(days=1)).strftime(
            "%Y-%m-%d"
        )
        insert_error_list = []
        for index in self.ARTICLE_INDEX_LIST:
            # Restrict to this position within [start_date, end_dt).
            filter_df = df[
                (df["position"] == index)
                & (df["pub_dt"] < end_dt)
                & (df["pub_dt"] >= start_date)
            ]
            read_average = filter_df["read_count"].mean()
            read_std = filter_df["read_count"].std()
            # Keep only rows within mean ± 2 std to damp outliers.
            output_df = filter_df[
                (filter_df["read_count"] > read_average - 2 * read_std)
                & (filter_df["read_count"] < read_average + 2 * read_std)
            ]
            records = len(output_df)
            if records:
                # TODO: volatility of positions 1-2 still needs a check.
                try:
                    insert_query = """
                        INSERT INTO long_articles_read_rate
                        (account_name, gh_id, position, read_rate_avg, remark, articles_count, earliest_publish_time, latest_publish_time, dt_version, is_delete, fans)
                        VALUES
                        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
                    """
                    await self.pool.async_save(
                        query=insert_query,
                        params=(
                            account["account_name"],
                            account["gh_id"],
                            index,
                            output_df["read_rate"].mean(),
                            "从 {} 开始往前计算 31 天".format(start_date),
                            records,
                            output_df["pub_dt"].min(),
                            output_df["pub_dt"].max(),
                            avg_date.replace("-", ""),
                            0,
                            fans_dict.get(avg_date, 0),
                        ),
                    )
                except Exception as e:
                    # NOTE(review): insert errors are collected here but
                    # never logged or returned — consider surfacing them.
                    insert_error_list.append(str(e))

    async def deal(self, end_date: str | None):
        """Entry point: compute and persist 31-day read-rate averages for
        every publishing account up to ``end_date`` (defaults to today)."""
        if not end_date:
            end_date = datetime.now().strftime("%Y-%m-%d")
        start_dt = self.generate_stat_duration(end_date)
        fans_mapper = await self.get_fans_for_each_date(start_date=start_dt)
        accounts = await self.get_publishing_accounts()
        for account in tqdm(accounts, desc="计算单个账号阅读率均值"):
            # Skip banned accounts entirely.
            if account["gh_id"] in self.FORBIDDEN_GH_IDS:
                continue
            published_articles = await self.get_single_account_published_articles(
                gh_id=account["gh_id"],
                start_timestamp=int(
                    datetime.strptime(start_dt, "%Y-%m-%d").timestamp()
                ),
            )
            article_dataframe = await self.cal_read_rate_for_single_account(
                publish_details=published_articles,
                gh_id=account["gh_id"],
                fans_mapper=fans_mapper,
            )
            if article_dataframe is None:
                continue
            if article_dataframe.empty:
                continue
            await self.update_read_rate_avg_for_each_account(
                account=account,
                start_date=start_dt,
                end_dt=end_date,
                df=article_dataframe,
                fans_dict=fans_mapper.get(account["gh_id"], {}),
            )
  313. class AccountPositionReadAvg(AccountPositionReadRateAvg):
  314. # 计算阅读均值置信区间上限
  315. async def cal_read_avg_ci_upper(self, gh_id: str, index: int):
  316. fetch_query = f"""
  317. select read_avg, update_time
  318. from account_avg_info_v3
  319. where gh_id = %s and position = %s
  320. order by update_time desc limit 30;
  321. """
  322. fetch_response_list = await self.pool.async_fetch(
  323. query=fetch_query, db_name="piaoquan_crawler", params=(gh_id, index)
  324. )
  325. read_avg_list = [
  326. i["read_avg"] for i in fetch_response_list if i["read_avg"] is not None
  327. ]
  328. n = len(read_avg_list)
  329. mean = np.mean(read_avg_list)
  330. std = np.std(read_avg_list, ddof=1)
  331. se = std / np.sqrt(n)
  332. t = stats.t.ppf(0.975, df=n - 1)
  333. upper_t = mean + t * se
  334. return upper_t
  335. # 获取账号的阅读率均值信息
  336. async def get_accounts_read_avg(self, dt):
  337. query = """
  338. select gh_id, position, fans, read_rate_avg, fans * read_rate_avg as read_avg
  339. from long_articles_read_rate
  340. where dt_version = %s
  341. """
  342. fetch_result = await self.pool.async_fetch(
  343. query=query, params=(dt.replace("-", ""),)
  344. )
  345. response = {}
  346. for item in fetch_result:
  347. key = f"{item['gh_id']}_{item['position']}"
  348. response[key] = {
  349. "read_rate_avg": item["read_rate_avg"],
  350. "read_avg": item["read_avg"],
  351. "fans": item["fans"],
  352. }
  353. return response
  354. # 计算阅读均值置信区间上限
  355. async def cal_read_avg_detail(
  356. self, account: Dict, dt: str, account_with_read_rate_avg: Dict
  357. ):
  358. for index in self.ARTICLE_INDEX_LIST:
  359. key = f"{account['gh_id']}_{index}"
  360. print(key)
  361. if account_with_read_rate_avg.get(key) is None:
  362. continue
  363. read_avg = account_with_read_rate_avg[key]["read_avg"]
  364. # 计算阅读均值置信区间上限
  365. read_avg_ci_upper = await self.cal_read_avg_ci_upper(
  366. gh_id=account["gh_id"], index=index
  367. )
  368. await self.process_each_record(
  369. account=account,
  370. index=index,
  371. fans=account_with_read_rate_avg[key]["fans"],
  372. read_rate_avg=account_with_read_rate_avg[key]["read_rate_avg"],
  373. read_avg=read_avg,
  374. read_avg_ci_upper=read_avg_ci_upper,
  375. dt=dt,
  376. )
  377. async def process_each_record(
  378. self, account, index, fans, read_rate_avg, read_avg, read_avg_ci_upper, dt
  379. ):
  380. gh_id = account["gh_id"]
  381. account_name = account["account_name"]
  382. business_type = (
  383. self.TOULIU if account_name in self.TOULIU_ACCOUNTS else self.ARTICLES_DAILY
  384. )
  385. # insert into database
  386. insert_sql = f"""
  387. insert into account_avg_info_v3
  388. (gh_id, position, update_time, account_name, fans, read_avg, like_avg, status, account_type,
  389. account_mode, account_source, account_status, business_type, read_rate_avg, read_avg_ci_upper)
  390. values
  391. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
  392. """
  393. try:
  394. await self.pool.async_save(
  395. query=insert_sql,
  396. db_name="piaoquan_crawler",
  397. params=(
  398. gh_id,
  399. index,
  400. dt,
  401. account["account_name"],
  402. fans,
  403. read_avg,
  404. self.DEFAULT_LIKE,
  405. self.USING_STATUS,
  406. account["account_type"],
  407. account["mode_type"],
  408. account["account_source"],
  409. account["status"],
  410. business_type,
  411. read_rate_avg,
  412. read_avg_ci_upper,
  413. ),
  414. )
  415. except Exception as e:
  416. print(e)
  417. update_sql = f"""
  418. update account_avg_info_v3
  419. set fans = %s, read_avg = %s, read_rate_avg = %s, read_avg_ci_upper = %s
  420. where gh_id = %s and position = %s and update_time = %s
  421. """
  422. try:
  423. await self.pool.async_save(
  424. query=update_sql,
  425. db_name="piaoquan_crawler",
  426. params=(
  427. fans,
  428. read_avg,
  429. read_rate_avg,
  430. read_avg_ci_upper,
  431. account["gh_id"],
  432. index,
  433. dt,
  434. ),
  435. )
  436. except Exception as e:
  437. print(e)
  438. # 修改前一天的状态为 0
  439. update_status_sql = f"""
  440. UPDATE account_avg_info_v3
  441. SET status = %s
  442. WHERE update_time != %s AND gh_id = %s AND position = %s;
  443. """
  444. await self.pool.async_save(
  445. query=update_status_sql,
  446. db_name="piaoquan_crawler",
  447. params=(self.NOT_USING_STATUS, dt, gh_id, index),
  448. )
  449. async def deal(self, end_date: str | None):
  450. if not end_date:
  451. end_date = datetime.now().strftime("%Y-%m-%d")
  452. dt = (datetime.strptime(end_date, "%Y-%m-%d") - timedelta(days=1)).strftime(
  453. "%Y-%m-%d"
  454. )
  455. account_with_read_rate_avg = await self.get_accounts_read_avg(dt)
  456. accounts = await self.get_publishing_accounts()
  457. for account in tqdm(accounts, desc="计算单个账号的阅读均值"):
  458. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  459. continue
  460. try:
  461. await self.cal_read_avg_detail(
  462. account=account,
  463. dt=dt,
  464. account_with_read_rate_avg=account_with_read_rate_avg,
  465. )
  466. except Exception as e:
  467. print(f"计算账号 {account['account_name']} 阅读均值失败 : {e}")
  468. print(traceback.format_exc())
  469. class AccountPositionOpenRateAvg(AccountPositionReadRateAvg):
  470. async def get_account_open_rate(self, gh_id: str, date_string: str) -> float:
  471. fetch_query = f"""
  472. select
  473. sum(view_count) as 'total_read',
  474. sum(first_level) as 'total_first_level',
  475. sum(first_level) / sum(view_count) as 'avg_open_rate'
  476. from datastat_sort_strategy
  477. where gh_id = '{gh_id}' and date_str between date_sub(str_to_date('{date_string}', '%Y%m%d'), interval {self.STAT_PERIOD} day)
  478. and str_to_date('{date_string}', '%Y%m%d');
  479. """
  480. res = await self.pool.async_fetch(query=fetch_query)
  481. return float(res[0]["avg_open_rate"]) if res else 0.0
  482. async def set_avg_open_rate_for_each_account(
  483. self, gh_id: str, date_string: str, avg_read_rate: float
  484. ) -> int:
  485. update_query = """
  486. update account_avg_info_v3
  487. set open_rate_avg = %s
  488. where gh_id = %s and update_time = %s;
  489. """
  490. return await self.pool.async_save(
  491. query=update_query,
  492. db_name="piaoquan_crawler",
  493. params=(avg_read_rate, gh_id, date_string),
  494. )
  495. async def deal(self, date_string: str | None):
  496. if not date_string:
  497. date_string = datetime.now().strftime("%Y-%m-%d")
  498. dt = (datetime.strptime(date_string, "%Y-%m-%d") - timedelta(days=1)).strftime(
  499. "%Y-%m-%d"
  500. )
  501. account_list = await self.get_publishing_accounts()
  502. for account in tqdm(account_list, desc="计算单个账号的打开率均值"):
  503. if account["gh_id"] in self.FORBIDDEN_GH_IDS:
  504. continue
  505. try:
  506. avg_open_rate = await self.get_account_open_rate(
  507. gh_id=account["gh_id"], date_string=dt.replace("-", "")
  508. )
  509. await self.set_avg_open_rate_for_each_account(
  510. gh_id=account["gh_id"],
  511. date_string=dt,
  512. avg_read_rate=avg_open_rate,
  513. )
  514. except Exception as e:
  515. print(f"计算账号 {account['account_name']} 打开率均值失败 : {e}")
  516. print(traceback.format_exc())
  517. continue