cal_account_read_rate_avg_daily.py

  1. """
  2. @author: luojunhui
  3. cal each account && position reading rate
  4. """
  5. import json
  6. from tqdm import tqdm
  7. from pandas import DataFrame
  8. from argparse import ArgumentParser
  9. from datetime import datetime, timezone, timedelta
  10. from applications import DeNetMysql, PQMySQL, longArticlesMySQL, bot
  11. STATISTICS_PERIOD = 31 * 24 * 60 * 60


def filter_outlier_data(group, key='show_view_count'):
    """
    drop outlier rows from a per-account/position group
    :param group: DataFrame (or groupby group) holding the articles to filter
    :param key: column used for outlier detection
    :return: filtered DataFrame
    """
    mean = group[key].mean()
    std = group[key].std()
    # keep rows within two standard deviations of the mean
    filtered_group = group[(group[key] > mean - 2 * std) & (group[key] < mean + 2 * std)]
    # additionally drop rows larger than 5x the mean of the remaining data
    new_mean = filtered_group[key].mean()
    filtered_group = filtered_group[filtered_group[key] < new_mean * 5]
    return filtered_group
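
# Illustrative check, not part of the pipeline: a toy DataFrame showing what
# filter_outlier_data is expected to do. The numbers are made up; only the column
# name 'show_view_count' matches the real data.
#
#   >>> from pandas import DataFrame
#   >>> demo = DataFrame({'show_view_count': [1000, 1200, 900, 1100, 1050, 100000]})
#   >>> filter_outlier_data(demo)['show_view_count'].tolist()
#   [1000, 1200, 900, 1100, 1050]
#
# The 100000 row is above mean + 2 * std (~98,334 here) and is dropped; the
# remaining rows all stay under 5x the recomputed mean (5 * 1050 = 5250).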


def timestamp_to_str(timestamp) -> str:
    """
    convert a Unix timestamp to a 'YYYY-MM-DD' date string in local time
    :param timestamp: Unix timestamp in seconds
    :return: date string
    """
    dt_object = datetime.utcfromtimestamp(timestamp).replace(tzinfo=timezone.utc).astimezone()
    date_string = dt_object.strftime('%Y-%m-%d')
    return date_string


def str_to_timestamp(date_string) -> int:
    """
    convert a 'YYYY-MM-DD' date string to a Unix timestamp
    :param date_string: date string
    :return: Unix timestamp in seconds
    """
    date_obj = datetime.strptime(date_string, '%Y-%m-%d')
    # convert the datetime object to a Unix timestamp
    timestamp = date_obj.timestamp()
    return int(timestamp)


def get_account_fans_by_dt(db_client) -> dict:
    """
    get each account's fan count, keyed by date
    :param db_client: DeNet MySQL client
    :return: {gh_id: {date_str: fans_count}}
    """
    sql = """
        SELECT
            t1.date_str,
            t1.fans_count,
            t2.gh_id
        FROM datastat_wx t1
        JOIN publish_account t2 ON t1.account_id = t2.id
        WHERE
            t2.channel = 5
            AND t2.status = 1
            AND t1.date_str >= '2024-07-01'
        ORDER BY t1.date_str;
    """
    result = db_client.select(sql)
    fans_dict = {}
    for line in result:
        dt = line[0]
        fans = line[1]
        gh_id = line[2]
        if fans_dict.get(gh_id):
            fans_dict[gh_id][dt] = fans
        else:
            fans_dict[gh_id] = {dt: fans}
    return fans_dict


def get_publishing_accounts(db_client) -> list[dict]:
    """
    get the accounts that are currently publishing
    :param db_client: DeNet MySQL client
    :return: list of {"account_name": ..., "gh_id": ...}
    """
    sql = """
        SELECT DISTINCT
            t3.`name`,
            t3.gh_id,
            t3.follower_count,
            t6.account_source_name,
            t6.mode_type,
            t6.account_type,
            t6.`status`
        FROM
            publish_plan t1
            JOIN publish_plan_account t2 ON t1.id = t2.plan_id
            JOIN publish_account t3 ON t2.account_id = t3.id
            LEFT JOIN publish_account_wx_type t4 ON t3.id = t4.account_id
            LEFT JOIN wx_statistics_group_source_account t5 ON t3.id = t5.account_id
            LEFT JOIN wx_statistics_group_source t6 ON t5.group_source_name = t6.account_source_name
        WHERE
            t1.plan_status = 1
            AND t3.channel = 5
            AND t3.follower_count > 0
        GROUP BY t3.id;
    """
    account_list = db_client.select(sql)
    result_list = [
        {
            "account_name": i[0],
            "gh_id": i[1]
        } for i in account_list
    ]
    return result_list


def get_account_articles_detail(db_client, gh_id_tuple) -> list[dict]:
    """
    get article details for the given accounts
    :param db_client: PQ MySQL client
    :param gh_id_tuple: tuple of gh_id values
    :return: list of article detail dicts
    """
    sql = f"""
        SELECT ghId, accountName, updateTime, ItemIndex, show_view_count
        FROM official_articles_v2
        WHERE ghId IN {gh_id_tuple} AND Type = '9';
    """
    result = db_client.select(sql)
    response_list = [
        {
            "ghId": i[0],
            "accountName": i[1],
            "updateTime": i[2],
            "ItemIndex": i[3],
            "show_view_count": i[4]
        }
        for i in result
    ]
    return response_list


def cal_account_read_rate(gh_id_tuple) -> DataFrame:
    """
    calculate the read rate of each article (show_view_count / fans on the publish date)
    :param gh_id_tuple: tuple of gh_id values
    :return: DataFrame with one row per article
    """
    pq_db = PQMySQL()
    de_db = DeNetMysql()
    response = []
    fans_dict_each_day = get_account_fans_by_dt(db_client=de_db)
    account_article_detail = get_account_articles_detail(
        db_client=pq_db,
        gh_id_tuple=gh_id_tuple
    )
    for line in account_article_detail:
        gh_id = line['ghId']
        dt = timestamp_to_str(line['updateTime'])
        fans = fans_dict_each_day.get(gh_id, {}).get(dt, 0)
        line['fans'] = fans
        # skip articles with no fan count for that day; the read rate is undefined
        if fans:
            line['readRate'] = line['show_view_count'] / fans
            response.append(line)
    return DataFrame(
        response,
        columns=['ghId', 'accountName', 'updateTime', 'ItemIndex', 'show_view_count', 'readRate']
    )


def cal_avg_account_read_rate(df, gh_id, index, dt) -> tuple:
    """
    calculate the average read rate of one account/position over the statistics period
    :param df: article-level DataFrame produced by cal_account_read_rate
    :param gh_id: account gh_id
    :param index: article position (ItemIndex)
    :param dt: end date of the window, 'YYYY-MM-DD'
    :return: (read-rate mean, latest publish time, earliest publish time, article count)
    """
    max_time = str_to_timestamp(dt)
    min_time = max_time - STATISTICS_PERIOD
    filtered_df = df[
        (df["ghId"] == gh_id)
        & (min_time <= df["updateTime"])
        & (df["updateTime"] <= max_time)
        & (df['ItemIndex'] == index)
    ]
    final_df = filter_outlier_data(filtered_df)
    final_df = final_df.sort_values(by=['updateTime'], ascending=False)
    return (
        final_df['readRate'].mean(),
        final_df['updateTime'].max(),
        final_df['updateTime'].min(),
        len(final_df)
    )


def check_each_position(db_client, gh_id, index, dt, avg_rate) -> dict:
    """
    compare the day's read-rate average for one account/position against the most
    recent earlier value and report it if the relative change exceeds 5%
    :param avg_rate: read-rate average computed for the day
    :param db_client: long_articles MySQL client
    :param gh_id: account gh_id
    :param index: article position (ItemIndex)
    :param dt: date string 'YYYY-MM-DD'
    :return: alert dict, or an empty dict if the change is within the threshold
    """
    dt = int(dt.replace("-", ""))
    select_sql = f"""
        SELECT account_name, read_rate_avg
        FROM long_articles_read_rate
        WHERE gh_id = '{gh_id}' AND position = {index} AND dt_version < {dt}
        ORDER BY dt_version DESC LIMIT 1;
    """
    result = db_client.select(select_sql)
    if result:
        account_name = result[0][0]
        previous_read_rate_avg = result[0][1]
        relative_value = (avg_rate - previous_read_rate_avg) / previous_read_rate_avg
        if -0.05 <= relative_value <= 0.05:
            return {}
        else:
            response = {
                "账号名称": account_name,
                "位置": index,
                "当天阅读率均值": avg_rate,
                "前一天阅读率均值": previous_read_rate_avg,
                "相对变化率": relative_value
            }
            return response
    return {}
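
# Quick worked example of the ±5% rule above (illustrative numbers only):
#   previous = 0.20, today = 0.22  ->  relative = (0.22 - 0.20) / 0.20 = 0.10  -> reported
#   previous = 0.20, today = 0.21  ->  relative = 0.05, inside [-0.05, 0.05]   -> not reported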


def update_single_day(dt, account_list, article_df, lam):
    """
    compute and store the read-rate averages for a single day
    :param article_df: article-level DataFrame produced by cal_account_read_rate
    :param lam: long_articles MySQL client
    :param account_list: accounts returned by get_publishing_accounts
    :param dt: date string 'YYYY-MM-DD'
    :return:
    """
    index_list = [1, 2, 3, 4, 5, 6, 7, 8]
    error_list = []
    for account in tqdm(account_list):
        for index in index_list:
            avg_rate, max_time, min_time, a_count = cal_avg_account_read_rate(
                article_df, account['gh_id'], index, dt
            )
            if a_count > 0:
                if index in {1, 2}:
                    error_obj = check_each_position(
                        db_client=lam,
                        gh_id=account['gh_id'],
                        index=index,
                        dt=dt,
                        avg_rate=avg_rate
                    )
                    if error_obj:
                        error_list.append(error_obj)
                try:
                    if avg_rate == 0:
                        continue
                    insert_sql = """
                        INSERT INTO long_articles_read_rate
                        (account_name, gh_id, position, read_rate_avg, remark, articles_count,
                         earliest_publish_time, latest_publish_time, dt_version, is_delete)
                        VALUES
                        (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
                    """
                    lam.update(
                        sql=insert_sql,
                        params=(
                            account['account_name'],
                            account['gh_id'],
                            index,
                            avg_rate,
                            "从 {} 开始往前计算 31 天".format(dt),
                            a_count,
                            timestamp_to_str(min_time),
                            timestamp_to_str(max_time),
                            dt.replace("-", ""),
                            0
                        )
                    )
                except Exception as e:
                    print(e)
    if error_list:
        bot(
            title="更新阅读率均值,头次出现异常值通知",
            detail={
                "时间": dt,
                "异常列表": error_list
            }
        )


def main() -> None:
    """
    main function
    :return:
    """
    parser = ArgumentParser()
    parser.add_argument(
        "--run-date",
        help="Run only once for the given date, in %%Y-%%m-%%d format. "
             "If not specified, run as the daily job."
    )
    args = parser.parse_args()
    if args.run_date:
        dt = args.run_date
    else:
        dt = datetime.today().strftime('%Y-%m-%d')
    lam = longArticlesMySQL()
    de = DeNetMysql()
    account_list = get_publishing_accounts(db_client=de)
    df = cal_account_read_rate(tuple([i['gh_id'] for i in account_list]))
    update_single_day(dt, account_list, df, lam)
    # backfill helper, kept for reference:
    # start_date = datetime(2024, 8, 1)
    # end_date = datetime(2024, 10, 22)
    # delta = end_date - start_date
    # date_strings = [
    #     (start_date + timedelta(days=i)).strftime('%Y-%m-%d')
    #     for i in range(delta.days + 1)
    # ]
    # for date_str in tqdm(date_strings):
    #     update_single_day(date_str, account_list, df, lam)


if __name__ == '__main__':
    main()
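
# Example invocation (illustrative; assumes the `applications` package and the MySQL
# connections it wraps are configured in the runtime environment):
#
#   # recompute the averages for one specific day
#   python cal_account_read_rate_avg_daily.py --run-date 2024-09-11
#
#   # run as the daily job, using today's date
#   python cal_account_read_rate_avg_daily.py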