"""
@author luojunhui
@description Update Minigram Info Daily
"""
import time
import traceback
from argparse import ArgumentParser
from datetime import datetime, timedelta

import schedule
from tqdm import tqdm

from applications import WeixinSpider, Functions, log, bot
from applications.db import DatabaseConnector
from config import long_articles_config, piaoquan_crawler_config

TASK_NAME = "updateMinigramInfoDaily"
SPIDER_SUCCESS_STATUS = 0


def get_yesterday():
    """
    get yesterday's date
    :return: datetime object for the previous calendar day
    """
    yesterday = datetime.today() - timedelta(1)
    return yesterday


class DailyDataManager(object):
    """
    Daily data update manager
    """

    def __init__(self):
        self.piaoquan_crawler_db_client = None
        self.long_articles_db_client = None
        self.spider = WeixinSpider()

    def init_database(self) -> None:
        """
        init database connectors
        :return:
        """
        # initialize the database connections
        try:
            self.piaoquan_crawler_db_client = DatabaseConnector(piaoquan_crawler_config)
            self.piaoquan_crawler_db_client.connect()
            self.long_articles_db_client = DatabaseConnector(long_articles_config)
            self.long_articles_db_client.connect()
        except Exception as e:
            error_msg = traceback.format_exc()
            bot(
                title="更新小程序裂变信息任务连接数据库失败",
                detail={
                    "error": str(e),
                    "msg": error_msg
                }
            )
            return
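
    # The two connections serve different stores: piaoquan_crawler_db_client is used for
    # official_articles_v2 and long_articles_detail_info, while long_articles_db_client is
    # only read for changwen_data_base_v2 (see the queries in the methods below).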

    def get_published_articles(self, biz_date):
        """
        Get published article info; the window starts at 00:00 of biz_date
        (converted to a unix timestamp) and covers that whole day.
        :return: rows of (ContentUrl, wx_sn, publish_timestamp, accountName, title)
        """
        biz_date_midnight = datetime(year=biz_date.year, month=biz_date.month, day=biz_date.day)
        biz_date_ts = biz_date_midnight.timestamp()
        biz_date_end_ts = biz_date_ts + 24 * 60 * 60 - 1
        sql2 = f"""
            select ContentUrl, wx_sn, publish_timestamp, accountName, title
            from official_articles_v2
            where publish_timestamp between {biz_date_ts} and {biz_date_end_ts};
        """
        result_list = self.piaoquan_crawler_db_client.fetch(sql2)
        log(
            task=TASK_NAME,
            function="get_published_articles",
            message="一共获取 {} 篇文章数据".format(len(result_list))
        )
        return result_list

    def update_article_info(self, line):
        """
        update info into mysql
        :return: None on success, the input row on spider failure (for retry)
        """
        url = line[0]
        update_time = line[2]
        wx_sn = line[1].decode()
        article_detail = self.get_root_source_ids(line)
        if article_detail:
            response_code = article_detail['code']
            if response_code == SPIDER_SUCCESS_STATUS:
                mini_info = article_detail['data']['data']['mini_program']
                if mini_info:
                    log(
                        task=TASK_NAME,
                        function="get_root_source_ids",
                        message="获取文章链接对应的 rootSourceId 成功",
                        data={
                            "ContentUrl": url,
                            "wxSn": wx_sn,
                            "updateTime": update_time,
                            "miniInfo": mini_info
                        }
                    )
                    try:
                        dt_object = datetime.fromtimestamp(update_time)
                        publish_dt = dt_object.strftime('%Y-%m-%d')
                        one_day = timedelta(days=1)
                        two_day = timedelta(days=2)
                        next_day = dt_object + one_day
                        next_next_day = dt_object + two_day
                        recall_dt_list = [dt_object, next_day, next_next_day]
                        recall_dt_str_list = [i.strftime('%Y-%m-%d') for i in recall_dt_list]
                        # one row per (recall day, mini-program card)
                        for dt_str in recall_dt_str_list:
                            for index, item in enumerate(mini_info, 1):
                                image_url = item['image_url']
                                nick_name = item['nike_name']
                                root_source_id = item['path'].split("rootSourceId%3D")[-1]
                                video_id = item['path'].split("videos%3Fid%3D")[1].split("%26su%3D")[0]
                                kimi_title = item['title']
                                insert_sql = f"""
                                    INSERT INTO long_articles_detail_info
                                    (wx_sn, mini_title, mini_name, cover_url, video_index, root_source_id, video_id, publish_dt, recall_dt)
                                    values
                                    (%s, %s, %s, %s, %s, %s, %s, %s, %s);
                                """
                                self.piaoquan_crawler_db_client.save(
                                    query=insert_sql,
                                    params=(
                                        wx_sn,
                                        kimi_title,
                                        nick_name,
                                        image_url,
                                        index,
                                        root_source_id,
                                        video_id,
                                        publish_dt,
                                        dt_str
                                    )
                                )
                                log(
                                    task=TASK_NAME,
                                    function="update_article_info",
                                    message="插入数据成功, video_id 是: {}".format(video_id)
                                )
                    except Exception as e:
                        error_msg = traceback.format_exc()
                        log(
                            task=TASK_NAME,
                            function="update_article_info",
                            status="fail",
                            message="插入数据失败, 失败原因是{}--{}".format(e, error_msg)
                        )
                        return None
            else:
                return line
        else:
            return line
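
    # Note: the parsing in update_article_info assumes each mini-program card's `path`
    # is URL-encoded and embeds both identifiers, e.g. (illustrative only):
    #   ...videos%3Fid%3D<video_id>%26su%3D...rootSourceId%3D<root_source_id>
    # i.e. "videos?id=<video_id>&su=..." plus a trailing "rootSourceId=<id>" once decoded.
    # The exact card payload keys ('image_url', 'nike_name', 'path', 'title') are whatever
    # WeixinSpider.get_article_text returns; this is inferred from the split() calls above,
    # not from the spider's documentation.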

    def get_root_source_ids(self, data_info):
        """
        Fetch article detail (including rootSourceId info) for the given row via the spider
        :return: spider response dict, or False on failure
        """
        url = data_info[0]
        try:
            article_detail = self.spider.get_article_text(url)
            return article_detail
        except Exception as e:
            log(
                task=TASK_NAME,
                function="get_root_source_ids",
                status="fail",
                message="获取文章链接对应的 rootSourceId失败, 报错信息是: {}".format(e),
                data={
                    "ContentUrl": url
                }
            )
            return False

    def get_minigram_info(self, rootSourceId):
        """
        :param rootSourceId:
        :return: {recall_dt: [first_level, fission_0, fission_1, fission_2]} or None
        """
        sql = f"""
            select type, machinecode, create_time, first_level_dt
            from changwen_data_base_v2
            where rootsourceid = '{rootSourceId}';
        """
        result_list = self.long_articles_db_client.fetch(sql)

        def summarize(values):
            """
            :param values:
            :return:
            """
            L = {}
            first_level = {}
            fission_level = {}
            for line in values:
                # count first-level ("首层") visitors first
                if line[0] == '首层':
                    try:
                        dt = str(line[-1])
                        key_dt = datetime.strptime(dt, '%Y%m%d').strftime('%Y-%m-%d')
                        if first_level.get(key_dt):
                            first_level[key_dt].add(line[1])
                        else:
                            first_level[key_dt] = {line[1]}
                    except Exception:
                        continue
                else:
                    try:
                        dt = str(line[-1])
                        first_level_dt = datetime.strptime(dt, '%Y%m%d')
                        create_level_dt = line[-2]
                        delta = create_level_dt - first_level_dt
                        days = int(delta.days)
                        key_dt = datetime.strptime(dt, '%Y%m%d').strftime('%Y-%m-%d')
                        if fission_level.get(key_dt):
                            fission_level[key_dt].append((line[1], days))
                        else:
                            fission_level[key_dt] = [(line[1], days)]
                    except Exception:
                        # first_level_dt is NULL
                        continue

            tt = {}
            for key in fission_level:
                detail_list = fission_level[key]
                temp = {}
                for item in detail_list:
                    mid, days = item
                    if temp.get(days):
                        temp[days].add(mid)
                    else:
                        temp[days] = {mid}
                final = {}
                for sub_key in temp:
                    final[sub_key] = len(temp[sub_key])
                tt[key] = final
            for key in first_level:
                temp = [
                    len(first_level[key]),
                    tt.get(key, {}).get(0, 0),
                    tt.get(key, {}).get(1, 0),
                    tt.get(key, {}).get(2, 0)
                ]
                L[key] = temp
            return L

        try:
            response = summarize(result_list)
            log(
                task=TASK_NAME,
                function="get_minigram_info",
                message="计算source_id信息成功",
                data=response
            )
            return response
        except Exception as e:
            log(
                task=TASK_NAME,
                function="get_minigram_info",
                message="获取 source_id信息失败, 报错信息是: {}".format(e),
                status="fail"
            )
            return None
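
    # Example of the mapping summarize() builds (values are illustrative):
    #   {"2024-01-01": [120, 8, 3, 1]}
    # Per recall date: distinct first-level machinecodes, then distinct fission
    # machinecodes whose create_time falls 0 / 1 / 2 days after first_level_dt.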

    def update_minigram_detail(self, biz_date):
        """
        :return:
        """
        # window start: three days before biz_date
        date_begin = biz_date - timedelta(days=3)
        datestr_begin = date_begin.strftime("%Y-%m-%d")
        datestr_end = biz_date.strftime("%Y-%m-%d")
        sql = f"""
            select distinct root_source_id
            from long_articles_detail_info
            where publish_dt between '{datestr_begin}' and '{datestr_end}';
        """
        source_id_list = self.piaoquan_crawler_db_client.fetch(sql)
        log(
            task=TASK_NAME,
            function="update_minigram_detail",
            message="获取前三天的 rootSourceId, 一共有 {} 条记录".format(len(source_id_list))
        )
        fail_count = 0
        for item in tqdm(source_id_list):
            s_id = item[0]
            try:
                result = self.get_minigram_info(s_id)
                for key in result:
                    recall_dt = key
                    first_level = result[key][0]
                    fission_0 = result[key][1]
                    fission_1 = result[key][2]
                    fission_2 = result[key][3]
                    update_sql = f"""
                        UPDATE long_articles_detail_info
                        set first_level = %s, fission_0 = %s, fission_1 = %s, fission_2 = %s
                        where root_source_id = %s and recall_dt = %s;
                    """
                    try:
                        self.piaoquan_crawler_db_client.save(
                            query=update_sql,
                            params=(
                                first_level, fission_0, fission_1, fission_2, s_id, recall_dt
                            )
                        )
                    except Exception as e:
                        log(
                            task=TASK_NAME,
                            function="update_minigram_detail",
                            status="fail",
                            message="mysql 更新失败, 报错信息是 {}".format(e)
                        )
            except Exception as e:
                log(
                    task=TASK_NAME,
                    function="update_minigram_detail",
                    status="fail",
                    message="更新单条数据失败, 报错信息是 {}".format(e),
                    data={"error_msg": traceback.format_exc()}
                )
                fail_count += 1
        if fail_count:
            bot(
                title="{} fail because of lam db error".format(TASK_NAME),
                detail={
                    "fail_count": fail_count
                }
            )
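

# Data flow: updateArticlesJob inserts three rows per mini-program card into
# long_articles_detail_info (publish day plus the next two recall days), and
# updateMinigramInfoJob later backfills first_level / fission_0-2 on those rows
# from the changwen_data_base_v2 stats.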


def updateArticlesJob(biz_date=None):
    """
    Update article data for the given business date
    :return:
    """
    if not biz_date:
        biz_date = get_yesterday()
    data_manager = DailyDataManager()
    data_manager.init_database()
    article_list = data_manager.get_published_articles(biz_date)
    failed_article_list = []
    for article in tqdm(article_list):
        failed_article = data_manager.update_article_info(article)
        if failed_article:
            failed_article_list.append(failed_article)
    # retry failed articles once
    second_try_fail_article_list = []
    if failed_article_list:
        for article in tqdm(failed_article_list):
            second_failed_article = data_manager.update_article_info(article)
            if second_failed_article:
                second_try_fail_article_list.append(second_failed_article)
    log(
        task=TASK_NAME,
        function="updateArticlesJob",
        message="文章更新完成---{}".format(biz_date.__str__())
    )
    bot(
        title="更新文章任务完成",
        detail={
            "finish_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        },
        mention=False
    )
    if second_try_fail_article_list:
        bot(
            title="更新文章任务存在文章抓取失败",
            detail=[
                {
                    "account": line[3],
                    "title": line[4],
                    "url": line[0]
                }
                for line in second_try_fail_article_list
            ]
        )


def updateMinigramInfoJob(biz_date=None):
    """
    Update mini-program data for the past three days
    :return:
    """
    if not biz_date:
        biz_date = get_yesterday()
    data_manager = DailyDataManager()
    data_manager.init_database()
    try:
        data_manager.update_minigram_detail(biz_date)
        log(
            task=TASK_NAME,
            function="updateMinigramInfoJob",
            message="小程序更新完成---{}".format(biz_date.__str__())
        )
    except Exception as e:
        log(
            task=TASK_NAME,
            function="updateMinigramInfoJob",
            status="fail",
            message="小程序更新失败---{}, 报错信息是: {}".format(biz_date.__str__(), e)
        )
    bot(
        title="更新小程序信息任务完成",
        detail={
            "finish_time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        },
        mention=False
    )


def main():
    """
    main function
    :return:
    """
    parser = ArgumentParser()
    parser.add_argument(
        "--run-date",
        help="Run once for the given date, in %%Y%%m%%d format. "
             "If not specified, run as a daily scheduled job."
    )
    args = parser.parse_args()
    if args.run_date:
        biz_date = datetime.strptime(args.run_date, "%Y%m%d")
        print("Run in manual mode. Date: {}".format(args.run_date))
        updateArticlesJob(biz_date)
        updateMinigramInfoJob(biz_date)
        return
    else:
        print("Run in daily mode.")
        schedule.every().day.at("01:30").do(Functions().job_with_thread, updateArticlesJob)
        schedule.every().day.at("03:30").do(Functions().job_with_thread, updateMinigramInfoJob)
        while True:
            schedule.run_pending()
            time.sleep(1)


if __name__ == '__main__':
    main()
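

# Usage (a sketch; assumes the `applications` package and `config` module are importable):
#   Run once for a specific date (date value is illustrative):
#       python updateMinigramInfoDaily.py --run-date 20240101
#   Run as a long-lived daily scheduler (01:30 article update, 03:30 mini-program update):
#       python updateMinigramInfoDaily.py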