# update_published_articles_read_detail.py

  1. """
  2. @author: luojunhui
  3. @desc: 更新文章的阅读详情
  4. """
  5. import json
  6. import time
  7. import traceback
  8. import urllib.parse
  9. from datetime import datetime
  10. from typing import Dict, List
  11. from pymysql.cursors import DictCursor
  12. from tqdm import tqdm
  13. from applications import aiditApi
  14. from applications import bot
  15. from applications import create_feishu_columns_sheet
  16. from applications import Functions
  17. from applications import log
  18. from applications import WeixinSpider
  19. from applications.const import updatePublishedMsgTaskConst
  20. from applications.db import DatabaseConnector
  21. from config import denet_config, long_articles_config, piaoquan_crawler_config
  22. ARTICLE_TABLE = "official_articles"
  23. const = updatePublishedMsgTaskConst()
  24. spider = WeixinSpider()
  25. functions = Functions()


def generate_bot_columns():
    """
    Generate the column layout for the Feishu alert table.
    :return: list of column descriptors
    """
    columns = [
        create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="name", display_name="公众号名称"),
        create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="ghId", display_name="ghId"),
        create_feishu_columns_sheet(sheet_type="number", sheet_name="follower_count", display_name="粉丝数"),
        create_feishu_columns_sheet(sheet_type="date", sheet_name="account_init_timestamp",
                                    display_name="账号接入系统时间"),
        create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="using_status", display_name="利用状态")
    ]
    return columns
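
# The columns above pair with the failed-account dicts collected by
# check_job() below and render together as a Feishu table:
#
#     bot(
#         title="更新当天发布文章,存在未更新的账号",
#         detail={"columns": generate_bot_columns(), "rows": fail_list},
#         table=True,
#     )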


class UpdatePublishedArticlesReadDetail(object):
    """
    Update the read details of each day's published articles.
    """

    def __init__(self):
        self.aigc_db_client = None
        self.piaoquan_crawler_db_client = None
        self.long_articles_db_client = None

    def get_account_list(self) -> List[Dict]:
        """
        Fetch the accounts currently in publishing status from the aigc database.
        :return: list of dicts with the fields
            "name": line[0],
            "ghId": line[1],
            "follower_count": line[2],
            "account_init_time": int(line[3] / 1000),
            "account_type": line[4],  # 订阅号 or 服务号
            "account_auth": line[5]
        """

        def get_account_status() -> Dict:
            """
            Fetch the experiment status of each account.
            :return:
            """
            sql = f"""
                SELECT t1.account_id, t2.status
                FROM wx_statistics_group_source_account t1
                JOIN wx_statistics_group_source t2
                ON t1.group_source_name = t2.account_source_name;
            """
            account_status_list = self.aigc_db_client.fetch(sql, cursor_type=DictCursor)
            account_status = {account['account_id']: account['status'] for account in account_status_list}
            return account_status

        account_list_with_out_using_status = aiditApi.get_publish_account_from_aigc()
        account_status_dict = get_account_status()
        account_list = [
            {
                **item,
                'using_status': 0 if account_status_dict.get(item['account_id']) == '实验' else 1
            }
            for item in account_list_with_out_using_status
        ]
        return account_list
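
    # Shape of one entry in the returned account_list (field names from the
    # docstring above; all values below are hypothetical):
    #
    #     {
    #         "name": "示例公众号",
    #         "ghId": "gh_0123456789ab",
    #         "follower_count": 12345,
    #         "account_init_time": 1700000000,
    #         "account_type": "订阅号",
    #         "account_auth": "...",
    #         "using_status": 1,  # 0 when the account sits in the '实验' group
    #     }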

    def init_database(self):
        """
        Initialize the database connections.
        """
        try:
            self.piaoquan_crawler_db_client = DatabaseConnector(piaoquan_crawler_config)
            self.piaoquan_crawler_db_client.connect()
            self.aigc_db_client = DatabaseConnector(denet_config)
            self.aigc_db_client.connect()
            self.long_articles_db_client = DatabaseConnector(long_articles_config)
            self.long_articles_db_client.connect()
        except Exception as e:
            error_msg = traceback.format_exc()
            bot(
                title="更新文章任务连接数据库失败",
                detail={
                    "error": str(e),
                    "msg": error_msg
                }
            )
            return

    def insert_each_msg(self, account_info: Dict, msg_list: List[Dict]) -> None:
        """
        Write the message data into the database.
        :param account_info:
        :param msg_list:
        :return:
        """
        gh_id = account_info['ghId']
        account_name = account_info['name']
        for info in msg_list:
            baseInfo = info.get("BaseInfo", {})
            appMsgId = info.get("AppMsg", {}).get("BaseInfo", {}).get("AppMsgId", None)
            createTime = info.get("AppMsg", {}).get("BaseInfo", {}).get("CreateTime", None)
            updateTime = info.get("AppMsg", {}).get("BaseInfo", {}).get("UpdateTime", None)
            Type = info.get("AppMsg", {}).get("BaseInfo", {}).get("Type", None)
            detail_article_list = info.get("AppMsg", {}).get("DetailInfo", [])
            if detail_article_list:
                for article in detail_article_list:
                    title = article.get("Title", None)
                    Digest = article.get("Digest", None)
                    ItemIndex = article.get("ItemIndex", None)
                    ContentUrl = article.get("ContentUrl", None)
                    SourceUrl = article.get("SourceUrl", None)
                    CoverImgUrl = article.get("CoverImgUrl", None)
                    CoverImgUrl_1_1 = article.get("CoverImgUrl_1_1", None)
                    CoverImgUrl_235_1 = article.get("CoverImgUrl_235_1", None)
                    ItemShowType = article.get("ItemShowType", None)
                    IsOriginal = article.get("IsOriginal", None)
                    ShowDesc = article.get("ShowDesc", None)
                    show_stat = functions.show_desc_to_sta(ShowDesc)
                    ori_content = article.get("ori_content", None)
                    show_view_count = show_stat.get("show_view_count", 0)
                    show_like_count = show_stat.get("show_like_count", 0)
                    show_zs_count = show_stat.get("show_zs_count", 0)
                    show_pay_count = show_stat.get("show_pay_count", 0)
                    wx_sn = ContentUrl.split("&sn=")[1].split("&")[0] if ContentUrl else None
                    status = account_info['using_status']
                    info_tuple = (
                        gh_id,
                        account_name,
                        appMsgId,
                        title,
                        Type,
                        createTime,
                        updateTime,
                        Digest,
                        ItemIndex,
                        ContentUrl,
                        SourceUrl,
                        CoverImgUrl,
                        CoverImgUrl_1_1,
                        CoverImgUrl_235_1,
                        ItemShowType,
                        IsOriginal,
                        ShowDesc,
                        ori_content,
                        show_view_count,
                        show_like_count,
                        show_zs_count,
                        show_pay_count,
                        wx_sn,
                        json.dumps(baseInfo, ensure_ascii=False),
                        functions.str_to_md5(title),
                        status
                    )
                    self.insert_each_article(
                        info_tuple=info_tuple,
                        show_view_count=show_view_count,
                        show_like_count=show_like_count,
                        wx_sn=wx_sn
                    )
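
    # The "&sn=" split above assumes sn is never the first query parameter of
    # ContentUrl, which holds for WeChat article links. A more defensive sketch
    # (hypothetical helper, not used by this pipeline):
    #
    #     from urllib.parse import urlparse, parse_qs
    #
    #     def extract_wx_sn(content_url: str):
    #         query = parse_qs(urlparse(content_url).query)
    #         return query.get("sn", [None])[0]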

    def insert_each_article(self, info_tuple, show_view_count, show_like_count, wx_sn):
        """
        Insert a single article; if it already exists, refresh its read counts.
        """
        try:
            # NOTE: the table column is CoverImgUrl_255_1 even though the
            # payload key extracted upstream is CoverImgUrl_235_1.
            insert_sql = f"""
                INSERT INTO {ARTICLE_TABLE}
                (ghId, accountName, appMsgId, title, Type, createTime, updateTime, Digest, ItemIndex, ContentUrl, SourceUrl, CoverImgUrl, CoverImgUrl_1_1, CoverImgUrl_255_1, ItemShowType, IsOriginal, ShowDesc, ori_content, show_view_count, show_like_count, show_zs_count, show_pay_count, wx_sn, baseInfo, title_md5, status)
                VALUES
                (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
            """
            self.piaoquan_crawler_db_client.save(query=insert_sql, params=info_tuple)
            log(
                task="updatePublishedMsgDaily",
                function="insert_each_msg",
                message="插入文章数据成功",
                data={
                    "info": info_tuple
                }
            )
        except Exception:
            # The row already exists (or the insert failed); fall back to
            # updating the read counts keyed by wx_sn.
            try:
                update_sql = f"""
                    UPDATE {ARTICLE_TABLE}
                    SET show_view_count = %s, show_like_count = %s
                    WHERE wx_sn = %s;
                """
                self.piaoquan_crawler_db_client.save(query=update_sql,
                                                     params=(show_view_count, show_like_count, wx_sn))
                log(
                    task="updatePublishedMsgDaily",
                    function="insert_each_msg",
                    message="更新文章数据成功",
                    data={
                        "wxSn": wx_sn,
                        "likeCount": show_like_count,
                        "viewCount": show_view_count
                    }
                )
            except Exception as e:
                log(
                    task="updatePublishedMsgDaily",
                    function="insert_each_msg",
                    message="更新文章失败, 报错原因是: {}".format(e),
                    status="fail"
                )
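
    # Why insert-then-update: wx_sn is assumed to carry a unique index, so a
    # re-crawled article makes the INSERT raise a duplicate-key error, and the
    # handler then refreshes only the view/like counters. Catching
    # pymysql.err.IntegrityError instead of a bare Exception would make the
    # fallback trigger only on true duplicates rather than on any write error.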

    def update_account_by_spider(self, account_info: Dict, cursor=None):
        """
        Update the articles of a single account via the spider.
        :param account_info:
        :param cursor:
        :return: None
        """
        gh_id = account_info['ghId']
        latest_update_time = self.get_account_info(gh_id)
        response = spider.update_msg_list(ghId=gh_id, index=cursor)
        msg_list = response.get("data", {}).get("data", [])
        if msg_list:
            last_article_in_this_msg = msg_list[-1]
            last_time_stamp_in_this_msg = last_article_in_this_msg['AppMsg']['BaseInfo']['UpdateTime']
            last_url = last_article_in_this_msg['AppMsg']['DetailInfo'][0]['ContentUrl']
            resdata = spider.get_account_by_url(last_url)
            check_id = resdata['data'].get('data', {}).get('wx_gh')
            if check_id == gh_id:
                self.insert_each_msg(
                    account_info=account_info,
                    msg_list=msg_list
                )
                # Pagination is currently disabled; re-enable to keep crawling
                # while the page still holds messages newer than the latest
                # stored timestamp:
                # if last_time_stamp_in_this_msg > latest_update_time:
                #     next_cursor = response['data']['next_cursor']
                #     return self.update_account_by_spider(
                #         account_info=account_info,
                #         cursor=next_cursor
                #     )
            log(
                task="updatePublishedMsgDaily",
                function="update_each_account",
                message="账号文章更新成功",
                data=response
            )
        else:
            log(
                task="updatePublishedMsgDaily",
                function="update_each_account",
                message="账号文章更新失败",
                status="fail",
                data=response
            )
            return

    def update_account_by_aigc(self, account_info: Dict, run_date: str):
        """
        Update the articles of a single account from the aigc publish trace table.
        """
        gh_id = account_info['ghId']
        select_sql = f"""
            SELECT published_url, publish_timestamp, root_source_id_list, create_timestamp
            FROM long_articles_published_trace_id
            WHERE gh_id = '{gh_id}' AND publish_timestamp > UNIX_TIMESTAMP(DATE_SUB('{run_date}', INTERVAL 3 DAY));
        """
        result = self.long_articles_db_client.fetch(select_sql, cursor_type=DictCursor)
        for article in result:
            published_url = article['published_url']
            article_info = spider.get_article_text(content_link=published_url, is_cache=False, is_count=True)
            response_code = article_info['code']
            if response_code == 0:
                response_data = article_info['data']['data']
                title = response_data['title']
                article_url = response_data['content_link']
                show_view_count = response_data['view_count']
                show_like_count = response_data['like_count']
                show_zs_count = 0
                show_pay_count = 0
                wx_sn = article_url.split("&sn=")[1].split("&")[0] if article_url else None
                app_msg_id = article_url.split("&mid=")[1].split("&")[0] if article_url else None
                status = account_info['using_status']
                info_tuple = (
                    gh_id,
                    account_info['name'],
                    app_msg_id,
                    title,
                    "9",
                    article['create_timestamp'],
                    response_data['update_timestamp'],
                    None,
                    response_data['item_index'],
                    response_data['content_link'],
                    None,
                    None,
                    None,
                    None,
                    None,
                    response_data.get("is_original", None),
                    None,
                    None,
                    show_view_count,
                    show_like_count,
                    show_zs_count,
                    show_pay_count,
                    wx_sn,
                    None,
                    functions.str_to_md5(title),
                    status
                )
                self.insert_each_article(
                    info_tuple=info_tuple,
                    show_view_count=show_view_count,
                    show_like_count=show_like_count,
                    wx_sn=wx_sn
                )

    def get_account_info(self, gh_id: str) -> int:
        """
        Look up the latest publish timestamp of an account by gh_id.
        :param gh_id:
        :return:
        """
        sql = f"""
            SELECT MAX(publish_timestamp)
            FROM {ARTICLE_TABLE}
            WHERE ghId = '{gh_id}';
        """
        result = self.piaoquan_crawler_db_client.fetch(sql)
        if result and result[0][0]:
            return result[0][0]
        else:
            # New account: start the crawl window 30 days before now.
            # (MAX() yields a single NULL row when the account has no articles,
            # so the value itself must be checked, not just the row.)
            return int(time.time()) - const.NEW_ACCOUNT_CRAWL_PERIOD

    def check_single_account(self, account_item: Dict) -> bool:
        """
        Check whether an account's articles have been collected recently.
        :param account_item:
        :return: True / False
        """
        gh_id = account_item['ghId']
        account_type = account_item['account_type']
        today_str = datetime.today().strftime("%Y-%m-%d")
        today_date_time = datetime.strptime(today_str, "%Y-%m-%d")
        today_timestamp = today_date_time.timestamp()
        sql = f"""
            SELECT MAX(updateTime)
            FROM {ARTICLE_TABLE}
            WHERE ghId = '{gh_id}';
        """
        try:
            latest_update_time = self.piaoquan_crawler_db_client.fetch(sql)[0][0]
            # Subscription accounts must have today's articles collected;
            # service accounts get a 7-day window.
            if account_type in const.SUBSCRIBE_TYPE_SET:
                return int(latest_update_time) > int(today_timestamp)
            else:
                return int(latest_update_time) > int(today_timestamp) - 7 * 24 * 3600
        except Exception as e:
            print(e)
            return False
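
    # Worked example (hypothetical date): with today_timestamp at
    # 2024-01-15 00:00:00, a subscription account passes only if its latest
    # updateTime falls on 2024-01-15 or later, while a service account passes
    # if it falls on or after 2024-01-08, i.e. within 7 * 24 * 3600 seconds.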

    def process_single_account(self, account_info: Dict, run_date: str):
        """
        Process a single account.
        """
        gh_id = account_info['ghId']
        # Check whether the account published today via automatic group-send
        # without an unlimited-flow publish
        select_sql = f"""
            SELECT push_type
            FROM long_articles_published_trace_id
            WHERE gh_id = '{gh_id}' AND publish_timestamp > UNIX_TIMESTAMP(CURDATE());
        """
        response = self.long_articles_db_client.fetch(select_sql, cursor_type=DictCursor)
        UNLIMITED_PUSH = 3
        if response:
            unlimited_push_list = [item for item in response if item['push_type'] == UNLIMITED_PUSH]
            if unlimited_push_list:
                self.update_account_by_spider(account_info=account_info)
            else:
                self.update_account_by_aigc(account_info=account_info, run_date=run_date)
        else:
            self.update_account_by_spider(account_info=account_info)

    def update_publish_timestamp(self, article_info: Dict):
        """
        Update the publish timestamp && minigram info of an article.
        :param article_info:
        :return: the article_info if the request failed, else None
        """
        url = article_info['ContentUrl']
        wx_sn = article_info['wx_sn']
        try:
            response = spider.get_article_text(url)
            response_code = response['code']
            if response_code == const.ARTICLE_DELETE_CODE:
                publish_timestamp_s = const.DELETE_STATUS
                root_source_id_list = []
            elif response_code == const.ARTICLE_ILLEGAL_CODE:
                publish_timestamp_s = const.ILLEGAL_STATUS
                root_source_id_list = []
            elif response_code == const.ARTICLE_SUCCESS_CODE:
                data = response['data']['data']
                publish_timestamp_ms = data['publish_timestamp']
                publish_timestamp_s = int(publish_timestamp_ms / 1000)
                mini_program = data.get('mini_program', [])
                if mini_program:
                    root_source_id_list = [
                        urllib.parse.parse_qs(
                            urllib.parse.unquote(i['path'])
                        )['rootSourceId'][0]
                        for i in mini_program
                    ]
                else:
                    root_source_id_list = []
            else:
                publish_timestamp_s = const.UNKNOWN_STATUS
                root_source_id_list = []
        except Exception as e:
            publish_timestamp_s = const.REQUEST_FAIL_STATUS
            root_source_id_list = None
            error_msg = traceback.format_exc()
            print(e, error_msg)
        update_sql = f"""
            UPDATE {ARTICLE_TABLE}
            SET publish_timestamp = %s, root_source_id_list = %s
            WHERE wx_sn = %s;
        """
        self.piaoquan_crawler_db_client.save(
            query=update_sql,
            params=(
                publish_timestamp_s,
                json.dumps(root_source_id_list, ensure_ascii=False),
                wx_sn
            )
        )
        if publish_timestamp_s == const.REQUEST_FAIL_STATUS:
            return article_info
        else:
            return None
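
    # Example of the rootSourceId extraction above (path value hypothetical):
    #
    #     path = "pages/category?videoId=456&rootSourceId=touliu_abc123"
    #     urllib.parse.parse_qs(path)
    #     # -> {"pages/category?videoId": ["456"], "rootSourceId": ["touliu_abc123"]}
    #
    # parse_qs keys on everything before each "=", so the page path fuses with
    # the first parameter while any later rootSourceId survives intact.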

    def update_job(self, biz_date: str = None):
        """
        Run the update task.
        """
        account_list = self.get_account_list()
        if not biz_date:
            biz_date = datetime.today().strftime('%Y-%m-%d')
        # subscription accounts
        subscription_accounts = [i for i in account_list if i['account_type'] in const.SUBSCRIBE_TYPE_SET]
        success_count = 0
        fail_count = 0
        for account in tqdm(subscription_accounts):
            try:
                self.process_single_account(account_info=account, run_date=biz_date)
                success_count += 1
                time.sleep(5)
            except Exception as e:
                fail_count += 1
                log(
                    task="updatePublishedMsgDaily",
                    function="update_job",
                    message="单个账号文章更新失败, 报错信息是: {}".format(e),
                    status="fail",
                )
        log(
            task="updatePublishedMsgDaily",
            function="update_job",
            message="订阅号更新完成",
            data={
                "success": success_count,
                "fail": fail_count
            }
        )
        total_count = success_count + fail_count
        if total_count and fail_count / total_count > const.SUBSCRIBE_FAIL_RATE_THRESHOLD:
            bot(
                title="订阅号超过 {}% 的账号更新失败".format(int(const.SUBSCRIBE_FAIL_RATE_THRESHOLD * 100)),
                detail={
                    "success": success_count,
                    "fail": fail_count,
                    "failRate": fail_count / total_count
                }
            )
        bot(
            title="更新每日发布文章任务完成通知",
            detail={
                "msg": "订阅号更新完成",
                "finish_time": str(datetime.today())
            },
            mention=False
        )
        # service accounts
        server_accounts = [i for i in account_list if i['account_type'] == const.SERVICE_TYPE]
        for account in tqdm(server_accounts):
            try:
                self.process_single_account(account_info=account, run_date=biz_date)
                time.sleep(5)
            except Exception as e:
                print(e)
        bot(
            title="更新每日发布文章任务完成通知",
            detail={
                "msg": "服务号更新完成",
                "finish_time": str(datetime.today())
            },
            mention=False
        )

    def check_job(self, biz_date: str = None):
        """
        Run the check task: verify each subscription account.
        """
        if not biz_date:
            biz_date = datetime.today().strftime('%Y-%m-%d')
        account_list = self.get_account_list()
        subscription_accounts = [i for i in account_list if i['account_type'] in const.SUBSCRIBE_TYPE_SET]
        fail_list = []
        # check and rework if fail
        for sub_item in tqdm(subscription_accounts):
            res = self.check_single_account(sub_item)
            if not res:
                self.process_single_account(sub_item, biz_date)
        # check whether the rework succeeded and alert if not
        for sub_item in tqdm(subscription_accounts):
            res = self.check_single_account(sub_item)
            if not res:
                # drop three fields the alert table does not display
                sub_item.pop('account_type', None)
                sub_item.pop('account_auth', None)
                sub_item.pop('account_id', None)
                fail_list.append(sub_item)
        if fail_list:
            try:
                bot(
                    title="更新当天发布文章,存在未更新的账号",
                    detail={
                        "columns": generate_bot_columns(),
                        "rows": fail_list
                    },
                    table=True
                )
            except Exception as e:
                print("Timeout Error: {}".format(e))
        else:
            bot(
                title="更新当天发布文章,所有账号均更新成功",
                mention=False,
                detail={
                    "msg": "校验任务完成",
                    "finish_time": str(datetime.today())
                }
            )

    def get_article_detail_job(self):
        """
        Fetch the details of published articles.
        :return:
        """
        select_sql = f"""
            SELECT ContentUrl, wx_sn
            FROM {ARTICLE_TABLE}
            WHERE publish_timestamp IN {(const.DEFAULT_STATUS, const.REQUEST_FAIL_STATUS)};
        """
        article_list = self.piaoquan_crawler_db_client.fetch(select_sql, cursor_type=DictCursor)
        for article in tqdm(article_list):
            try:
                self.update_publish_timestamp(article)
            except Exception as e:
                print(e)
                error_msg = traceback.format_exc()
                print(error_msg)
        # re-check the articles still marked as request-failed (-1) or default (0)
        select_sql = f"""
            SELECT ContentUrl, wx_sn
            FROM {ARTICLE_TABLE}
            WHERE publish_timestamp IN {(const.DEFAULT_STATUS, const.REQUEST_FAIL_STATUS)};
        """
        process_failed_articles = self.piaoquan_crawler_db_client.fetch(select_sql, cursor_type=DictCursor)
        fail_list = []
        if process_failed_articles:
            for article in tqdm(process_failed_articles):
                try:
                    res = self.update_publish_timestamp(article)
                    if res:
                        fail_list.append(res)
                except Exception as e:
                    print(e)
                    error_msg = traceback.format_exc()
                    print(error_msg)
        # backfill publish_timestamp from sibling articles of the same appMsgId
        update_sql = f"""
            UPDATE {ARTICLE_TABLE} oav
            JOIN (
                SELECT ghId, appMsgId, MAX(publish_timestamp) AS publish_timestamp
                FROM {ARTICLE_TABLE}
                WHERE publish_timestamp > %s
                GROUP BY ghId, appMsgId
            ) vv
            ON oav.appMsgId = vv.appMsgId AND oav.ghId = vv.ghId
            SET oav.publish_timestamp = vv.publish_timestamp
            WHERE oav.publish_timestamp <= %s;
        """
        self.piaoquan_crawler_db_client.save(
            query=update_sql,
            params=(0, 0)
        )
        # for rows still missing publish_timestamp, fall back to updateTime
        update_sql_2 = f"""
            UPDATE {ARTICLE_TABLE}
            SET publish_timestamp = updateTime
            WHERE publish_timestamp < %s;
        """
        self.piaoquan_crawler_db_client.save(
            query=update_sql_2,
            params=0
        )
        if fail_list:
            bot(
                title="更新文章任务,请求detail失败",
                detail=fail_list
            )
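

# A minimal driver sketch, assuming the jobs are meant to run in this order
# once per day; the original file ships no entry point, so the schedule below
# is a guess:
if __name__ == "__main__":
    task = UpdatePublishedArticlesReadDetail()
    task.init_database()
    task.update_job()
    task.check_job()
    task.get_article_detail_job()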