updatePublishedMsgDaily.py 25 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710
  1. """
  2. @author: luojunhui
  3. @description: update daily information into official articles v2
  4. """
  5. import json
  6. import time
  7. import traceback
  8. import urllib.parse
  9. from argparse import ArgumentParser
  10. from datetime import datetime
  11. from typing import Dict, List
  12. from pymysql.cursors import DictCursor
  13. from tqdm import tqdm
  14. from applications import aiditApi
  15. from applications import bot
  16. from applications import create_feishu_columns_sheet
  17. from applications import Functions
  18. from applications import log
  19. from applications import WeixinSpider
  20. from applications.const import updatePublishedMsgTaskConst
  21. from applications.db import DatabaseConnector
  22. from config import denet_config, long_articles_config, piaoquan_crawler_config
# Target table holding published official-account articles (v2 schema).
ARTICLE_TABLE = "official_articles_v2"
# Shared task constants (status codes, crawl windows, alert thresholds).
const = updatePublishedMsgTaskConst()
# WeChat article spider client.
spider = WeixinSpider()
# Misc helpers (md5 hashing, ShowDesc stat parsing, ...).
functions = Functions()
  27. def generate_bot_columns():
  28. """
  29. 生成列
  30. :return:
  31. """
  32. columns = [
  33. create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="name", display_name="公众号名称"),
  34. create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="ghId", display_name="ghId"),
  35. create_feishu_columns_sheet(sheet_type="number", sheet_name="follower_count", display_name="粉丝数"),
  36. create_feishu_columns_sheet(sheet_type="date", sheet_name="account_init_timestamp",
  37. display_name="账号接入系统时间"),
  38. create_feishu_columns_sheet(sheet_type="plain_text", sheet_name="using_status", display_name="利用状态")
  39. ]
  40. return columns
  41. def get_account_status(aigc_db_client: DatabaseConnector(denet_config)) -> Dict:
  42. """
  43. 获取账号的实验状态
  44. :return:
  45. """
  46. sql = f"""
  47. SELECT t1.account_id, t2.status
  48. FROM wx_statistics_group_source_account t1
  49. JOIN wx_statistics_group_source t2
  50. ON t1.group_source_name = t2.account_source_name;
  51. """
  52. account_status_list = aigc_db_client.fetch(sql, cursor_type=DictCursor)
  53. account_status_dict = {account['account_id']: account['status'] for account in account_status_list}
  54. return account_status_dict
  55. def get_accounts(aigc_db_client: DatabaseConnector(denet_config)) -> List[Dict]:
  56. """
  57. 从 aigc 数据库中获取目前处于发布状态的账号
  58. :return:
  59. "name": line[0],
  60. "ghId": line[1],
  61. "follower_count": line[2],
  62. "account_init_time": int(line[3] / 1000),
  63. "account_type": line[4], # 订阅号 or 服务号
  64. "account_auth": line[5]
  65. """
  66. account_list_with_out_using_status = aiditApi.get_publish_account_from_aigc()
  67. account_status_dict = get_account_status(aigc_db_client)
  68. account_list = [
  69. {
  70. **item,
  71. 'using_status': 0 if account_status_dict.get(item['account_id']) == '实验' else 1
  72. }
  73. for item in account_list_with_out_using_status
  74. ]
  75. return account_list
  76. def insert_each_msg(db_client, account_info, account_name, msg_list):
  77. """
  78. 把消息数据更新到数据库中
  79. :param account_info:
  80. :param db_client:
  81. :param account_name:
  82. :param msg_list:
  83. :return:
  84. """
  85. gh_id = account_info['ghId']
  86. account_name = account_name['name']
  87. for info in msg_list:
  88. baseInfo = info.get("BaseInfo", {})
  89. appMsgId = info.get("AppMsg", {}).get("BaseInfo", {}).get("AppMsgId", None)
  90. createTime = info.get("AppMsg", {}).get("BaseInfo", {}).get("CreateTime", None)
  91. updateTime = info.get("AppMsg", {}).get("BaseInfo", {}).get("UpdateTime", None)
  92. Type = info.get("AppMsg", {}).get("BaseInfo", {}).get("Type", None)
  93. detail_article_list = info.get("AppMsg", {}).get("DetailInfo", [])
  94. if detail_article_list:
  95. for article in detail_article_list:
  96. title = article.get("Title", None)
  97. Digest = article.get("Digest", None)
  98. ItemIndex = article.get("ItemIndex", None)
  99. ContentUrl = article.get("ContentUrl", None)
  100. SourceUrl = article.get("SourceUrl", None)
  101. CoverImgUrl = article.get("CoverImgUrl", None)
  102. CoverImgUrl_1_1 = article.get("CoverImgUrl_1_1", None)
  103. CoverImgUrl_235_1 = article.get("CoverImgUrl_235_1", None)
  104. ItemShowType = article.get("ItemShowType", None)
  105. IsOriginal = article.get("IsOriginal", None)
  106. ShowDesc = article.get("ShowDesc", None)
  107. show_stat = functions.show_desc_to_sta(ShowDesc)
  108. ori_content = article.get("ori_content", None)
  109. show_view_count = show_stat.get("show_view_count", 0)
  110. show_like_count = show_stat.get("show_like_count", 0)
  111. show_zs_count = show_stat.get("show_zs_count", 0)
  112. show_pay_count = show_stat.get("show_pay_count", 0)
  113. wx_sn = ContentUrl.split("&sn=")[1].split("&")[0] if ContentUrl else None
  114. status = account_info['using_status']
  115. info_tuple = (
  116. gh_id,
  117. account_name,
  118. appMsgId,
  119. title,
  120. Type,
  121. createTime,
  122. updateTime,
  123. Digest,
  124. ItemIndex,
  125. ContentUrl,
  126. SourceUrl,
  127. CoverImgUrl,
  128. CoverImgUrl_1_1,
  129. CoverImgUrl_235_1,
  130. ItemShowType,
  131. IsOriginal,
  132. ShowDesc,
  133. ori_content,
  134. show_view_count,
  135. show_like_count,
  136. show_zs_count,
  137. show_pay_count,
  138. wx_sn,
  139. json.dumps(baseInfo, ensure_ascii=False),
  140. functions.str_to_md5(title),
  141. status
  142. )
  143. try:
  144. insert_sql = f"""
  145. INSERT INTO {ARTICLE_TABLE}
  146. (ghId, accountName, appMsgId, title, Type, createTime, updateTime, Digest, ItemIndex, ContentUrl, SourceUrl, CoverImgUrl, CoverImgUrl_1_1, CoverImgUrl_255_1, ItemShowType, IsOriginal, ShowDesc, ori_content, show_view_count, show_like_count, show_zs_count, show_pay_count, wx_sn, baseInfo, title_md5, status)
  147. values
  148. (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);
  149. """
  150. db_client.save(sql=insert_sql, params=info_tuple)
  151. log(
  152. task="updatePublishedMsgDaily",
  153. function="insert_each_msg",
  154. message="插入文章数据成功",
  155. data={
  156. "info": info_tuple
  157. }
  158. )
  159. except Exception as e:
  160. try:
  161. update_sql = f"""
  162. UPDATE {ARTICLE_TABLE}
  163. SET show_view_count = %s, show_like_count=%s
  164. WHERE wx_sn = %s;
  165. """
  166. db_client.save(sql=update_sql,
  167. params=(show_view_count, show_like_count, wx_sn))
  168. log(
  169. task="updatePublishedMsgDaily",
  170. function="insert_each_msg",
  171. message="更新文章数据成功",
  172. data={
  173. "wxSn": wx_sn,
  174. "likeCount": show_like_count,
  175. "viewCount": show_view_count
  176. }
  177. )
  178. except Exception as e:
  179. log(
  180. task="updatePublishedMsgDaily",
  181. function="insert_each_msg",
  182. message="更新文章失败, 报错原因是: {}".format(e),
  183. status="fail"
  184. )
  185. continue
  186. def update_each_account(db_client: DatabaseConnector(piaoquan_crawler_config), account_info: Dict, latest_update_time: int, cursor=None):
  187. """
  188. 更新每一个账号信息
  189. :param account_info:
  190. :param cursor:
  191. :param latest_update_time: 最新更新时间
  192. :param db_client: 数据库连接信息
  193. :return: None
  194. """
  195. gh_id = account_info['ghId']
  196. response = spider.update_msg_list(ghId=gh_id, index=cursor)
  197. msg_list = response.get("data", {}).get("data", {})
  198. if msg_list:
  199. # do
  200. last_article_in_this_msg = msg_list[-1]
  201. last_time_stamp_in_this_msg = last_article_in_this_msg['AppMsg']['BaseInfo']['UpdateTime']
  202. last_url = last_article_in_this_msg['AppMsg']['DetailInfo'][0]['ContentUrl']
  203. resdata = spider.get_account_by_url(last_url)
  204. check_id = resdata['data'].get('data', {}).get('wx_gh')
  205. if check_id == gh_id:
  206. insert_each_msg(
  207. db_client=db_client,
  208. account_info=account_info,
  209. msg_list=msg_list
  210. )
  211. if last_time_stamp_in_this_msg > latest_update_time:
  212. next_cursor = response['data']['next_cursor']
  213. return update_each_account(
  214. db_client=db_client,
  215. account_info=account_info,
  216. latest_update_time=latest_update_time,
  217. cursor=next_cursor
  218. )
  219. log(
  220. task="updatePublishedMsgDaily",
  221. function="update_each_account",
  222. message="账号文章更新成功",
  223. data=response
  224. )
  225. else:
  226. log(
  227. task="updatePublishedMsgDaily",
  228. function="update_each_account",
  229. message="账号文章更新失败",
  230. status="fail",
  231. data=response
  232. )
  233. return
  234. def check_account_info(piaoquan_crawler_db_client: DatabaseConnector(piaoquan_crawler_config), gh_id: str) -> int:
  235. """
  236. 通过 gh_id查询账号信息的最新发布时间
  237. :param piaoquan_crawler_db_client:
  238. :param gh_id:
  239. :return:
  240. """
  241. sql = f"""
  242. SELECT MAX(publish_timestamp)
  243. FROM {ARTICLE_TABLE}
  244. WHERE ghId = '{gh_id}';
  245. """
  246. result = piaoquan_crawler_db_client.fetch(sql)
  247. if result:
  248. return result[0][0]
  249. else:
  250. # 新号,抓取周期定位抓取时刻往前推30天
  251. return int(time.time()) - const.NEW_ACCOUNT_CRAWL_PERIOD
  252. def update_single_account(piaoquan_crawler_db_client: DatabaseConnector(piaoquan_crawler_config), account_info: Dict):
  253. """
  254. 更新单个账号
  255. :param piaoquan_crawler_db_client:
  256. :param account_info:
  257. :return:
  258. """
  259. gh_id = account_info['ghId']
  260. max_publish_time = check_account_info(piaoquan_crawler_db_client, gh_id)
  261. update_each_account(
  262. db_client=piaoquan_crawler_db_client,
  263. account_info=account_info,
  264. latest_update_time=max_publish_time
  265. )
  266. def check_single_account(db_client, account_item):
  267. """
  268. 校验每个账号是否更新
  269. :param db_client:
  270. :param account_item:
  271. :return: True / False
  272. """
  273. gh_id = account_item['ghId']
  274. account_type = account_item['account_type']
  275. today_str = datetime.today().strftime("%Y-%m-%d")
  276. today_date_time = datetime.strptime(today_str, "%Y-%m-%d")
  277. today_timestamp = today_date_time.timestamp()
  278. sql = f"""
  279. SELECT max(updateTime)
  280. FROM {ARTICLE_TABLE}
  281. WHERE ghId = '{gh_id}';
  282. """
  283. try:
  284. latest_update_time = db_client.fetch(sql)[0][0]
  285. # 判断该账号当天发布的文章是否被收集
  286. if account_type in const.SUBSCRIBE_TYPE_SET:
  287. if int(latest_update_time) > int(today_timestamp):
  288. return True
  289. else:
  290. return False
  291. else:
  292. if int(latest_update_time) > int(today_timestamp) - 7 * 24 * 3600:
  293. return True
  294. else:
  295. return False
  296. except Exception as e:
  297. print(e)
  298. return False
  299. def update_job(piaoquan_crawler_db_client, aigc_db_client):
  300. """
  301. 更新任务
  302. :return:
  303. """
  304. account_list = get_accounts(aigc_db_client=aigc_db_client)
  305. # 订阅号
  306. subscription_accounts = [i for i in account_list if i['account_type'] in const.SUBSCRIBE_TYPE_SET]
  307. success_count = 0
  308. fail_count = 0
  309. for sub_item in tqdm(subscription_accounts):
  310. try:
  311. update_single_account(piaoquan_crawler_db_client, sub_item)
  312. success_count += 1
  313. time.sleep(5)
  314. except Exception as e:
  315. fail_count += 1
  316. log(
  317. task="updatePublishedMsgDaily",
  318. function="update_job",
  319. message="单个账号文章更新失败, 报错信息是: {}".format(e),
  320. status="fail",
  321. )
  322. log(
  323. task="updatePublishedMsgDaily",
  324. function="update_job",
  325. message="订阅号更新完成",
  326. data={
  327. "success": success_count,
  328. "fail": fail_count
  329. }
  330. )
  331. if fail_count / (success_count + fail_count) > const.SUBSCRIBE_FAIL_RATE_THRESHOLD:
  332. bot(
  333. title="订阅号超过 30% 的账号更新失败",
  334. detail={
  335. "success": success_count,
  336. "fail": fail_count,
  337. "failRate": fail_count / (success_count + fail_count)
  338. }
  339. )
  340. bot(
  341. title="更新每日发布文章任务完成通知",
  342. detail={
  343. "msg": "订阅号更新完成",
  344. "finish_time": datetime.today().__str__()
  345. },
  346. mention=False
  347. )
  348. # 服务号
  349. server_accounts = [i for i in account_list if i['account_type'] == const.SERVICE_TYPE]
  350. for sub_item in tqdm(server_accounts):
  351. try:
  352. update_single_account(piaoquan_crawler_db_client, sub_item)
  353. time.sleep(5)
  354. except Exception as e:
  355. print(e)
  356. bot(
  357. title="更新每日发布文章任务完成通知",
  358. detail={
  359. "msg": "服务号更新完成",
  360. "finish_time": datetime.today().__str__()
  361. },
  362. mention=False
  363. )
  364. def check_job(piaoquan_crawler_db_client, aigc_db_client):
  365. """
  366. 校验任务
  367. :return:
  368. """
  369. account_list = get_accounts(aigc_db_client=aigc_db_client)
  370. # 订阅号
  371. subscription_accounts = [i for i in account_list if i['account_type'] in const.SUBSCRIBE_TYPE_SET]
  372. fail_list = []
  373. # check and rework if fail
  374. for sub_item in tqdm(subscription_accounts):
  375. res = check_single_account(piaoquan_crawler_db_client, sub_item)
  376. if not res:
  377. update_single_account(piaoquan_crawler_db_client, sub_item)
  378. # check whether success and bot if fails
  379. for sub_item in tqdm(account_list):
  380. res = check_single_account(piaoquan_crawler_db_client, sub_item)
  381. if not res:
  382. # 去掉三个不需要查看的字段
  383. sub_item.pop('account_type', None)
  384. sub_item.pop('account_auth', None)
  385. sub_item.pop('account_id', None)
  386. fail_list.append(sub_item)
  387. if fail_list:
  388. try:
  389. bot(
  390. title="更新当天发布文章,存在未更新的账号",
  391. detail={
  392. "columns": generate_bot_columns(),
  393. "rows": fail_list
  394. },
  395. table=True
  396. )
  397. except Exception as e:
  398. print("Timeout Error: {}".format(e))
  399. else:
  400. bot(
  401. title="更新当天发布文章,所有账号均更新成功",
  402. mention=False,
  403. detail={
  404. "msg": "校验任务完成",
  405. "finish_time": datetime.today().__str__()
  406. }
  407. )
  408. def get_articles(db_client):
  409. """
  410. :return:
  411. """
  412. sql = f"""
  413. SELECT ContentUrl, wx_sn
  414. FROM {ARTICLE_TABLE}
  415. WHERE publish_timestamp in {(const.DEFAULT_STATUS, const.REQUEST_FAIL_STATUS)};"""
  416. response = db_client.fetch(sql)
  417. return response
  418. def update_publish_timestamp(db_client, row):
  419. """
  420. 更新发布时间戳 && minigram 信息
  421. :param db_client:
  422. :param row:
  423. :return:
  424. """
  425. url = row[0]
  426. wx_sn = row[1]
  427. try:
  428. response = spider.get_article_text(url)
  429. response_code = response['code']
  430. if response_code == const.ARTICLE_DELETE_CODE:
  431. publish_timestamp_s = const.DELETE_STATUS
  432. root_source_id_list = []
  433. elif response_code == const.ARTICLE_ILLEGAL_CODE:
  434. publish_timestamp_s = const.ILLEGAL_STATUS
  435. root_source_id_list = []
  436. elif response_code == const.ARTICLE_SUCCESS_CODE:
  437. data = response['data']['data']
  438. publish_timestamp_ms = data['publish_timestamp']
  439. publish_timestamp_s = int(publish_timestamp_ms / 1000)
  440. mini_program = data.get('mini_program', [])
  441. if mini_program:
  442. root_source_id_list = [
  443. urllib.parse.parse_qs(
  444. urllib.parse.unquote(i['path'])
  445. )['rootSourceId'][0]
  446. for i in mini_program
  447. ]
  448. else:
  449. root_source_id_list = []
  450. else:
  451. publish_timestamp_s = const.UNKNOWN_STATUS
  452. root_source_id_list = []
  453. except Exception as e:
  454. publish_timestamp_s = const.REQUEST_FAIL_STATUS
  455. root_source_id_list = None
  456. error_msg = traceback.format_exc()
  457. print(e, error_msg)
  458. update_sql = f"""
  459. UPDATE {ARTICLE_TABLE}
  460. SET publish_timestamp = %s, root_source_id_list = %s
  461. WHERE wx_sn = %s;
  462. """
  463. db_client.save(
  464. sql=update_sql,
  465. params=(
  466. publish_timestamp_s,
  467. json.dumps(root_source_id_list, ensure_ascii=False),
  468. wx_sn
  469. ))
  470. if publish_timestamp_s == const.REQUEST_FAIL_STATUS:
  471. return row
  472. else:
  473. return None
  474. def get_article_detail_job(piaoquan_crawler_db_client):
  475. """
  476. 获取发布文章详情
  477. :return:
  478. """
  479. article_tuple = get_articles(piaoquan_crawler_db_client)
  480. for article in tqdm(article_tuple):
  481. try:
  482. update_publish_timestamp(db_client=piaoquan_crawler_db_client, row=article)
  483. except Exception as e:
  484. print(e)
  485. error_msg = traceback.format_exc()
  486. print(error_msg)
  487. # check 一遍存在请求失败-1 && 0 的文章
  488. process_failed_articles = get_articles(piaoquan_crawler_db_client)
  489. fail_list = []
  490. if process_failed_articles:
  491. for article in tqdm(process_failed_articles):
  492. try:
  493. res = update_publish_timestamp(db_client=piaoquan_crawler_db_client, row=article)
  494. fail_list.append({"wx_sn": res[1], "url": res[0]})
  495. except Exception as e:
  496. print(e)
  497. error_msg = traceback.format_exc()
  498. print(error_msg)
  499. # 通过msgId 来修改publish_timestamp
  500. update_sql = f"""
  501. UPDATE {ARTICLE_TABLE} oav
  502. JOIN (
  503. SELECT appMsgId, MAX(publish_timestamp) AS publish_timestamp
  504. FROM {ARTICLE_TABLE}
  505. WHERE publish_timestamp > %s
  506. GROUP BY appMsgId
  507. ) vv
  508. ON oav.appMsgId = vv.appMsgId
  509. SET oav.publish_timestamp = vv.publish_timestamp
  510. WHERE oav.publish_timestamp <= %s;
  511. """
  512. piaoquan_crawler_db_client.save(
  513. sql=update_sql,
  514. params=(0, 0)
  515. )
  516. # 若还是无 publish_timestamp,用update_time当作 publish_timestamp
  517. update_sql_2 = f"""
  518. UPDATE {ARTICLE_TABLE}
  519. SET publish_timestamp = updateTime
  520. WHERE publish_timestamp < %s;
  521. """
  522. piaoquan_crawler_db_client.save(
  523. sql=update_sql_2,
  524. params=0
  525. )
  526. if fail_list:
  527. bot(
  528. title="更新文章任务,请求detail失败",
  529. detail=fail_list
  530. )
  531. def whether_title_unsafe(db_client, title):
  532. """
  533. 检查文章标题是否已经存在违规记录
  534. :param db_client:
  535. :param title:
  536. :return:
  537. """
  538. title_md5 = functions.str_to_md5(title)
  539. sql = f"""
  540. SELECT title_md5
  541. FROM article_unsafe_title
  542. WHERE title_md5 = '{title_md5}';
  543. """
  544. res = db_client.fetch(sql)
  545. if res:
  546. return True
  547. else:
  548. return False
  549. def monitor(piaoquan_crawler_db_client, long_articles_db_client, run_date):
  550. """
  551. 监控任务, 监测周期为7天,监测文章是否被违规,若监测到违规文章,则进行告警
  552. :return:
  553. """
  554. if not run_date:
  555. run_date = datetime.today().strftime("%Y-%m-%d")
  556. monitor_start_timestamp = int(datetime.strptime(run_date, "%Y-%m-%d").timestamp()) - const.MONITOR_PERIOD
  557. select_sql = f"""
  558. SELECT ghId, accountName, title, ContentUrl, wx_sn, from_unixtime(publish_timestamp) AS publish_timestamp
  559. FROM {ARTICLE_TABLE}
  560. WHERE publish_timestamp >= {monitor_start_timestamp};
  561. """
  562. article_list = piaoquan_crawler_db_client.fetch(select_sql)
  563. for article in tqdm(article_list, desc="monitor article list"):
  564. gh_id = article[0]
  565. account_name = article[1]
  566. title = article[2]
  567. # 判断标题是否存在违规记录
  568. if whether_title_unsafe(long_articles_db_client, title):
  569. continue
  570. url = article[3]
  571. wx_sn = article[4]
  572. publish_date = article[5]
  573. try:
  574. response = spider.get_article_text(url, is_cache=False)
  575. response_code = response['code']
  576. if response_code == const.ARTICLE_ILLEGAL_CODE:
  577. bot(
  578. title="文章违规告警",
  579. detail={
  580. "ghId": gh_id,
  581. "accountName": account_name,
  582. "title": title,
  583. "wx_sn": str(wx_sn),
  584. "publish_date": str(publish_date)
  585. },
  586. mention=False
  587. )
  588. aiditApi.delete_articles(
  589. gh_id=gh_id,
  590. title=title
  591. )
  592. except Exception as e:
  593. error_msg = traceback.format_exc()
  594. log(
  595. task="monitor",
  596. function="monitor",
  597. message="请求文章详情失败",
  598. data={
  599. "ghId": gh_id,
  600. "accountName": account_name,
  601. "title": title,
  602. "wx_sn": str(wx_sn),
  603. "error": str(e),
  604. "msg": error_msg
  605. }
  606. )
  607. def main():
  608. """
  609. main
  610. :return:
  611. """
  612. parser = ArgumentParser()
  613. parser.add_argument(
  614. "--run_task",
  615. help="update: update_job, check: check_job, detail: get_article_detail_job, monitor: monitor")
  616. parser.add_argument(
  617. "--run_date",
  618. help="--run_date %Y-%m-%d",
  619. )
  620. args = parser.parse_args()
  621. # 初始化数据库连接
  622. try:
  623. piaoquan_crawler_db_client = DatabaseConnector(piaoquan_crawler_config)
  624. piaoquan_crawler_db_client.connect()
  625. aigc_db_client = DatabaseConnector(denet_config)
  626. aigc_db_client.connect()
  627. long_articles_db_client = DatabaseConnector(long_articles_config)
  628. except Exception as e:
  629. error_msg = traceback.format_exc()
  630. bot(
  631. title="更新文章任务连接数据库失败",
  632. detail={
  633. "error": e,
  634. "msg": error_msg
  635. }
  636. )
  637. return
  638. if args.run_task:
  639. run_task = args.run_task
  640. match run_task:
  641. case "update":
  642. update_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client, aigc_db_client=aigc_db_client)
  643. case "check":
  644. check_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client, aigc_db_client=aigc_db_client)
  645. case "detail":
  646. get_article_detail_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client)
  647. case "monitor":
  648. if args.run_date:
  649. run_date = args.run_date
  650. else:
  651. run_date = None
  652. monitor(piaoquan_crawler_db_client=piaoquan_crawler_db_client,
  653. long_articles_db_client=long_articles_db_client, run_date=run_date)
  654. case _:
  655. print("No such task, input update: update_job, check: check_job, detail: get_article_detail_job")
  656. else:
  657. update_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client, aigc_db_client=aigc_db_client)
  658. check_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client, aigc_db_client=aigc_db_client)
  659. get_article_detail_job(piaoquan_crawler_db_client=piaoquan_crawler_db_client)
  660. if __name__ == '__main__':
  661. main()