# update_article_info_from_aigc.py
  1. """
  2. @author: luojunhui
  3. """
  4. import json
  5. import time
  6. from typing import List, Dict
  7. from pymysql.cursors import DictCursor
  8. from tqdm import tqdm
  9. from applications import aiditApi
  10. from applications import bot
  11. from applications import log
  12. from applications import WeixinSpider
  13. from applications.const import ArticleCollectorConst
  14. from applications.db import DatabaseConnector
  15. from applications.functions import Functions
  16. from config import denet_config, long_articles_config
  17. empty_dict = {}
  18. const = ArticleCollectorConst()
  19. functions = Functions()
  20. spider = WeixinSpider()
  21. class UpdateArticleInfoFromAIGC(object):
  22. """
  23. 从aigc获取文章信息
  24. """
  25. def __init__(self):
  26. self.aigc_db_client = DatabaseConnector(db_config=denet_config)
  27. self.long_articles_db_client = DatabaseConnector(db_config=long_articles_config)
  28. self.aigc_db_client.connect()
  29. self.long_articles_db_client.connect()
  30. def get_published_articles(self) -> List[Dict]:
  31. """
  32. 获取当天发布文章的List
  33. """
  34. sql = f"""
  35. SELECT trace_id, push_type
  36. FROM long_articles_published_trace_id
  37. WHERE create_timestamp > UNIX_TIMESTAMP(DATE_SUB(CURDATE(), INTERVAL 1 DAY)) AND status = {const.INIT_STATUS};
  38. """
  39. article_list = self.long_articles_db_client.fetch(sql, cursor_type=DictCursor)
  40. return article_list
  41. def get_article_info_from_aigc(self, trace_id: str) -> Dict:
  42. """
  43. 从aigc获取发布结果
  44. """
  45. sql = f"""
  46. SELECT t2.crawler_channel_content_id, t2.publish_stage_url, t2.publish_timestamp, t1.result_data
  47. from publish_content_miniprogram t1
  48. join publish_content t2 on t1.publish_content_id = t2.id
  49. where t1.trace_id = '{trace_id}' and t2.status = {const.PUBLISHED_STATUS};
  50. """
  51. article_info = self.aigc_db_client.fetch(sql, cursor_type=DictCursor)
  52. if article_info:
  53. return article_info[0]
  54. else:
  55. return empty_dict
  56. def get_article_info_by_trace_id(self, trace_id: str) -> Dict:
  57. """
  58. 通过trace_id来查询文章信息
  59. """
  60. select_sql = f"""
  61. SELECT t1.gh_id, t1.account_name, t2.article_title
  62. FROM long_articles_match_videos t1
  63. JOIN long_articles_text t2
  64. ON t1.content_id = t2.content_id
  65. WHERE t1.trace_id = '{trace_id}';
  66. """
  67. article_info = self.long_articles_db_client.fetch(select_sql, cursor_type=DictCursor)
  68. if article_info:
  69. return article_info[0]
  70. else:
  71. return empty_dict
  72. def update_each_article(self, article: Dict):
  73. """
  74. 更新每个文章的信息
  75. """
  76. trace_id = article["trace_id"]
  77. push_type = article["push_type"]
  78. article_info = self.get_article_info_from_aigc(trace_id)
  79. if article_info:
  80. channel_content_id = article_info["crawler_channel_content_id"]
  81. published_url = article_info["publish_stage_url"]
  82. publish_timestamp = int(article_info["publish_timestamp"] / 1000)
  83. result_data = json.loads(article_info["result_data"])
  84. root_source_id_list = [
  85. functions.extract_path(item["productionPath"])["root_source_id"] for item in result_data
  86. ]
  87. wx_sn = None
  88. if published_url:
  89. response = spider.get_article_text(content_link=published_url)
  90. code = response['code']
  91. match code:
  92. case const.ARTICLE_SUCCESS_CODE:
  93. long_url = response['data']['data']['content_link']
  94. wx_sn = functions.extract_params_from_url(url=long_url, key="sn")
  95. status = const.SUCCESS_STATUS
  96. case const.ARTICLE_DELETE_CODE:
  97. log(
  98. task="update_article_info_from_aigc",
  99. function="update_each_article",
  100. status="fail",
  101. message=trace_id,
  102. data={
  103. "msg": "文章被删文",
  104. "publish_timestamp": publish_timestamp,
  105. "article_delete_timestamp": int(time.time()),
  106. "duration": int(time.time()) - publish_timestamp
  107. }
  108. )
  109. status = const.FAIL_STATUS
  110. case const.ARTICLE_ILLEGAL_CODE:
  111. log(
  112. task="update_article_info_from_aigc",
  113. function="update_each_article",
  114. status="fail",
  115. message=trace_id,
  116. data={
  117. "msg": "文章被判断违规",
  118. "publish_timestamp": publish_timestamp,
  119. "illegal_timestamp": int(time.time()),
  120. "duration": int(time.time()) - publish_timestamp
  121. }
  122. )
  123. article_info = self.get_article_info_by_trace_id(trace_id)
  124. if article_info:
  125. error_detail = response.get("msg")
  126. insert_sql = f"""
  127. INSERT IGNORE INTO illegal_articles
  128. (gh_id, account_name, title, wx_sn, publish_date, illegal_reason)
  129. VALUES
  130. (%s, %s, %s, %s, %s, %s);
  131. """
  132. affected_rows = self.long_articles_db_client.save(
  133. query=insert_sql,
  134. params=(
  135. article_info['gh_id'],
  136. article_info['account_name'],
  137. article_info['article_title'],
  138. wx_sn,
  139. functions.timestamp_to_str(publish_timestamp),
  140. error_detail
  141. )
  142. )
  143. if affected_rows:
  144. bot(
  145. title="文章违规告警(new task)",
  146. detail={
  147. "account_name": article_info['account_name'],
  148. "gh_id": article_info['gh_id'],
  149. "title": article_info['article_title'],
  150. "wx_sn": wx_sn,
  151. "publish_date": functions.timestamp_to_str(publish_timestamp),
  152. "error_detail": error_detail,
  153. },
  154. mention=False
  155. )
  156. aiditApi.delete_articles(
  157. gh_id=article_info['gh_id'],
  158. title=article_info['article_title']
  159. )
  160. status = const.FAIL_STATUS
  161. case _:
  162. status = const.FAIL_STATUS
  163. else:
  164. if push_type == const.BULK_AUTO_PUSH:
  165. status = const.INIT_STATUS
  166. else:
  167. status = const.SUCCESS_STATUS
  168. update_sql = f"""
  169. UPDATE long_articles_published_trace_id
  170. SET published_url = %s, status = %s, wx_sn = %s, publish_timestamp = %s, crawler_channel_content_id = %s, root_source_id_list = %s
  171. WHERE trace_id = %s;
  172. """
  173. self.long_articles_db_client.save(
  174. query=update_sql,
  175. params=(published_url, status, wx_sn, publish_timestamp, channel_content_id, json.dumps(root_source_id_list), trace_id)
  176. )
  177. else:
  178. update_sql = f"""
  179. UPDATE long_articles_published_trace_id
  180. SET status = %s
  181. WHERE trace_id = %s;
  182. """
  183. self.long_articles_db_client.save(
  184. query=update_sql, params=(const.FAIL_STATUS, trace_id)
  185. )
  186. def deal(self):
  187. """
  188. main function
  189. """
  190. article_list = self.get_published_articles()
  191. for article in tqdm(article_list, desc="更新文章信息"):
  192. try:
  193. self.update_each_article(article)
  194. except Exception as e:
  195. print(e)