# crawler_hot_point.py

from __future__ import annotations

import asyncio
import json
import traceback
from typing import Dict, List, Tuple

from tqdm.asyncio import tqdm

from applications.api import fetch_deepseek_completion
from applications.crawler.tophub import get_hot_point_content


class CrawlerHotPointConst:
    MAX_PAGE_INDEX = 40

    INIT_STATUS = 0
    PROCESSING_STATUS = 1
    USEFUL_STATUS = 2
    NOT_USEFUL_STATUS = 3
    FAILED_STATUS = 99

    NOT_EXPIRED_STATUS = 1
    EXPIRED_STATUS = 2

    # batch
    PROCESS_TITLE_BATCH_SIZE = 500

    # ignore platforms
    IGNORE_PLATFORMS = {
        "中国日报", "每日珠宝杂志", "iBag包包", "ZAKER", "NASA 🌍", "wikiHow 中文",
        "China Daily", "微信 ‧ 游戏", "Yahoo News", "北京天文馆", "本地宝"
    }
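

# Note (added for clarity): inferred from the mapper methods below, not stated
# in the original source. The two columns appear to form this state machine
# per title row:
#     useful: INIT_STATUS (0) -> PROCESSING_STATUS (1) -> USEFUL_STATUS (2)
#                                                      -> NOT_USEFUL_STATUS (3)
#                                                      -> FAILED_STATUS (99)
#     status: NOT_EXPIRED_STATUS (1) -> EXPIRED_STATUS (2), tracked independently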


class CrawlerHotPointBase(CrawlerHotPointConst):
    CLASSIFY_PROMPT = """
You are a content-analysis assistant that screens hot-list headlines for "silver" content that people aged 55+ are likely to enjoy or care about.
Silver content typically covers health, elder care, retirement life, age-related diseases, social security, intergenerational relationships, curiosities, celebrity stories, social events, and similar topics.
Do not include politics or sensitive events involving current national leaders.
1. **Task**:
Scan all headlines and select timely news items highly relevant to silver content. Judge relevance by whether a headline directly or indirectly touches on senior-related topics, or is likely to interest people aged 55 and above. Return the ids that qualify.
Filter out sensitive figures as usual. Note that each item must be a news event; judge strictly whether a headline suits an older audience.
2. **Output format**: return JSON containing only the ids of topics suitable for seniors, structured as
{
    "IDS": [1, 2, 3, ...]
}
Now, please process the headlines && ids I provide. Please think step by step.
"""

    @staticmethod
    def format_input_articles(fetch_response: List[Dict]) -> str:
        """
        Format the input articles as a string, one article per line: "id, title".
        """
        output_string = ""
        for item in fetch_response:
            output_string += f"{item['id']}, {item['title']}\n"
        return output_string
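
    # Illustrative example (hypothetical titles), showing the exact text that
    # format_input_articles appends to CLASSIFY_PROMPT:
    #     format_input_articles([
    #         {"id": 101, "title": "Example headline A"},
    #         {"id": 102, "title": "Example headline B"},
    #     ])
    #     -> "101, Example headline A\n102, Example headline B\n"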


class CrawlerHotPointMapper(CrawlerHotPointBase):
    def __init__(self, pool, log_client, trace_id):
        self.pool = pool
        self.log_client = log_client
        self.trace_id = trace_id

    async def save_articles(self, articles: List[Tuple]) -> int:
        """Insert titles && links."""
        query = """
            INSERT IGNORE INTO hot_point_titles
                (title, platform, link)
            VALUES (%s, %s, %s);
        """
        return await self.pool.async_save(query=query, params=articles, batch=True)

    async def update_useful_status(
        self, article_id: int, origin_status: int, new_status: int
    ) -> int:
        """
        Update the triage state of an article.
        """
        query = """
            UPDATE hot_point_titles
            SET useful = %s
            WHERE id = %s AND useful = %s;
        """
        return await self.pool.async_save(
            query=query, params=(new_status, article_id, origin_status)
        )

    async def set_as_processing(self, title_ids: List[int]) -> int:
        query = """
            UPDATE hot_point_titles
            SET useful = %s
            WHERE id IN %s;
        """
        return await self.pool.async_save(
            query=query, params=(self.PROCESSING_STATUS, tuple(title_ids))
        )

    async def set_as_failed(self, title_ids: List[int]) -> int:
        """
        Mark articles as failed.
        """
        query = """
            UPDATE hot_point_titles
            SET useful = %s
            WHERE id IN %s;
        """
        return await self.pool.async_save(
            query=query, params=(self.FAILED_STATUS, tuple(title_ids))
        )

    async def set_as_expired(self, article_id: int) -> int:
        """
        Mark an article as expired.
        """
        query = """
            UPDATE hot_point_titles
            SET status = %s
            WHERE id = %s;
        """
        return await self.pool.async_save(
            query=query, params=(self.EXPIRED_STATUS, article_id)
        )

    async def fetch_init_articles(self) -> List[Dict]:
        """
        Fetch articles that have not yet been triaged by the LLM.
        """
        query = """
            SELECT id, title
            FROM hot_point_titles
            WHERE status = %s AND useful = %s
            ORDER BY id
            LIMIT %s;
        """
        return await self.pool.async_fetch(
            query=query,
            params=(self.NOT_EXPIRED_STATUS, self.INIT_STATUS, self.PROCESS_TITLE_BATCH_SIZE),
        )


class CrawlerHotPointTask(CrawlerHotPointMapper):
    def __init__(self, pool, log_client, trace_id):
        super().__init__(pool, log_client, trace_id)

    def process_raw_data(self, response_data):
        """
        Flatten the raw tophub payload into (title, platform, link) tuples,
        skipping ignored platforms.
        """
        articles = []
        for item in response_data["data"]["data"]:
            platform = item["source"]
            if platform in self.IGNORE_PLATFORMS:
                continue
            for article in item["rankList"]:
                title = article["title"]
                link = article["link"]
                articles.append((title, platform, link))
        return articles
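
    # Assumed shape of the tophub payload consumed by process_raw_data,
    # inferred from the key accesses above (not a documented schema):
    #     {"data": {"data": [
    #         {"source": "<platform>",
    #          "rankList": [{"title": "...", "link": "https://..."}, ...]},
    #     ]}}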

    async def crawl_hot_titles(self):
        """
        Crawl hot-list titles page by page and persist them.
        """
        for page in tqdm(range(1, self.MAX_PAGE_INDEX)):
            try:
                raw_data = await get_hot_point_content(page_index=page)
                articles = self.process_raw_data(raw_data)
                await self.save_articles(articles)
            except Exception as e:
                print(f"crawl_hot_titles error: {e}")

    async def classify_articles_by_llm(self):
        """
        Classify hot events with the LLM: does the event match the interests
        of the 55+ audience?
        """
        infos = await self.fetch_init_articles()
        if not infos:
            # nothing to triage; also avoids an invalid empty `IN ()` clause below
            return
        # acquire lock
        title_ids = [item["id"] for item in infos]
        await self.set_as_processing(title_ids)
        prompt = f"{self.CLASSIFY_PROMPT}\n{self.format_input_articles(infos)}"
        response = fetch_deepseek_completion(
            prompt=prompt, model="DeepSeek-R1", output_type="json"
        )
        if not response:
            affected = await self.set_as_failed(title_ids)
            print(affected)
            return
        ids = set(response.get("IDS", []))
        for item in tqdm(infos):
            id_ = item["id"]
            if id_ in ids:
                await self.update_useful_status(id_, self.PROCESSING_STATUS, self.USEFUL_STATUS)
            else:
                await self.update_useful_status(id_, self.PROCESSING_STATUS, self.NOT_USEFUL_STATUS)
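

# Usage sketch (a minimal illustration, not part of the module). It assumes an
# async MySQL pool exposing async_save / async_fetch as used by the mapper;
# `build_pool` and `build_log_client` are hypothetical factories.
#
# async def main():
#     pool = await build_pool()
#     log_client = build_log_client()
#     task = CrawlerHotPointTask(pool, log_client, trace_id="local-debug")
#     await task.crawl_hot_titles()          # step 1: crawl titles into MySQL
#     await task.classify_articles_by_llm()  # step 2: LLM triage of new rows
#
# if __name__ == "__main__":
#     asyncio.run(main())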