zhongqingkandian.py

import os
import sys
import json
import random
import uuid
import time
import traceback
from datetime import datetime

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

sys.path.append(os.getcwd())

from application.common.feishu import FsData
from application.common.feishu.feishu_utils import FeishuUtils
from application.common.gpt import GPT4oMini
from application.common.messageQueue import MQ
from application.common.log import AliyunLogger, Local
from application.functions.zqkd_db_redis import DatabaseOperations, RedisOperations
from application.items import VideoItem
from application.pipeline import PiaoQuanPipeline


class ZhongQingKanDian:
    API_BASE_URL = "http://8.217.192.46:8889"
    COMMON_HEADERS = {
        "Content-Type": "application/json"
    }
    # Maximum number of retries per request
    MAX_RETRIES = 3
    # Maximum wait time per request (seconds), used as the requests timeout
    TIMEOUT = 30

    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
        """
        Initialize the crawler.
        :param platform: platform name, e.g. "zhongqingkandian"
        :param mode: run mode, e.g. "recommend"
        :param rule_dict: rule dict with limits such as video count and duration,
                          e.g. {"videos_cnt": {"min": 100, "max": 0}, "duration": {"min": 30, "max": 1200}}
        :param user_list: list of our users to attribute crawled videos to
        :param env: runtime environment, defaults to "prod"
        """
        self.limit_flag = True
        self.platform = platform
        self.mode = mode
        self.rule_dict = rule_dict
        self.user_list = user_list
        self.env = env
        self.download_cnt = 0
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.expire_flag = False
        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
        self.db_ops = DatabaseOperations(mode=mode, platform=platform)
        self.redis_ops = RedisOperations(mode=mode, platform=platform)
        data_rule = FsData()
        self.title_rule = data_rule.get_title_rule()
        self.LocalLog = Local.logger(self.platform, self.mode)
        self.session = requests.session()
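    # Note: self.title_rule is a Feishu-backed rule string (FsData.get_title_rule()); it is
    # later split on "," in process_video_obj to decide whether a title should be rewritten
    # by GPT4oMini.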

    def send_request(self, path, data):
        """
        Synchronously send a POST request to the given path, with retries.
        :param path: API path to request
        :param data: request payload
        :return: response JSON on success, otherwise None
        """
        full_url = f"{self.API_BASE_URL}{path}"
        for retry in range(self.MAX_RETRIES):
            try:
                response = self.session.post(full_url, data=data, timeout=self.TIMEOUT, headers=self.COMMON_HEADERS)
                response.raise_for_status()
                self.LocalLog.info(f"{path}响应数据:{response.json()}")
                return response.json()
            except Exception as e:
                tb_info = traceback.format_exc()
                self.LocalLog.info(f"{path}请求失败:{e} \n{tb_info}")
                self.aliyun_log.logging(
                    code="3000",
                    message=f"请求 {path} 失败,错误信息: {str(e)}",
                    data={"path": path}
                )
                time.sleep(5)
        return None
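    # The crawler backend appears to wrap every payload in an envelope of the form
    # {"code": 0, "data": {...}}: is_response_valid() treats code == 0 as success, and the
    # recommend/detail payloads are nested one level deeper under resp["data"]["data"].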

    def is_response_valid(self, resp, url):
        """
        Check whether a response is valid (a code of 0 means success).
        :param resp: response data
        :param url: requested URL
        :return: the response if valid, otherwise None
        """
        try:
            if resp and resp.get('code') != 0:
                self.aliyun_log.logging(
                    code="3000",
                    message=f"抓取{url}失败,请求失败,响应:{resp}"
                )
                self.LocalLog.info(f"{url}请求失败,响应:{resp}")
                return None
            return resp
        except Exception as e:
            tb_info = traceback.format_exc()
            self.aliyun_log.logging(
                code="3000",
                message=f"检查响应有效性时出错,错误信息: {str(e)}",
                data={"url": url, "resp": resp}
            )
            self.LocalLog.info(f"检查 {url} 响应有效性时出错:{e} \n{tb_info}")
            return None

    def req_recommend_list(self):
        """
        Synchronously request the recommended-video list.
        :return: the validated response data, or None if the request failed
        """
        url = '/crawler/zhong_qing_kan_dian/recommend'
        try:
            body = json.dumps({"cursor": ""})
            self.LocalLog.info(f"开始请求推荐{body}")
            resp = self.send_request(url, body)
            return self.is_response_valid(resp, url)
        except Exception as e:
            tb_info = traceback.format_exc()
            self.aliyun_log.logging(
                code="1003",
                message=f"请求推荐视频列表时发生异常,错误信息: {str(e)}\n{tb_info}",
                data={"url": url}
            )
            self.LocalLog.info(f"请求推荐视频列表 {url} 时发生异常:{str(e)} \n{tb_info}")
            return None
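    # Each recommend entry is expected to carry at least "share_url" (the content link) and
    # "id"; control_request_recommend() skips entries missing either field.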

    def req_detail(self, content_link, **kwargs):
        """
        Synchronously request video details.
        :param content_link: link to the video content
        :param kwargs: extra video info from the recommend list (e.g. a label such as
                       "recommend" or "related"), merged into the detail payload
        :return: None; valid video details are handed to process_video_obj
        """
        try:
            self.LocalLog.info(f"开始请求视频详情,链接: {content_link}")
            url = '/crawler/zhong_qing_kan_dian/detail'
            body = json.dumps({
                "content_link": content_link
            })
            resp = self.send_request(url, body)
            if not self.is_response_valid(resp, url):
                return
            data = resp.get("data", {}).get("data", {})
            if data.get("content_type") != "video":
                self.aliyun_log.logging(
                    code="3003",
                    message="跳过非视频内容",
                    data={"content_link": content_link}
                )
                self.LocalLog.info(f"跳过非视频内容,链接: {content_link}")
                return
            self.LocalLog.info(f"{content_link} 是视频")
            data.update(kwargs)
            self.process_video_obj(data)
        except Exception as e:
            tb_info = traceback.format_exc()
            self.aliyun_log.logging(
                code="1005",
                message=f"请求视频详情时发生异常,错误信息: {str(e)}",
                data={"content_link": content_link}
            )
            self.LocalLog.error(f"请求视频详情,链接 {content_link} 时发生异常:{e} \n{tb_info}")
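    # Fields read from the merged detail payload further down (see process_video_obj):
    # channel_content_id, channel_account_id, channel_account_name, avatar, title, read_num,
    # publish_timestamp, collect_num, share_num, cmt_num, video_url_list, image_url_list.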

    def control_request_recommend(self):
        """
        Drive the request/processing loop for the recommended-video list.
        :return: None; the loop is bounded by the daily download limit (limit_flag)
        """
        while self.limit_flag:
            try:
                self.LocalLog.info(f"开始推荐视频列表的请求和处理流程,今日已爬推荐 {self.download_cnt} 个视频")
                recommend_resp = self.req_recommend_list()
                if not recommend_resp:
                    time.sleep(random.randint(5, 10))
                    continue
                recommend_list = recommend_resp.get("data", {}).get("data", [])
                self.LocalLog.info(f"获取的推荐列表长度:{len(recommend_list)}")
                for video_obj in recommend_list:
                    # if not self.limit_flag:
                    #     self.LocalLog.info(f"今日视频数量已达最大量{self.download_cnt}")
                    #     return
                    content_link = video_obj.get("share_url")
                    content_id = video_obj.get("id")
                    self.LocalLog.info(f"content_link == {content_link} \n content_id == {content_id}")
                    if not (content_link and content_id):
                        continue
                    # Record the current content id in Redis
                    self.redis_ops.save_recommend_video(content_id)
                    time.sleep(random.randint(5, 10))
                    self.req_detail(content_link, **video_obj)
            except Exception as e:
                tb_info = traceback.format_exc()
                self.aliyun_log.logging(
                    code="3008",
                    message=f"控制推荐视频请求和处理时发生异常,错误信息: {str(e)}",
                    data={}
                )
                self.LocalLog.info(f"控制推荐视频请求和处理时发生异常:\n{tb_info}")
        self.LocalLog.info(f"循环结束,当前 limit_flag 值为: {self.limit_flag}")
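    # The loop above ends only when process_video_obj() flips limit_flag to False, which
    # happens once download_cnt reaches rule_dict["videos_cnt"]["min"] (default 100).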

    def process_video_obj(self, video_obj):
        """
        Process a video object: check duration, handle author info, build the item and send it on.
        :param video_obj: video object containing all video info
        :return: None
        """
        try:
            video_duration = video_obj["video_url_list"][0]['video_duration']
            video_id = video_obj['channel_content_id']
            # Skip videos whose id has already been seen
            if self.redis_ops.check_video_id_exists(video_id):
                self.aliyun_log.logging(
                    code="3004",
                    message=f"重复视频ID:{video_id}"
                )
                self.LocalLog.info(f"重复视频ID: {video_id}")
                return
            our_user = random.choice(self.user_list)
            trace_id = self.platform + str(uuid.uuid1())
            item = VideoItem()
            account_id = video_obj["channel_account_id"]
            account_name = video_obj["channel_account_name"]
            account_avatar = video_obj["avatar"]
            # Check whether the author account already exists in the database
            is_repeat_user = self.db_ops.check_user_id(account_id)
            if is_repeat_user:
                # Update the existing user record
                self.LocalLog.info(f"用户{account_id}已经存在数据库中")
                self.db_ops.update_user(account_id, account_name, account_avatar)
            else:
                self.LocalLog.info(f"用户{account_id}没在数据库中")
                # Insert a new user record
                self.db_ops.insert_user(account_id, account_name, account_avatar)
                self.aliyun_log.logging(code="1007", message=f"用户数据写入成功,用户ID:{account_id}")
                self.LocalLog.info(f"用户数据写入成功,用户ID: {account_id}")
            if (video_duration > self.rule_dict.get("duration", {}).get("max", 1200)
                    or video_duration < self.rule_dict.get("duration", {}).get("min", 30)):
                self.aliyun_log.logging(
                    code="3005",
                    message=f"视频时长不满足条件[>=30s&<=1200s]视频ID:{video_obj['channel_content_id']},视频时长:{video_duration}"
                )
                self.LocalLog.info(
                    f"视频时长不满足条件,视频ID: {video_obj['channel_content_id']}, 视频时长: {video_duration}")
                return
            item.add_video_info("video_id", video_obj['channel_content_id'])
            item.add_video_info("video_title", video_obj["title"])
            item.add_video_info("play_cnt", self.convert_number(video_obj["read_num"]))
            item.add_video_info("publish_time_stamp", int(int(video_obj["publish_timestamp"]) / 1000))
            item.add_video_info("out_user_id", video_obj["channel_account_id"])
            item.add_video_info("cover_url", video_obj["image_url_list"][0]['image_url'])
            item.add_video_info("like_cnt", 0)
            item.add_video_info("collection_cnt", int(video_obj['collect_num']))
            item.add_video_info("share_cnt", int(video_obj["share_num"]))
            item.add_video_info("comment_cnt", int(video_obj["cmt_num"]))
            item.add_video_info("video_url", video_obj["video_url_list"][0]['video_url'])
            item.add_video_info("out_video_id", int(video_obj["channel_content_id"]))
            item.add_video_info("duration", video_obj["video_url_list"][0]['video_duration'])
            item.add_video_info("platform", self.platform)
            item.add_video_info("strategy", self.mode)
            item.add_video_info("session", f"{self.platform}-{int(time.time())}")
            item.add_video_info("user_id", our_user["uid"])
            item.add_video_info("user_name", our_user["nick_name"])
            mq_obj = item.produce_item()
            pipeline = PiaoQuanPipeline(
                platform=self.platform,
                mode=self.mode,
                rule_dict=self.rule_dict,
                env=self.env,
                item=mq_obj,
                trace_id=trace_id
            )
            if pipeline.process_item():
                title_list = self.title_rule.split(",")
                title = video_obj["title"]
                contains_keyword = any(keyword in title for keyword in title_list)
                if contains_keyword:
                    new_title = GPT4oMini.get_ai_mini_title(title)
                    if new_title:
                        item.add_video_info("video_title", new_title)
                        current_time = datetime.now()
                        formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
                        values = [
                            [
                                video_obj["video_url_list"][0]['video_url'],
                                video_obj["image_url_list"][0]['image_url'],
                                title,
                                new_title,
                                formatted_time,
                            ]
                        ]
                        FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "v8S6nL", "ROWS", 1, 2)
                        time.sleep(0.5)
                        FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "v8S6nL", "A2:Z2", values)
                self.mq.send_msg(mq_obj)
                self.download_cnt += 1
                self.aliyun_log.logging(
                    code="2009",
                    message="成功发送视频到etl",
                    data={"video_obj": video_obj}
                )
                # Remember the video id so it is not crawled again
                self.redis_ops.save_video_id(video_obj['channel_content_id'])
                if self.download_cnt >= self.rule_dict.get("videos_cnt", {}).get("min", 100):
                    self.LocalLog.info("当日视频已达到最大爬取量")
                    self.limit_flag = False
        except Exception as e:
            tb_info = traceback.format_exc()
            self.aliyun_log.logging(
                code="1005",
                message=f"处理视频对象时发生异常,错误信息: {str(e)}",
                data={"video_obj": video_obj}
            )
            self.LocalLog.error(f"处理视频对象时发生异常: {e}\n{tb_info}")
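    # Note: when a title matches the Feishu-configured keyword list and GPT4oMini returns a
    # rewritten title, the Feishu sheet writes above record the video URL, cover URL, original
    # and rewritten titles, and a timestamp for later review.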

    def convert_number(self, s):
        """
        Convert a count string such as "1.2万" or "345" to a number; non-strings are returned unchanged.
        """
        if not isinstance(s, str):
            return s
        try:
            return float(s.strip('万')) * 10000 if '万' in s else int(s)
        except ValueError:
            self.LocalLog.info(f"无法将 '{s}' 转换为有效的数字。")
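    # Example conversions: convert_number("1.2万") -> 12000.0, convert_number("345") -> 345,
    # convert_number(678) -> 678; on a ValueError the method only logs and returns None.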

    def run(self):
        """
        Main entry point: keep requesting and processing recommended videos until the
        download limit is reached.
        :return: None
        """
        self.LocalLog.info("开始执行中青看点推荐抓取...")
        self.control_request_recommend()


if __name__ == '__main__':
    ZhongQingKanDian(
        platform="zhongqingkandian",
        mode="recommend",
        rule_dict={'videos_cnt': {'min': 2, 'max': 0}, 'duration': {'min': 30, 'max': 1200}},
        user_list=[{"uid": 81522822, "link": "中青看点推荐", "nick_name": "免不了俗"}]
    ).run()
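# Note (assumption about the surrounding repo layout): sys.path.append(os.getcwd()) suggests this
# script is meant to be launched from the repository root, e.g. `python zhongqingkandian.py`, so
# that the application.* packages resolve.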