# zhongqingkandian_related_recommend.py
# Standard library
import os
import sys
import json
import random
import uuid
import time
import traceback
from datetime import datetime

# Third-party
import requests
from requests.adapters import HTTPAdapter  # NOTE(review): imported but not used in this module — confirm before removing
from urllib3.util.retry import Retry  # NOTE(review): imported but not used in this module — confirm before removing

# Make the project packages importable when this file is run as a script
sys.path.append(os.getcwd())

# Project-local imports (depend on the sys.path tweak above)
from application.common.feishu import FsData
from application.common.feishu.feishu_utils import FeishuUtils
from application.common.gpt import GPT4oMini
from application.common.messageQueue import MQ
from application.common.log import AliyunLogger
from application.functions.zqkd_db_redis import DatabaseOperations, RedisOperations
from application.items import VideoItem
from application.pipeline import PiaoQuanPipeline
from application.common.log import Local
class ZhongQingKanDianRelated:
    """Crawler for ZhongQingKanDian (中青看点) related-video recommendations.

    Pulls seed content ids from redis, requests the related-recommendation
    list through the crawler gateway, fetches each item's detail, and
    publishes qualifying videos to the etl message queue.
    """

    # Base URL of the crawler gateway service
    API_BASE_URL = "http://8.217.192.46:8889"
    # Headers sent with every POST request
    COMMON_HEADERS = {
        "Content-Type": "application/json"
    }
    # Maximum number of attempts per request
    MAX_RETRIES = 3
    # Per-request timeout in seconds
    TIMEOUT = 30
  31. def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
  32. """
  33. 初始化
  34. :param platform: 平台名称 zhongqingkandian
  35. :param mode: 运行模式 recommend
  36. :param rule_dict: 规则字典,包含视频数量限制、时长限制等规则 [{"videos_cnt":{"min":100,"max":0}},{"duration":{"min":30,"max":1200}}]
  37. :param user_list: 用户列表
  38. :param env: 运行环境,默认为 "prod"
  39. """
  40. self.limit_flag = True
  41. self.platform = platform
  42. self.mode = mode
  43. self.rule_dict = rule_dict
  44. self.user_list = user_list
  45. self.env = env
  46. self.download_cnt = 0
  47. self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
  48. self.expire_flag = False
  49. self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
  50. self.db_ops = DatabaseOperations(mode=mode, platform=platform)
  51. self.redis_ops = RedisOperations(mode=mode, platform=platform)
  52. data_rule = FsData()
  53. self.title_rule = data_rule.get_title_rule()
  54. self.LocalLog = Local.logger(self.platform, self.mode)
  55. self.session = requests.session()
  56. def send_request(self, path, data):
  57. """
  58. 同步发送 POST 请求到指定路径,带有重试机制。
  59. :param path: 请求的 API 路径
  60. :param data: 请求的数据
  61. :return: 响应的 JSON 数据,如果请求失败则返回 None
  62. """
  63. full_url = f"{self.API_BASE_URL}{path}"
  64. for retry in range(self.MAX_RETRIES):
  65. try:
  66. response = self.session.post(full_url, data=data, timeout=self.TIMEOUT, headers=self.COMMON_HEADERS)
  67. response.raise_for_status()
  68. self.LocalLog.info(f"{path}响应数据:{response.json()}")
  69. return response.json()
  70. except Exception as e:
  71. tb_info = traceback.format_exc()
  72. self.LocalLog.info(f"{path}请求失败:{e} \n{tb_info}")
  73. self.aliyun_log.logging(
  74. code="3000",
  75. message=f"请求 {path} 失败,错误信息: {str(e)}",
  76. data={"path": path}
  77. )
  78. time.sleep(5)
  79. return None
  80. def is_response_valid(self, resp, url):
  81. """
  82. 检查响应是否有效(状态码为 0 表示有效)。
  83. :param resp: 响应数据
  84. :param url: 请求的 URL
  85. :return: 如果响应有效则返回响应数据,否则返回 None
  86. """
  87. try:
  88. if resp and resp.get('code') != 0:
  89. self.aliyun_log.logging(
  90. code="3000",
  91. message=f"抓取{url}失败,请求失败,响应:{resp}"
  92. )
  93. self.LocalLog.info(f"{url}请求失败,响应:{resp}")
  94. return None
  95. return resp
  96. except Exception as e:
  97. tb_info = traceback.format_exc()
  98. self.aliyun_log.logging(
  99. code="3000",
  100. message=f"检查响应有效性时出错,错误信息: {str(e)}",
  101. data={"url": url, "resp": resp}
  102. )
  103. self.LocalLog.info(f"检查 {url} 响应有效性时出错:{e} \n{tb_info}")
  104. return None
  105. def req_related_recommend_list(self, content_id):
  106. """
  107. 同步请求与指定内容 ID 相关的推荐列表。
  108. :param
  109. :return: 相关推荐视频列表的有效响应数据,如果请求失败则返回 None
  110. """
  111. try:
  112. url = '/crawler/zhong_qing_kan_dian/related'
  113. body = json.dumps({
  114. "content_id": f"{content_id}",
  115. "cursor": ""
  116. })
  117. self.LocalLog.info(f"开始请求相关推荐{body}")
  118. resp = self.send_request(url, body)
  119. return self.is_response_valid(resp, url)
  120. except Exception as e:
  121. tb_info = traceback.format_exc()
  122. self.aliyun_log.logging(
  123. code="1004",
  124. message=f"请求相关推荐视频列表时发生异常,错误信息: {str(e)}",
  125. data={"url": url}
  126. )
  127. self.LocalLog.info(f"请求相关推荐视频列表 {url} 时发生异常:{e} \n{tb_info}")
  128. return None
  129. def req_detail(self, content_link, **kwargs):
  130. """
  131. 同步请求视频详情。
  132. :param content_link: 视频内容链接
  133. :param kwargs: 额外的视频信息
  134. :return: 无返回值,处理视频详情信息
  135. """
  136. try:
  137. self.LocalLog.info(f"开始请求视频详情,链接: {content_link}")
  138. url = '/crawler/zhong_qing_kan_dian/detail'
  139. body = json.dumps({
  140. "content_link": content_link
  141. })
  142. resp = self.send_request(url, body)
  143. if not self.is_response_valid(resp, url):
  144. return
  145. data = resp.get("data", {}).get("data", {})
  146. if data.get("content_type") != "video":
  147. self.aliyun_log.logging(
  148. code="3003",
  149. message=f"跳过非视频内容",
  150. data={"content_link": content_link}
  151. )
  152. self.LocalLog.info(f"跳过非视频内容,链接: {content_link}")
  153. return
  154. self.LocalLog.info(f"{content_link} 是视频")
  155. data.update(kwargs)
  156. self.process_video_obj(data)
  157. except Exception as e:
  158. tb_info = traceback.format_exc()
  159. self.aliyun_log.logging(
  160. code="1005",
  161. message=f"请求视频详情时发生异常,错误信息: {str(e)}",
  162. data={"content_link": content_link}
  163. )
  164. self.LocalLog.error(f"请求视频详情,链接 {content_link} 时发生异常:{e} \n{tb_info}")
  165. def control_request_related(self):
  166. """
  167. 控制相关推荐视频列表的请求和处理流程。
  168. :return: 无返回值,根据下载数量限制控制流程
  169. """
  170. while self.limit_flag:
  171. try:
  172. self.LocalLog.info(f"开始推荐视频列表的请求和处理流程,今日已爬 {self.download_cnt} 个视频")
  173. content_id = self.redis_ops.get_recommend_video()
  174. if not content_id:
  175. self.LocalLog.info("缓存中【task:zqkd_video_id】没有数据")
  176. continue
  177. time.sleep(random.randint(5, 10))
  178. related_resp = self.req_related_recommend_list(content_id)
  179. if not related_resp:
  180. continue
  181. related_list = related_resp.get("data", {}).get("data", [])
  182. self.LocalLog.info(f"获取的推荐列表长度:{len(related_list)}")
  183. for related_obj in related_list:
  184. # if not self.limit_flag:
  185. # self.LocalLog.info(f"今日视频数量已达最大量{self.download_cnt}")
  186. # return
  187. related_content_link = related_obj.get("share_info", {}).get("share_url")
  188. self.LocalLog.info(f"related_content_link == {related_content_link}")
  189. if related_content_link:
  190. time.sleep(random.randint(5, 10))
  191. self.req_detail(related_content_link, **related_obj)
  192. except Exception as e:
  193. tb_info = traceback.format_exc()
  194. self.aliyun_log.logging(
  195. code="3009",
  196. message=f"控制相关推荐视频请求和处理时发生异常,错误信息: {str(e)}",
  197. data={}
  198. )
  199. self.LocalLog.info(f"控制相关推荐视频请求和处理时发生异常:\n{tb_info}")
  200. def process_video_obj(self, video_obj):
  201. """
  202. 处理视频对象,包括检查视频时长、用户信息、保存数据等操作。
  203. :param video_obj: 视频对象,包含视频的各种信息
  204. :return: 无返回值,完成视频对象的处理
  205. """
  206. try:
  207. video_duration = video_obj["video_url_list"][0]['video_duration']
  208. video_id = video_obj['channel_content_id']
  209. # 检查视频ID是否存在
  210. if self.redis_ops.check_video_id_exists(video_id):
  211. self.aliyun_log.logging(
  212. code="3004",
  213. message=f"重复视频ID:{video_id}"
  214. )
  215. self.LocalLog.info(f"重复视频ID: {video_id}")
  216. return
  217. our_user = random.choice(self.user_list)
  218. trace_id = self.platform + str(uuid.uuid1())
  219. item = VideoItem()
  220. account_id = video_obj["channel_account_id"]
  221. account_name = video_obj["channel_account_name"]
  222. account_avatar = video_obj["avatar"]
  223. # 检查用户ID是否存在
  224. """
  225. 需要改为判断redis
  226. """
  227. is_repeat_user = self.db_ops.check_user_id(account_id)
  228. if is_repeat_user:
  229. # 更新用户信息,使用异步方法并等待结果
  230. self.LocalLog.info(f"用户{account_id}已经存在数据库中")
  231. self.db_ops.update_user(account_id, account_name, account_avatar)
  232. else:
  233. self.LocalLog.info(f"用户{account_id}没在数据库中")
  234. # 插入用户信息,使用异步方法并等待结果
  235. self.db_ops.insert_user(account_id, account_name, account_avatar)
  236. self.aliyun_log.logging(code="1007", message=f"用户数据写入成功,用户ID:{account_id}")
  237. self.LocalLog.info(f"用户数据写入成功,用户ID: {account_id}")
  238. if video_duration > self.rule_dict.get("duration", {}).get("max", 1200) or video_duration < self.rule_dict.get("duration", {}).get("min", 30):
  239. self.aliyun_log.logging(
  240. code="3005",
  241. message=f"视频时长不满足条件[>=30s&<=1200s]视频ID:{video_obj['channel_content_id']},视频时长:{video_duration}"
  242. )
  243. self.LocalLog.info(
  244. f"视频时长不满足条件,视频ID: {video_obj['channel_content_id']}, 视频时长: {video_duration}")
  245. return
  246. item.add_video_info("video_id", video_obj['channel_content_id'])
  247. item.add_video_info("video_title", video_obj["title"])
  248. item.add_video_info("play_cnt", int(video_obj["read_count"]))
  249. item.add_video_info("publish_time_stamp", int(int(video_obj["publish_timestamp"]) / 1000))
  250. item.add_video_info("out_user_id", video_obj["channel_account_id"])
  251. item.add_video_info("cover_url", video_obj["image_url_list"][0]['image_url'])
  252. item.add_video_info("like_cnt", 0)
  253. item.add_video_info("collection_cnt", 0)
  254. item.add_video_info("share_cnt", int(video_obj["share_count"]))
  255. item.add_video_info("comment_cnt", int(video_obj["comment_count"]))
  256. item.add_video_info("video_url", video_obj["video_url_list"][0]['video_url'])
  257. item.add_video_info("out_video_id", int(video_obj["channel_content_id"]))
  258. item.add_video_info("duration", video_obj["video_url_list"][0]['video_duration'])
  259. item.add_video_info("platform", self.platform)
  260. item.add_video_info("strategy", self.mode)
  261. item.add_video_info("session", f"{self.platform}-{int(time.time())}")
  262. item.add_video_info("user_id", our_user["uid"])
  263. item.add_video_info("user_name", our_user["nick_name"])
  264. mq_obj = item.produce_item()
  265. pipeline = PiaoQuanPipeline(
  266. platform=self.platform,
  267. mode=self.mode,
  268. rule_dict=self.rule_dict,
  269. env=self.env,
  270. item=mq_obj,
  271. trace_id=traceback.format_exc()
  272. )
  273. if pipeline.process_item():
  274. title_list = self.title_rule.split(",")
  275. title = video_obj["title"]
  276. contains_keyword = any(keyword in title for keyword in title_list)
  277. if contains_keyword:
  278. new_title = GPT4oMini.get_ai_mini_title(title)
  279. if new_title:
  280. item.add_video_info("video_title", new_title)
  281. current_time = datetime.now()
  282. formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
  283. values = [
  284. [
  285. video_obj["video_url_list"][0]['video_url'],
  286. video_obj["image_url_list"][0]['image_url'],
  287. title,
  288. new_title,
  289. formatted_time,
  290. ]
  291. ]
  292. FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "v8S6nL", "ROWS", 1, 2)
  293. time.sleep(0.5)
  294. FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "v8S6nL", "A2:Z2", values)
  295. self.download_cnt += 1
  296. self.mq.send_msg(mq_obj)
  297. self.aliyun_log.logging(
  298. code="2009",
  299. message=f"成功发送视频到etl",
  300. data={"video_obj": video_obj}
  301. )
  302. self.LocalLog.info(f"成功发送etl")
  303. # 保存视频ID
  304. self.redis_ops.save_video_id(video_obj['channel_content_id'])
  305. if self.download_cnt >= self.rule_dict.get("videos_cnt", {}).get("min", 100):
  306. self.limit_flag = False
  307. except Exception as e:
  308. tb_info = traceback.format_exc()
  309. self.aliyun_log.logging(
  310. code="1005",
  311. message=f"处理视频对象时发生异常,错误信息: {str(e)}",
  312. data={"video_obj": video_obj}
  313. )
  314. self.LocalLog.error(f"处理视频对象时发生异常: {e}\n{tb_info}")
  315. def run(self):
  316. """
  317. 运行主流程,执行相关推荐视频的请求,直到达到下载数量限制。
  318. :return: 无返回值,程序运行的主逻辑
  319. """
  320. self.LocalLog.info("开始执行中青看点相关推荐抓取...")
  321. self.control_request_related()
  322. if __name__ == '__main__':
  323. ZhongQingKanDianRelated(
  324. platform="zhongqingkandian",
  325. mode="recommend",
  326. rule_dict={"videos_cnt": {"min": 3, "max": 0}},
  327. user_list=[{"uid": 81525095, "link": "中青看点推荐", "nick_name": "善惡"}]
  328. ).run()