# benshanzhufu_recommend_scheduling.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/4/13
import json
import os
import sys
import time
import uuid
from urllib import parse

import requests
import urllib3

sys.path.append(os.getcwd())
from common.mq import MQ
from common.common import Common
from common.scheduling_db import MysqlHelper
from common.public import get_config_from_mysql, download_rule
from common.aliyun_log import AliyunLogger
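
# requests would otherwise pick up system-level proxy settings; disable them
# so every call goes out directly.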
proxies = {"http": None, "https": None}


class BenshanzhufuRecommend:
    platform = "本山祝福"

    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env):
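        """Return how many crawler_video rows already hold this out_video_id;
        a non-zero count means the video has been crawled before."""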
        sql = f""" select * from crawler_video where platform in ("{crawler}","{cls.platform}") and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env)
        return len(repeat_video)

    # Fetch videos from the recommend list
    @classmethod
    def get_videoList(cls, log_type, crawler, our_uid, rule_dict, env):
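        """Page through the recommend feed, validate each item, and push
        qualifying videos onto the ETL message queue."""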
        mq = MQ(topic_name="topic_crawler_etl_" + env)
        # pagination state
        visitor_key = ""
        page = 1
        while True:
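            # Each iteration fetches one page; the loop returns once the API
            # reports an error or runs out of data.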
            # try:
            now = int(time.time() * 1000)
            url = "https://bszf.wentingyou.cn/index.php/v111/index/index?parameter="
            header = {
                "content-time": str(now),
                "chatKey": "wx0fb8149da961d3b0",
                "cache-time": str(now),
                "User-Agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) "
                              "AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 "
                              "MicroMessenger/8.0.20(0x1800142d) NetType/WIFI Language/zh_CN",
                "Referer": "https://servicewechat.com/wx0fb8149da961d3b0/2/page-frame.html"
            }
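            # The endpoint takes its arguments as URL-encoded JSON appended to
            # the "parameter" query string.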
            parameter = {
                "page": page,
                "ini_id": visitor_key
            }
            params = parse.quote(json.dumps(parameter))
            url = url + str(params)
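            # verify=False would flood the logs with InsecureRequestWarning,
            # so silence urllib3 first.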
            urllib3.disable_warnings()
            r = requests.get(headers=header, url=url, proxies=proxies, verify=False)
            if r.status_code != 200:
                Common.logger(log_type, crawler).warning(f"get_videoList:{r.status_code}, {r.text}\n")
                Common.logging(log_type, crawler, env, f"get_videoList:{r.status_code}, {r.text}\n")
                return
            elif r.json().get("message") != "list success":
                Common.logger(log_type, crawler).warning(f"get_videoList:{r.status_code}, {r.json()}\n")
                Common.logging(log_type, crawler, env, f"get_videoList:{r.status_code}, {r.text}\n")
                return
            elif "data" not in r.json():
                Common.logger(log_type, crawler).warning(f"get_videoList:{r.status_code}, {r.json()}\n")
                Common.logging(log_type, crawler, env, f"get_videoList:{r.status_code}, {r.text}\n")
                return
            elif len(r.json()["data"]["list"]) == 0:
                Common.logger(log_type, crawler).info(f"没有更多数据了~ {r.json()}\n")
                Common.logging(log_type, crawler, env, f"没有更多数据了~ {r.json()}\n")
                return
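            # Success path: the response carries a visitor_key cursor that the
            # next request echoes back as ini_id.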
            else:
                # paginate: save the cursor and bump the page number
                visitor_key = r.json()["data"]["visitor_key"]
                page += 1
                feeds = r.json()["data"]["list"]
                for i in range(len(feeds)):
                    trace_id = crawler + str(uuid.uuid1())
                    AliyunLogger.logging(
                        code="1001",
                        platform=crawler,
                        mode=log_type,
                        env=env,
                        data=feeds[i],
                        message="扫描到一条视频"
                    )
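                    # "update_time" appears to double as the publish timestamp
                    # in this feed.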
                    publish_time_stamp = feeds[i].get("update_time", 0)
                    publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
                    video_url = feeds[i].get("video_url", "")
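                    # Only direct .mp4 links are usable; blank anything else so
                    # the invalid-video filter below drops it.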
                    if ".mp4" not in video_url:
                        video_url = ""
                    video_dict = {
                        'video_title': feeds[i].get("title", "").replace(" ", "").replace("'", "").replace('"', ""),
                        'video_id': str(feeds[i].get("nid", "")),
                        'play_cnt': 0,
                        'comment_cnt': feeds[i].get("commentCount", 0),
                        'like_cnt': 0,
                        'share_cnt': 0,
                        'publish_time_stamp': publish_time_stamp,
                        'publish_time_str': publish_time_str,
                        'user_name': "本山祝福",
                        'user_id': "benshanzhufu",
                        'avatar_url': feeds[i].get("video_cover", ""),
                        'cover_url': feeds[i].get("video_cover", ""),
                        'video_url': video_url,
                        'session': f"benshanzhufu-{int(time.time())}"
                    }
                    for k, v in video_dict.items():
                        Common.logger(log_type, crawler).info(f"{k}:{v}")
                    Common.logging(log_type, crawler, env, f"video_dict:{video_dict}")
                    # filter out invalid videos
                    if video_dict["video_id"] == "" or video_dict["cover_url"] == "" or video_dict["video_url"] == "":
                        Common.logger(log_type, crawler).info("无效视频\n")
                        Common.logging(log_type, crawler, env, "无效视频\n")
                    elif download_rule(log_type=log_type, crawler=crawler, video_dict=video_dict, rule_dict=rule_dict) is False:
                        Common.logger(log_type, crawler).info("不满足抓取规则\n")
                        Common.logging(log_type, crawler, env, "不满足抓取规则\n")
                    elif any(str(word) in video_dict["video_title"]
                             for word in get_config_from_mysql(log_type=log_type,
                                                               source=crawler,
                                                               env=env,
                                                               text="filter",
                                                               action="")):
                        Common.logger(log_type, crawler).info('已中过滤词\n')
                        Common.logging(log_type, crawler, env, '已中过滤词\n')
                    elif cls.repeat_video(log_type, crawler, video_dict["video_id"], env) != 0:
                        Common.logger(log_type, crawler).info('视频已下载\n')
                        Common.logging(log_type, crawler, env, '视频已下载\n')
                        AliyunLogger.logging(
                            code="2002",
                            platform=crawler,
                            mode=log_type,
                            message="重复的视频",
                            data=video_dict,
                            trace_id=trace_id,
                            env=env
                        )
                    else:
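                        # Augment the dict with the fields the ETL consumer
                        # presumably expects, then queue the message.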
                        video_dict["out_user_id"] = video_dict["user_id"]
                        video_dict["platform"] = crawler
                        video_dict["strategy"] = log_type
                        video_dict["out_video_id"] = video_dict["video_id"]
                        video_dict["width"] = 0
                        video_dict["height"] = 0
                        video_dict["crawler_rule"] = json.dumps(rule_dict)
                        video_dict["user_id"] = our_uid
                        video_dict["publish_time"] = video_dict["publish_time_str"]
                        video_dict["fans_cnt"] = 0
                        video_dict["videos_cnt"] = 0
                        mq.send_msg(video_dict)
                        AliyunLogger.logging(
                            code="1002",
                            platform=crawler,
                            mode=log_type,
                            message="成功发送至 ETL",
                            data=video_dict,
                            trace_id=trace_id,
                            env=env
                        )
            # except Exception as e:
            #     Common.logger(log_type, crawler).info(f"抓取单条视频异常:{e}\n")
            # except Exception as e:
            #     Common.logger(log_type, crawler).error(f"抓取第{page}页时异常:{e}\n")


if __name__ == "__main__":
    print(get_config_from_mysql("recommend", "benshanzhufu", "dev", "filter"))
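    # Hypothetical manual run (our_uid / rule_dict values are placeholders):
    # BenshanzhufuRecommend.get_videoList("recommend", "benshanzhufu", our_uid=0, rule_dict={}, env="dev")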