# kanyikan_recommend_plus.py

# -*- coding: utf-8 -*-
# @Time: 2023/10/26
import json
import os
import random
import sys
import time

import requests
import urllib3

sys.path.append(os.getcwd())
from common.mq import MQ
from common.common import Common
from common.scheduling_db import MysqlHelper
from common import AliyunLogger
from common.public import get_config_from_mysql, download_rule
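
# Setting both schemes to None tells requests to bypass any system proxy.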
proxies = {"http": None, "https": None}


class KanyikanRecommend:
    platform = "看一看-plus"  # "Kanyikan (Top Stories) plus"; matched against crawler_video.platform in SQL, so kept verbatim
    strategy = "随机数据抓取"  # "randomized data crawl"
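
    # Dedup check: count rows already recorded in crawler_video (since
    # 2023-10-09) for this out_video_id under either this crawler's name
    # or the platform label above.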
    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env):
        sql = f""" select * from crawler_video where platform in ("{crawler}","{cls.platform}") and create_time>='2023-10-09' and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env)
        return len(repeat_video)
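
    # Record a crawled Kanyikan video id with status=0 (presumably "not yet
    # processed") so downstream jobs can pick it up.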
    @classmethod
    def insert_video_id(cls, log_type, crawler, video_id, env):
        insert_sql = f"""insert into crawler_kyk_video_id(kyk_video_id, status) values ("{video_id}", 0)"""
        MysqlHelper.update_values(log_type, crawler, insert_sql, env, action='')
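
    # Main entry point: fetch up to 20 batches of the recommend feed with
    # randomized request parameters, filter each item against rule_dict,
    # and publish qualifying videos to the ETL message queue.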
    @classmethod
    def get_videoList(cls, log_type, crawler, our_uid, rule_dict, env):
        mq = MQ(topic_name="topic_crawler_etl_" + env)
        try:
            session = Common.get_session(log_type, crawler, env)
            if session is None:
                time.sleep(1)
                cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
                return  # retried recursively; don't continue with a None session
            for i in range(20):
                url = 'https://search.weixin.qq.com/cgi-bin/recwxa/recwxavideolist?'
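                # Randomize the seed vid and client switches so successive
                # requests look like different recommendation sessions.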
                vid = random.choice(
                    ["wxv_3183841422983217154", "wxv_2930758110737334272", "wxv_2988109621326512134",
                     "wxv_2676332817823432706", "wxv_3176172124915433476", "wxv_2844480939899650049",
                     "wxv_2801905452978274308", "wxv_2946787506342117382", "wxv_2935943471797125120",
                     "wxv_2756464139115659264", "wxv_3174430452460453896", "wxv_3126758748858908674",
                     "wxv_3182262442043621385", "wxv_3058491263710314497", "wxv_2952726055449051140",
                     "wxv_3076106053748015108", "wxv_2074265064492040192", "wxv_2999570992006021122"])
                channelid = random.choice(["200201", "200", "208", "208201"])
                switchnewuser = random.choice(["0", "1"])
                switchprofile = random.choice(["0", "1"])
                subscene = random.choice(["1089", "1074", "208"])
                params = random.choice([{
                    'session': session,
                    "offset": 0,
                    "wxaVersion": "3.9.2",
                    "count": "10",
                    "channelid": channelid,
                    "scene": '310',
                    "subscene": subscene,
                    "clientVersion": '8.0.18',
                    "sharesearchid": '0',
                    "nettype": 'wifi',
                    "switchprofile": switchprofile,
                    "switchnewuser": switchnewuser,
                }, {
                    "session": session,
                    "wxaVersion": "3.17.8",
                    "channelid": channelid,
                    "vid": vid,
                    "offset": 0,
                    "count": "15",
                    "scene": '310',
                    "subscene": subscene,
                    "model": "MacBookPro14%2C111.6.7",
                    "nettype": 'wifi',
                    "clientVersion": '3.5.5',
                    "sharesearchid": '0',
                    "presearchid": "17530764723864413041",
                    "sharesource": "0",
                    "isFromUgc": "false",
                    "ad": 0,
                    "switchprofile": switchprofile,
                    "switchnewuser": switchnewuser,
                }])
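                # Headers appear to be copied from a captured mini-program
                # request; X-WECHAT-UIN ties the call to a logged-in account.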
                header = {
                    'Host': 'search.weixin.qq.com',
                    'Content-Type': 'application/json',
                    'X-WX-ClientVersion': '0x33050520',
                    'X-WECHAT-UIN': 'b2hfbTQ1WGNjSzQxemdfanpMSml1TEtfbEtsVQ==',
                    'Accept': '*/*',
                    'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 11_3 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E217 MicroMessenger/6.8.0(0x16080000) NetType/WIFI Language/en Branch/Br_trunk MiniProgramEnv/Mac',
                    'Referer': 'https://servicewechat.com/wxbb9a805eb4f9533c/268/page-frame.html',
                    'Accept-Language': 'zh-cn'
                }
                urllib3.disable_warnings()  # suppress the InsecureRequestWarning raised by verify=False
                response = requests.get(url=url, headers=header, params=params, proxies=proxies, verify=False)
  100. if "data" not in response.text:
  101. Common.logger(log_type, crawler).info("获取视频list时,session过期,随机睡眠 31-50 秒")
  102. Common.logging(log_type, crawler, env, "获取视频list时,session过期,随机睡眠 31-50 秒")
  103. AliyunLogger.logging(
  104. code="2000",
  105. platform=crawler,
  106. mode=log_type,
  107. env=env,
  108. message=f"获取视频list时,session过期,随机睡眠 31-50 秒"
  109. )
  110. # 如果返回空信息,则随机睡眠 31-40 秒
  111. time.sleep(random.randint(31, 40))
  112. cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
  113. elif "items" not in response.json()["data"]:
  114. Common.logger(log_type, crawler).info(f"get_feeds:{response.json()},随机睡眠 1-3 分钟")
  115. Common.logging(log_type, crawler, env, f"get_feeds:{response.json()},随机睡眠 1-3 分钟")
  116. AliyunLogger.logging(
  117. code="2000",
  118. platform=crawler,
  119. mode=log_type,
  120. env=env,
  121. message=f"get_feeds:{response.json()},随机睡眠 1-3 分钟"
  122. )
  123. # 如果返回空信息,则随机睡眠 1-3 分钟
  124. time.sleep(random.randint(60, 180))
  125. cls.get_videoList(log_type, crawler, our_uid, rule_dict, env)
                feeds = response.json().get("data", {}).get("items", "")
                if feeds == "":
                    Common.logger(log_type, crawler).info(f"feeds:{feeds}")
                    Common.logging(log_type, crawler, env, f"feeds:{feeds}")
                    return
                for i in range(len(feeds)):
                    try:
                        AliyunLogger.logging(
                            code="1001",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message='scanned one video\n'
                        )
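                        # Sanitize the title: strip newlines, filename-hostile
                        # characters, and a few platform-specific artifacts.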
                        video_title = feeds[i].get("title", "").strip().replace("\n", "") \
                            .replace("/", "").replace("\\", "").replace("\r", "") \
                            .replace(":", "").replace("*", "").replace("?", "") \
                            .replace("?", "").replace('"', "").replace("<", "") \
                            .replace(">", "").replace("|", "").replace(" ", "") \
                            .replace("&NBSP", "").replace(".", "。").replace(" ", "") \
                            .replace("'", "").replace("#", "").replace("Merge", "")
                        publish_time_stamp = feeds[i].get("date", 0)
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
                        # Resolve the playback URL: prefer mpInfo (third CDN
                        # entry when available), then ctnInfo, then the
                        # top-level urlInfo.
                        if "videoInfo" not in feeds[i]:
                            video_url = ""
                        elif "mpInfo" in feeds[i]["videoInfo"]["videoCdnInfo"]:
                            if len(feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"]) > 2:
                                video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"][2]["url"]
                            else:
                                video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["mpInfo"]["urlInfo"][0]["url"]
                        elif "ctnInfo" in feeds[i]["videoInfo"]["videoCdnInfo"]:
                            video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["ctnInfo"]["urlInfo"][0]["url"]
                        else:
                            video_url = feeds[i]["videoInfo"]["videoCdnInfo"]["urlInfo"][0]["url"]
                        video_id = feeds[i].get("videoId", "")
                        videoId = "{}kyk_plus".format(video_id)
                        playCount = int(feeds[i].get("playCount", 0))
                        shared_cnt = int(feeds[i].get("shared_cnt", 0))
                        video_dict = {
                            "video_title": video_title,
                            "video_id": videoId,
                            "play_cnt": feeds[i].get("playCount", 0),
                            "like_cnt": feeds[i].get("liked_cnt", 0),
                            "comment_cnt": feeds[i].get("comment_cnt", 0),
                            "share_cnt": feeds[i].get("shared_cnt", 0),
                            "duration": feeds[i].get("mediaDuration", 0),
                            "video_width": feeds[i].get("short_video_info", {}).get("width", 0),
                            "video_height": feeds[i].get("short_video_info", {}).get("height", 0),
                            "publish_time_stamp": publish_time_stamp,
                            "publish_time_str": publish_time_str,
                            "user_name": feeds[i].get("source", "").strip().replace("\n", ""),
                            "user_id": feeds[i].get("openid", ""),
                            "avatar_url": feeds[i].get("bizIcon", ""),
                            "cover_url": feeds[i].get("thumbUrl", ""),
                            "video_url": video_url,
                            "session": session,
                        }
                        for k, v in video_dict.items():
                            Common.logger(log_type, crawler).info(f"{k}:{v}")
                        Common.logging(log_type, crawler, env, f"video_dict:{video_dict}")
                        AliyunLogger.logging(
                            code="1000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message=f"{video_dict}\n"
                        )
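                        # Quality gates: require a share/play ratio of at
                        # least 5% and at least 800 shares.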
                        video_percent = '%.2f' % (shared_cnt / playCount) if playCount else '0.00'  # guard against playCount == 0
                        if float(video_percent) < 0.05:
                            Common.logger(log_type, crawler).info(f"share/play ratio: {video_percent}\n")
                            Common.logging(log_type, crawler, env, f"share/play ratio: {video_percent}\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message=f"below crawl threshold, share/play ratio: {video_percent}\n"
                            )
                            continue
                        elif shared_cnt < 800:
                            Common.logger(log_type, crawler).info(f"share count: {shared_cnt}\n")
                            Common.logging(log_type, crawler, env, f"share count: {shared_cnt}\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message=f"below crawl threshold, share count: {shared_cnt}\n"
                            )
                            continue
                        if video_dict["video_id"] == "" or video_dict["video_title"] == "" or video_dict["video_url"] == "":
                            Common.logger(log_type, crawler).info("invalid video\n")
                            Common.logging(log_type, crawler, env, "invalid video\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message="invalid video"
                            )
                        elif download_rule(log_type=log_type, crawler=crawler, video_dict=video_dict, rule_dict=rule_dict) is False:
                            Common.logger(log_type, crawler).info("does not meet the crawl rules\n")
                            Common.logging(log_type, crawler, env, "does not meet the crawl rules\n")
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='does not meet the crawl rules\n'
                            )
                        elif any(str(word) in video_dict["video_title"]
                                 for word in get_config_from_mysql(log_type=log_type,
                                                                   source=crawler,
                                                                   env=env,
                                                                   text="filter",
                                                                   action="")):
                            Common.logger(log_type, crawler).info('title matches a filter word\n')
                            Common.logging(log_type, crawler, env, 'title matches a filter word\n')
                            AliyunLogger.logging(
                                code="2004",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='title matches a filter word\n'
                            )
                        elif cls.repeat_video(log_type, crawler, video_dict["video_id"], env) != 0:
                            Common.logger(log_type, crawler).info('video already downloaded\n')
                            Common.logging(log_type, crawler, env, 'video already downloaded\n')
                            AliyunLogger.logging(
                                code="2002",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message='video already downloaded\n'
                            )
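                        # All checks passed: normalize the fields the ETL
                        # pipeline expects and publish to the queue.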
                        else:
                            video_dict["out_user_id"] = video_dict["user_id"]
                            video_dict["platform"] = crawler
                            video_dict["strategy"] = log_type
                            video_dict["strategy_type"] = "data"
                            video_dict["out_video_id"] = video_dict["video_id"]
                            video_dict["width"] = video_dict["video_width"]
                            video_dict["height"] = video_dict["video_height"]
                            video_dict["crawler_rule"] = json.dumps(rule_dict)
                            video_dict["user_id"] = our_uid
                            video_dict["publish_time"] = video_dict["publish_time_str"]
                            cls.insert_video_id(log_type, crawler, video_id, env)
                            AliyunLogger.logging(
                                code="1010",
                                platform=crawler,
                                mode=log_type,
                                env=env,
                                message=f"Kanyikan video_id {video_id} saved to DB",
                            )
                            mq.send_msg(video_dict)
                            time.sleep(random.randint(10, 15))
                    except Exception as e:
                        Common.logger(log_type, crawler).error(f"error crawling single video: {e}\n")
                        Common.logging(log_type, crawler, env, f"error crawling single video: {e}\n")
                        AliyunLogger.logging(
                            code="3000",
                            platform=crawler,
                            mode=log_type,
                            env=env,
                            message=f"error crawling single video: {e}\n"
                        )
        except Exception as e:
            Common.logger(log_type, crawler).error(f"error crawling the list page: {e}\n")
            Common.logging(log_type, crawler, env, f"error crawling the list page: {e}\n")
            AliyunLogger.logging(
                code="3000",
                platform=crawler,
                mode=log_type,
                env=env,
                message=f"error crawling the list page: {e}\n"
            )
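

# Manual run: crawl the prod recommend feed once with a minimal share-count
# rule (max=0 presumably means "no upper bound" in download_rule).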
if __name__ == "__main__":
    KanyikanRecommend.get_videoList(
        log_type="recommend",
        crawler="kanyikan",
        env="prod",
        rule_dict={'share_cnt': {'min': 300, 'max': 0}},
        our_uid=64080779
    )