# gzh_recommend.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/8/1
# import time
# import base64
import json
import os
import time
# import urllib.parse
import requests
import urllib3
from crawler_gzh.main.common import Common
from crawler_gzh.main.feishu_lib import Feishu
from crawler_gzh.main.publish import Publish

# Disable HTTP/HTTPS proxies for all outgoing requests (force direct connection).
proxies = {"http": None, "https": None}
  16. class Recommend:
  17. # 获取 token,保存至飞书云文档
  18. @classmethod
  19. def get_token(cls, log_type):
  20. # charles 抓包文件保存目录
  21. charles_file_dir = "./crawler-kanyikan-recommend/chlsfiles/"
  22. if int(len(os.listdir(charles_file_dir))) == 1:
  23. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  24. time.sleep(60)
  25. else:
  26. try:
  27. # 目标文件夹下所有文件
  28. all_file = sorted(os.listdir(charles_file_dir))
  29. # 获取到目标文件
  30. old_file = all_file[-1]
  31. # 分离文件名与扩展名
  32. new_file = os.path.splitext(old_file)
  33. # 重命名文件后缀
  34. os.rename(os.path.join(charles_file_dir, old_file),
  35. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  36. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  37. contents = json.load(f, strict=False)
  38. # 定义需要返回的列表
  39. request_info = []
  40. for content in contents:
  41. if "mp.weixin.qq.com" in content['host']:
  42. if content["path"] == r"/mp/getappmsgext":
  43. headers = content["request"]["header"]["headers"]
  44. title = content["request"]["body"]["text"].split("title=")[-1].split("&ct=")[0]
  45. vid = content["request"]["body"]["text"].split("vid=")[-1].split("&is_pay_subscribe")[0]
  46. request_info.append(title)
  47. request_info.append(vid)
  48. Feishu.update_values("recommend", "gzh", "VzrN7E", "B1:B1", [[title]])
  49. time.sleep(1)
  50. Feishu.update_values("recommend", "gzh", "VzrN7E", "B2:B2", [[vid]])
  51. for h in headers:
  52. if h["name"] == "cookie" and "pass_ticket" in h["value"]:
  53. pass_ticket = h["value"].split("pass_ticket=")[-1]
  54. # print(f"pass_ticket:{pass_ticket}")
  55. request_info.append(pass_ticket)
  56. Feishu.update_values("recommend", "gzh", "VzrN7E", "B5:B5", [[pass_ticket]])
  57. if h["name"] == "referer":
  58. __biz = h["value"].split("__biz=")[-1].split("&mid=")[0]
  59. # print(f"__biz:{__biz}")
  60. request_info.append(__biz)
  61. Feishu.update_values("recommend", "gzh", "VzrN7E", "B3:B3", [[__biz]])
  62. if h["name"] == "cookie" and "appmsg_token" in h["value"]:
  63. appmsg_token = h["value"].split("appmsg_token=")[-1]
  64. # print(f"appmsg_token:{appmsg_token}")
  65. request_info.append(appmsg_token)
  66. Feishu.update_values("recommend", "gzh", "VzrN7E", "B4:B4", [[appmsg_token]])
  67. if h["name"] == "cookie" and "wap_sid2" in h["value"]:
  68. wap_sid2 = h["value"].split("wap_sid2=")[-1]
  69. # print(f"wap_sid2:{wap_sid2}")
  70. request_info.append(wap_sid2)
  71. Feishu.update_values("recommend", "gzh", "VzrN7E", "B6:B6", [[wap_sid2]])
  72. return request_info
  73. except Exception as e:
  74. Common.logger(log_type).error("获取session异常,30s后重试:{}", e)
  75. time.sleep(30)
  76. cls.get_token(log_type)
    # Fetch the recommended-video list for the seed article and append new
    # items to the Feishu feeds sheet ("gzh" doc, sheet "zWKFGb").
    @classmethod
    def get_recommend(cls, log_type):
        """Call /mp/getappmsgext with the captured credentials and record
        every new video from ``related_tag_video`` into the feeds sheet.

        :param log_type: logger channel name passed to Common.logger.
        :return: None — all results are written to Feishu as a side effect.
        """
        try:
            # Credentials previously written by get_token (sheet "VzrN7E",
            # column B rows 1-6: title, vid, __biz, appmsg_token,
            # pass_ticket, wap_sid2).
            token_sheet = Feishu.get_values_batch("recommend", "gzh", "VzrN7E")
            if token_sheet is None:
                Common.logger(log_type).info("未获取到token等信息,30s后重试")
                time.sleep(30)
                # NOTE(review): retry result is discarded; this method is
                # side-effect only, so nothing is lost here.
                cls.get_recommend(log_type)
            else:
                title = token_sheet[0][1]
                vid = token_sheet[1][1]
                __biz = token_sheet[2][1]
                appmsg_token = token_sheet[3][1]
                pass_ticket = token_sheet[4][1]
                wap_sid2 = token_sheet[5][1]
                url = "https://mp.weixin.qq.com/mp/getappmsgext?"
                # Headers replayed from the Charles capture; the referer pins
                # the seed article (mid/sn/idx are hard-coded to that article).
                headers = {
                    # "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
                    "content-type": 'text/plain',
                    "accept": "*/*",
                    "x-requested-with": "XMLHttpRequest",
                    "accept-language": "zh-cn",
                    "accept-encoding": "gzip, deflate, br",
                    "origin": "https://mp.weixin.qq.com",
                    "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 "
                                  "(KHTML, like Gecko) Mobile/15E148 MicroMessenger/8.0.26(0x18001a29)"
                                  " NetType/WIFI Language/zh_CN",
                    "referer": "https://mp.weixin.qq.com/s?"
                               "__biz=Mzg5MDY2NzY5Nw=="
                               "&mid=2247484710"
                               "&idx=1"
                               "&sn=657a341da42ed071aaa4d3ce853f64f2"
                               "&chksm=cfd852f8f8afdbeef513340dec8702433bd78137e7b4afb665d1de5014dc6837ed4dcc979684"
                               "&sessionid=1659509075"
                               "&channel_session_id="
                               "&scene=136"
                               "&subscene="
                               "&exptype="
                               "&reloadid=1659509075"
                               "&reloadseq=2"
                               "&related_video_source=10"
                               "&ascene=1"
                               "&devicetype=iOS14.7.1"
                               "&version=18001a29"
                               "&nettype=WIFI"
                               "&abtest_cookie=AAACAA%3D%3D"
                               "&lang=zh_CN"
                               "&session_us=gh_7364edd0ca9f"
                               "&fontScale=100"
                               "&exportkey=AdT9lhjADCG9r69d1meNZ5c%3D"
                               "&pass_ticket=" + pass_ticket +
                               "&wx_header=3",
                }
                query_string = {
                    "f": "json",
                    "mock": "",
                    "uin": "777",
                    "key": "777",
                    "pass_ticket": pass_ticket,
                    "wxtoken": "",
                    "devicetype": "iOS14.7.1",
                    "clientversion": "18001a29",
                    "__biz": __biz,
                    "appmsg_token": appmsg_token,
                    "x5": "0",
                    # "f": "json"
                }
                cookies = {
                    "appmsg_token": appmsg_token,
                    "devicetype": "iOS14.7.1",
                    "lang": "zh_CN",
                    "pass_ticket": pass_ticket,
                    "rewardsn": "",
                    "version": "18001a29",
                    "wap_sid2": wap_sid2,
                    "wxtokenkey": "777",
                    "wxuin": "2010747860"
                }
                # Form body replayed from the capture; title/vid come from the
                # token sheet, the rest are fixed values for the seed article.
                form = {
                    "r": "0.13440037781889225",
                    "__biz": __biz,
                    "appmsg_type": "9",
                    "mid": "2247484710",
                    "sn": "657a341da42ed071aaa4d3ce853f64f2",
                    "idx": "1",
                    "scene": "136",
                    "title": title,
                    "ct": "1654824718",
                    "abtest_cookie": "AAACAA==",
                    "devicetype": "iOS14.7.1",
                    "version": "18001a29",
                    "is_need_ticket": "0",
                    "is_need_ad": "1",
                    "comment_id": "0",
                    "is_need_reward": "0",
                    "both_ad": "0",
                    "reward_uin_count": "0",
                    "send_time": "",
                    "msg_daily_idx": "1",
                    "is_original": "0",
                    "is_only_read": "1",
                    "req_id": "0314yH9rphN660ejUCz1hRVD",
                    "pass_ticket": pass_ticket,
                    "is_temp_url": "0",
                    "item_show_type": "5",
                    "tmp_version": "1",
                    "more_read_type": "0",
                    "appmsg_like_type": "2",
                    "related_video_sn": "",
                    "related_video_num": "5",
                    "vid": vid,
                    "is_pay_subscribe": "0",
                    "pay_subscribe_uin_count": "0",
                    "has_red_packet_cover": "0",
                    "album_id": "1296223588617486300",
                    "album_video_num": "5",
                    "cur_album_id": "",
                    "is_public_related_video": "0",
                    "encode_info_by_base64": "0",
                    "exptype": ""
                }
                urllib3.disable_warnings()
                response = requests.post(url=url, headers=headers, cookies=cookies, params=query_string, data=form,
                                         verify=False, proxies=proxies)
                if "related_tag_video" not in response.json():
                    # Credentials likely expired — log the raw response.
                    Common.logger(log_type).warning("response:{}\n", response.text)
                elif len(response.json()["related_tag_video"]) == 0:
                    # Empty feed: wait and retry.
                    Common.logger(log_type).warning("response:{}\n", response.text)
                    time.sleep(10)
                    cls.get_recommend(log_type)
                else:
                    feeds = response.json()["related_tag_video"]
                    for m in range(len(feeds)):
                        # Each field defaults to 0 when absent; 0 marks the
                        # video as invalid further down.
                        # video_title
                        if "title" not in feeds[m]:
                            video_title = 0
                        else:
                            video_title = feeds[m]["title"]
                            # video_title = base64.b64decode(video_title).decode("utf-8")
                        # video_id
                        if "vid" not in feeds[m]:
                            video_id = 0
                        else:
                            video_id = feeds[m]["vid"]
                        # play_cnt
                        if "read_num" not in feeds[m]:
                            play_cnt = 0
                        else:
                            play_cnt = feeds[m]["read_num"]
                        # like_cnt
                        if "like_num" not in feeds[m]:
                            like_cnt = 0
                        else:
                            like_cnt = feeds[m]["like_num"]
                        # duration
                        if "duration" not in feeds[m]:
                            duration = 0
                        else:
                            duration = feeds[m]["duration"]
                        # video_width / video_height
                        if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
                            video_width = 0
                            video_height = 0
                        else:
                            video_width = feeds[m]["videoWidth"]
                            video_height = feeds[m]["videoHeight"]
                        # send_time
                        if "pubTime" not in feeds[m]:
                            send_time = 0
                        else:
                            send_time = feeds[m]["pubTime"]
                        # user_name
                        if "srcDisplayName" not in feeds[m]:
                            user_name = 0
                        else:
                            user_name = feeds[m]["srcDisplayName"]
                            # user_name = base64.b64decode(user_name).decode("utf-8")
                        # user_id
                        if "srcUserName" not in feeds[m]:
                            user_id = 0
                        else:
                            user_id = feeds[m]["srcUserName"]
                        # head_url
                        if "head_img_url" not in feeds[m]:
                            head_url = 0
                        else:
                            head_url = feeds[m]["head_img_url"]
                        # cover_url
                        if "cover" not in feeds[m]:
                            cover_url = 0
                        else:
                            cover_url = feeds[m]["cover"]
                        # video_url
                        if "url" not in feeds[m]:
                            video_url = 0
                        else:
                            video_url = feeds[m]["url"]
                        # Resolve the downloadable stream URL from the page.
                        download_url = cls.get_url(log_type, video_url)
                        Common.logger(log_type).info("video_title:{}", video_title)
                        Common.logger(log_type).info("video_id:{}", video_id)
                        Common.logger(log_type).info("play_cnt:{}", play_cnt)
                        Common.logger(log_type).info("like_cnt:{}", like_cnt)
                        Common.logger(log_type).info("duration:{}", duration)
                        Common.logger(log_type).info("video_width:{}", video_width)
                        Common.logger(log_type).info("video_height:{}", video_height)
                        Common.logger(log_type).info("send_time:{}", send_time)
                        Common.logger(log_type).info("user_name:{}", user_name)
                        Common.logger(log_type).info("user_id:{}", user_id)
                        Common.logger(log_type).info("head_url:{}", head_url)
                        Common.logger(log_type).info("cover_url:{}", cover_url)
                        Common.logger(log_type).info("video_url:{}", video_url)
                        Common.logger(log_type).info("download_url:{}", download_url)
                        if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
                            Common.logger(log_type).info("无效视频\n")
                        # Dedupe against the downloaded sheet ("fCs3BT") ...
                        elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in
                                               y]:
                            Common.logger(log_type).info("该视频已下载\n")
                        # ... and against the feeds sheet itself ("zWKFGb").
                        elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb") for x in
                                               y]:
                            Common.logger(log_type).info("该视频已在feeds中\n")
                        else:
                            # Insert a fresh row, then fill columns D..T.
                            Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
                            get_feeds_time = int(time.time())
                            values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
                                       "推荐榜",
                                       video_title,
                                       str(video_id),
                                       play_cnt,
                                       like_cnt,
                                       duration,
                                       str(video_width) + "*" + str(video_height),
                                       time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
                                       user_name,
                                       user_id,
                                       head_url,
                                       cover_url,
                                       video_url,
                                       download_url
                                       ]]
                            time.sleep(1)
                            Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
                            Common.logger(log_type).info("添加至recommend_feeds成功\n")
        except Exception as e:
            Common.logger(log_type).error("get_recommend异常:{}", e)
  323. # 获取视频下载链接
  324. @classmethod
  325. def get_url(cls, log_type, url):
  326. try:
  327. payload = {}
  328. headers = {
  329. 'Cookie': 'rewardsn=; wxtokenkey=777'
  330. }
  331. urllib3.disable_warnings()
  332. response = requests.get(url=url, headers=headers, data=payload, verify=False, proxies=proxies)
  333. response_list = response.text.splitlines()
  334. video_url_list = []
  335. for m in response_list:
  336. if "mpvideo.qpic.cn" in m:
  337. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  338. video_url_list.append(video_url)
  339. video_url = video_url_list[0]
  340. return video_url
  341. except Exception as e:
  342. Common.logger(log_type).error("get_url异常:{}", e)
    # Download one pending video from the feeds sheet and publish it.
    @classmethod
    def download_publish(cls, log_type, env):
        """Process the first eligible row of the feeds sheet ("zWKFGb"):
        download cover + video, write info.txt, upload via Publish, record the
        result in the downloaded sheet ("fCs3BT"), then delete the feeds row.

        Handles at most one row per call (every non-blank branch returns);
        run_download_publish drives it in a loop until the sheet is empty.

        :param log_type: logger channel name passed to Common.logger.
        :param env: environment flag forwarded to Publish.upload_and_publish.
        """
        try:
            recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
            # Row 0 is the header row; data rows start at index 1.
            for i in range(1, len(recommend_feeds_sheet)):
                # Column layout written by get_recommend.
                # NOTE(review): index 16 is skipped and video_url is read from
                # index 17 — confirm against the sheet's column layout.
                download_video_title = recommend_feeds_sheet[i][5]
                download_video_id = recommend_feeds_sheet[i][6]
                download_video_play_cnt = recommend_feeds_sheet[i][7]
                download_video_like_cnt = recommend_feeds_sheet[i][8]
                download_video_duration = recommend_feeds_sheet[i][9]
                download_width_height = recommend_feeds_sheet[i][10]
                download_video_send_time = recommend_feeds_sheet[i][11]
                download_user_name = recommend_feeds_sheet[i][12]
                download_user_id = recommend_feeds_sheet[i][13]
                download_head_url = recommend_feeds_sheet[i][14]
                download_cover_url = recommend_feeds_sheet[i][15]
                download_video_url = recommend_feeds_sheet[i][17]
                # Not provided by the feed; recorded as 0 in info.txt.
                download_video_comment_cnt = 0
                download_video_share_cnt = 0
                Common.logger(log_type).info("正在判断第{}行", i + 1)
                Common.logger(log_type).info("download_video_title:{}", download_video_title)
                Common.logger(log_type).info("download_video_id:{}", download_video_id)
                Common.logger(log_type).info("download_video_play_cnt:{}", download_video_play_cnt)
                Common.logger(log_type).info("download_video_duration:{}", download_video_duration)
                Common.logger(log_type).info("download_video_send_time:{}", download_video_send_time)
                Common.logger(log_type).info("download_video_url:{}\n", download_video_url)
                # Common.logger(log_type).info("download_video_like_cnt:{}", download_video_like_cnt)
                # Common.logger(log_type).info("download_width_height:{}", download_width_height)
                # Common.logger(log_type).info("download_user_name:{}", download_user_name)
                # Common.logger(log_type).info("download_user_id:{}", download_user_id)
                # Common.logger(log_type).info("download_head_url:{}", download_head_url)
                # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
                # Skip blank rows (does not return: keep scanning later rows).
                if download_video_id is None or download_video_title is None or download_video_play_cnt is None:
                    Common.logger(log_type).warning("空行,略过\n")
                # # Filter titles containing sensitive words (disabled).
                # elif any(word if word in download_video_title else False for word in
                #          cls.sensitive_words(log_type)) is True:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("视频已中敏感词,删除成功\n")
                #     return
                # # Download-rule filter (disabled).
                # elif cls.download_rule(download_video_share_cnt, download_video_play_cnt) is False:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("不满足下载规则,删除成功\n")
                #     return
                # Duration under 60s: delete the row and stop this pass.
                elif int(download_video_duration) < 60:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("时长{}<60,删除成功\n", download_video_duration)
                    return
                # Dedupe against the gzh downloaded sheet.
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在公众号中已下载,删除成功\n")
                    return
                # Dedupe against the kanyikan downloaded sheet.
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "kanyikan", "20ce0c")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在看一看中已下载,删除成功\n")
                    return
                else:
                    # Download the cover image.
                    Common.download_method(log_type=log_type, text="cover",
                                           d_name=str(download_video_title), d_url=str(download_cover_url))
                    # Download the video file.
                    Common.download_method(log_type=log_type, text="video",
                                           d_name=str(download_video_title), d_url=str(download_video_url))
                    # Save video metadata to "./videos/{download_video_title}/info.txt"
                    # (one field per line, consumed by the publish step).
                    with open("./videos/" + download_video_title + "/" + "info.txt",
                              "a", encoding="UTF-8") as f_a:
                        f_a.write(str(download_video_id) + "\n" +
                                  str(download_video_title) + "\n" +
                                  str(download_video_duration) + "\n" +
                                  str(download_video_play_cnt) + "\n" +
                                  str(download_video_comment_cnt) + "\n" +
                                  str(download_video_like_cnt) + "\n" +
                                  str(download_video_share_cnt) + "\n" +
                                  str(download_width_height) + "\n" +
                                  str(int(time.mktime(
                                      time.strptime(download_video_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
                                  str(download_user_name) + "\n" +
                                  str(download_head_url) + "\n" +
                                  str(download_video_url) + "\n" +
                                  str(download_cover_url) + "\n" +
                                  "gzh")
                    Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
                    # Upload the video.
                    Common.logger(log_type).info("开始上传视频:{}".format(download_video_title))
                    our_video_id = Publish.upload_and_publish(log_type, env, "play")
                    our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    Common.logger(log_type).info("视频上传完成:{}", download_video_title)
                    # Record the published video in the downloaded sheet.
                    Common.logger(log_type).info("保存视频ID至云文档:{}", download_video_title)
                    # Insert a fresh row at the top of the video-ID sheet.
                    Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
                    # Fill the new row (columns D..W).
                    upload_time = int(time.time())
                    values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                               "推荐榜",
                               str(download_video_title),
                               str(download_video_id),
                               our_video_link,
                               download_video_play_cnt,
                               download_video_like_cnt,
                               download_video_duration,
                               str(download_width_height),
                               str(download_video_send_time),
                               str(download_user_name),
                               str(download_user_id),
                               str(download_head_url),
                               str(download_cover_url),
                               str(download_video_url)]]
                    time.sleep(1)
                    Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
                    # Remove the processed row from the feeds sheet
                    # (dimension: ROWS or COLUMNS).
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("视频:{},下载/上传成功\n", download_video_title)
                    return
        except Exception as e:
            Common.logger(log_type).error("download_publish异常:{}", e)
  467. # 执行下载/上传
  468. @classmethod
  469. def run_download_publish(cls, log_type, env):
  470. try:
  471. while True:
  472. recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
  473. if len(recommend_feeds_sheet) == 1:
  474. Common.logger(log_type).info("下载/上传完成")
  475. break
  476. else:
  477. cls.download_publish(log_type, env)
  478. except Exception as e:
  479. Common.logger(log_type).error("run_download_publish异常:{}", e)
# Script entry point: run a single recommend-crawl pass.
if __name__ == "__main__":
    Recommend.get_recommend("recommend")
    # Recommend.download_publish("recommend")
    # Recommend.run_download_publish("recommend", "dev")
    # token = Recommend.get_token("recommend")
    # print(token)