# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/8/1
# import time
# import base64
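"""Crawler for recommended videos from WeChat Official Accounts (公众号).

Reads credentials (pass_ticket, __biz, appmsg_token, wap_sid2) from a Charles
capture file, requests the related-video feed from mp.weixin.qq.com
(/mp/getappmsgext), records the feed in Feishu sheets, and downloads and
publishes qualifying videos via the Publish module.
"""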
import json
import os
import time
# import urllib.parse
import requests
import urllib3
from main.common import Common
from main.feishu_lib import Feishu
from main.publish import Publish

proxies = {"http": None, "https": None}


class Recommend:
    # Get the token info and save it to the Feishu cloud doc
    @classmethod
    def get_token(cls, log_type):
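        """Extract request credentials from the newest Charles capture file.

        Returns [title, vid, __biz, appmsg_token, pass_ticket, wap_sid2] in the
        order get_recommend unpacks them, or None when no capture is available yet.
        """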
        # Directory where the Charles capture (chlsfile) files are saved
        charles_file_dir = "../crawler-kanyikan-recommend/chlsfiles/"
        if len(os.listdir(charles_file_dir)) == 1:
            Common.logger(log_type).info("未找到chlsfile文件,等待60s")
            time.sleep(60)
        else:
            try:
                # All files in the target directory
                all_file = sorted(os.listdir(charles_file_dir))
                # The most recent capture file
                old_file = all_file[-1]
                # Split the file name from its extension
                new_file = os.path.splitext(old_file)
                # Rename the file extension to .txt
                os.rename(os.path.join(charles_file_dir, old_file),
                          os.path.join(charles_file_dir, new_file[0] + ".txt"))
                with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
                    contents = json.load(f, strict=False)
                # Credentials to extract from the captured request
                pass_ticket = None
                __biz = None
                appmsg_token = None
                wap_sid2 = None
                for content in contents:
                    if "mp.weixin.qq.com" in content['host']:
                        if content["path"] == r"/mp/getappmsgext":
                            headers = content["request"]["header"]["headers"]
                            title = content["request"]["body"]["text"].split("title=")[-1].split("&ct=")[0]
                            vid = content["request"]["body"]["text"].split("vid=")[-1].split("&is_pay_subscribe")[0]
                            for h in headers:
                                if h["name"] == "cookie" and "pass_ticket" in h["value"]:
                                    pass_ticket = h["value"].split("pass_ticket=")[-1]
                                    # print(f"pass_ticket:{pass_ticket}")
                                if h["name"] == "referer":
                                    __biz = h["value"].split("__biz=")[-1].split("&mid=")[0]
                                    # print(f"__biz:{__biz}")
                                if h["name"] == "cookie" and "appmsg_token" in h["value"]:
                                    appmsg_token = h["value"].split("appmsg_token=")[-1]
                                    # print(f"appmsg_token:{appmsg_token}")
                                if h["name"] == "cookie" and "wap_sid2" in h["value"]:
                                    wap_sid2 = h["value"].split("wap_sid2=")[-1]
                                    # print(f"wap_sid2:{wap_sid2}")
                            # Return the fields in the order get_recommend unpacks them,
                            # independent of the header order in the capture file
                            request_info = [title, vid, __biz, appmsg_token, pass_ticket, wap_sid2]
                            return request_info
            except Exception as e:
                Common.logger(log_type).error("获取session异常,30s后重试:{}", e)
                time.sleep(30)
                cls.get_token(log_type)

    # Fetch the recommendation feed
    @classmethod
    def get_recommend(cls, log_type):
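        """Request the related-video feed via /mp/getappmsgext with credentials
        from get_token, then append new items to the gzh "zWKFGb" sheet in Feishu."""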
        try:
            params = cls.get_token(log_type)
            if params is None:
                Common.logger(log_type).info("未获取到token等信息,30s后重试")
                time.sleep(30)
                cls.get_recommend(log_type)
            else:
                title = params[0]
                vid = params[1]
                __biz = params[2]
                appmsg_token = params[3]
                pass_ticket = params[4]
                wap_sid2 = params[5]
                url = "https://mp.weixin.qq.com/mp/getappmsgext?"
                headers = {
                    "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
                    "accept": "*/*",
                    "x-requested-with": "XMLHttpRequest",
                    "accept-language": "zh-cn",
                    "accept-encoding": "gzip, deflate, br",
                    "origin": "https://mp.weixin.qq.com",
                    "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 "
                                  "(KHTML, like Gecko) Mobile/15E148 MicroMessenger/8.0.26(0x18001a29)"
                                  " NetType/WIFI Language/zh_CN",
                    "referer": "https://mp.weixin.qq.com/s?"
                               "__biz=Mzg5MDY2NzY5Nw=="
                               "&mid=2247484710"
                               "&idx=1"
                               "&sn=657a341da42ed071aaa4d3ce853f64f2"
                               "&chksm=cfd852f8f8afdbeef513340dec8702433bd78137e7b4afb665d1de5014dc6837ed4dcc979684"
                               "&sessionid=1659509075"
                               "&channel_session_id="
                               "&scene=136"
                               "&subscene="
                               "&exptype="
                               "&reloadid=1659509075"
                               "&reloadseq=2"
                               "&related_video_source=10"
                               "&ascene=1"
                               "&devicetype=iOS14.7.1"
                               "&version=18001a29"
                               "&nettype=WIFI"
                               "&abtest_cookie=AAACAA%3D%3D"
                               "&lang=zh_CN"
                               "&session_us=gh_7364edd0ca9f"
                               "&fontScale=100"
                               "&exportkey=AdT9lhjADCG9r69d1meNZ5c%3D"
                               "&pass_ticket=" + pass_ticket +
                               "&wx_header=3",
                }
                query_string = {
                    "f": "json",
                    "mock": "",
                    "uin": "777",
                    "key": "777",
                    "pass_ticket": pass_ticket,
                    "wxtoken": "",
                    "devicetype": "iOS14.7.1",
                    "clientversion": "18001a29",
                    "__biz": __biz,
                    "appmsg_token": appmsg_token,
                    "x5": "0",
                    # "f": "json"
                }
                cookies = {
                    "appmsg_token": appmsg_token,
                    "devicetype": "iOS14.7.1",
                    "lang": "zh_CN",
                    "pass_ticket": pass_ticket,
                    "rewardsn": "",
                    "version": "18001a29",
                    "wap_sid2": wap_sid2,
                    "wxtokenkey": "777",
                    "wxuin": "2010747860"
                }
                # Form body for /mp/getappmsgext; mid, sn, req_id, album_id, etc. are
                # hard-coded values from a captured article request (matching the referer above)
                form = {
                    "r": "0.13440037781889225",
                    "__biz": __biz,
                    "appmsg_type": "9",
                    "mid": "2247484710",
                    "sn": "657a341da42ed071aaa4d3ce853f64f2",
                    "idx": "1",
                    "scene": "136",
                    "title": title,
                    "ct": "1654824718",
                    "abtest_cookie": "AAACAA==",
                    "devicetype": "iOS14.7.1",
                    "version": "18001a29",
                    "is_need_ticket": "0",
                    "is_need_ad": "1",
                    "comment_id": "0",
                    "is_need_reward": "0",
                    "both_ad": "0",
                    "reward_uin_count": "0",
                    "send_time": "",
                    "msg_daily_idx": "1",
                    "is_original": "0",
                    "is_only_read": "1",
                    "req_id": "0314yH9rphN660ejUCz1hRVD",
                    "pass_ticket": pass_ticket,
                    "is_temp_url": "0",
                    "item_show_type": "5",
                    "tmp_version": "1",
                    "more_read_type": "0",
                    "appmsg_like_type": "2",
                    "related_video_sn": "",
                    "related_video_num": "5",
                    "vid": vid,
                    "is_pay_subscribe": "0",
                    "pay_subscribe_uin_count": "0",
                    "has_red_packet_cover": "0",
                    "album_id": "1296223588617486300",
                    "album_video_num": "5",
                    "cur_album_id": "",
                    "is_public_related_video": "0",
                    "encode_info_by_base64": "0",
                    "exptype": ""
                }
                urllib3.disable_warnings()
                response = requests.post(url=url, headers=headers, cookies=cookies, params=query_string, data=form,
                                         verify=False)
                if "related_tag_video" not in response.json():
                    Common.logger(log_type).warning("response:{}\n", response.text)
                elif len(response.json()["related_tag_video"]) == 0:
                    Common.logger(log_type).warning("response:{}\n", response.text)
                    time.sleep(10)
                    cls.get_recommend(log_type)
                else:
                    feeds = response.json()["related_tag_video"]
                    for m in range(len(feeds)):
                        # video_title
                        if "title" not in feeds[m]:
                            video_title = 0
                        else:
                            video_title = feeds[m]["title"]
                            # video_title = base64.b64decode(video_title).decode("utf-8")
                        # video_id
                        if "vid" not in feeds[m]:
                            video_id = 0
                        else:
                            video_id = feeds[m]["vid"]
                        # play_cnt
                        if "read_num" not in feeds[m]:
                            play_cnt = 0
                        else:
                            play_cnt = feeds[m]["read_num"]
                        # like_cnt
                        if "like_num" not in feeds[m]:
                            like_cnt = 0
                        else:
                            like_cnt = feeds[m]["like_num"]
                        # duration
                        if "duration" not in feeds[m]:
                            duration = 0
                        else:
                            duration = feeds[m]["duration"]
                        # video_width / video_height
                        if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
                            video_width = 0
                            video_height = 0
                        else:
                            video_width = feeds[m]["videoWidth"]
                            video_height = feeds[m]["videoHeight"]
                        # send_time
                        if "pubTime" not in feeds[m]:
                            send_time = 0
                        else:
                            send_time = feeds[m]["pubTime"]
                        # user_name
                        if "srcDisplayName" not in feeds[m]:
                            user_name = 0
                        else:
                            user_name = feeds[m]["srcDisplayName"]
                            # user_name = base64.b64decode(user_name).decode("utf-8")
                        # user_id
                        if "srcUserName" not in feeds[m]:
                            user_id = 0
                        else:
                            user_id = feeds[m]["srcUserName"]
                        # head_url
                        if "head_img_url" not in feeds[m]:
                            head_url = 0
                        else:
                            head_url = feeds[m]["head_img_url"]
                        # cover_url
                        if "cover" not in feeds[m]:
                            cover_url = 0
                        else:
                            cover_url = feeds[m]["cover"]
                        # video_url
                        if "url" not in feeds[m]:
                            video_url = 0
                        else:
                            video_url = feeds[m]["url"]
                        # Download URL
                        download_url = cls.get_url(log_type, video_url)
                        Common.logger(log_type).info("video_title:{}", video_title)
                        Common.logger(log_type).info("video_id:{}", video_id)
                        Common.logger(log_type).info("play_cnt:{}", play_cnt)
                        Common.logger(log_type).info("like_cnt:{}", like_cnt)
                        Common.logger(log_type).info("duration:{}", duration)
                        Common.logger(log_type).info("video_width:{}", video_width)
                        Common.logger(log_type).info("video_height:{}", video_height)
                        Common.logger(log_type).info("send_time:{}", send_time)
                        Common.logger(log_type).info("user_name:{}", user_name)
                        Common.logger(log_type).info("user_id:{}", user_id)
                        Common.logger(log_type).info("head_url:{}", head_url)
                        Common.logger(log_type).info("cover_url:{}", cover_url)
                        Common.logger(log_type).info("video_url:{}", video_url)
                        Common.logger(log_type).info("download_url:{}", download_url)
                        if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
                            Common.logger(log_type).info("无效视频\n")
                        elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
                                               for x in y]:
                            Common.logger(log_type).info("该视频已下载\n")
                        elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
                                               for x in y]:
                            Common.logger(log_type).info("该视频已在feeds中\n")
                        else:
                            Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
                            get_feeds_time = int(time.time())
                            values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
                                       "推荐榜",
                                       video_title,
                                       str(video_id),
                                       play_cnt,
                                       like_cnt,
                                       duration,
                                       str(video_width) + "*" + str(video_height),
                                       time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
                                       user_name,
                                       user_id,
                                       head_url,
                                       cover_url,
                                       video_url,
                                       download_url]]
                            time.sleep(1)
                            Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
                            Common.logger(log_type).info("添加至recommend_feeds成功\n")
        except Exception as e:
            Common.logger(log_type).error("get_recommend异常:{}", e)

    # Get the video download URL
    @classmethod
    def get_url(cls, log_type, url):
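        """Fetch the article page and return the first mpvideo.qpic.cn video URL
        found in its source."""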
        try:
            payload = {}
            headers = {
                'Cookie': 'rewardsn=; wxtokenkey=777'
            }
            urllib3.disable_warnings()
            response = requests.get(url=url, headers=headers, data=payload, verify=False)
            response_list = response.text.splitlines()
            video_url_list = []
            for m in response_list:
                if "mpvideo.qpic.cn" in m:
                    video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
                    video_url_list.append(video_url)
            video_url = video_url_list[0]
            return video_url
        except Exception as e:
            Common.logger(log_type).error("get_url异常:{}", e)

    # Download / upload
    @classmethod
    def download_publish(cls, log_type, env):
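        """Process one row of the gzh "zWKFGb" feed sheet: download its cover and
        video, publish it via Publish.upload_and_publish, record it in the
        "fCs3BT" sheet, and delete the processed (or rejected) row."""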
        try:
            recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
            for i in range(1, len(recommend_feeds_sheet)):
                download_video_title = recommend_feeds_sheet[i][5]
                download_video_id = recommend_feeds_sheet[i][6]
                download_video_play_cnt = recommend_feeds_sheet[i][7]
                download_video_like_cnt = recommend_feeds_sheet[i][8]
                download_video_duration = recommend_feeds_sheet[i][9]
                download_width_height = recommend_feeds_sheet[i][10]
                download_video_send_time = recommend_feeds_sheet[i][11]
                download_user_name = recommend_feeds_sheet[i][12]
                download_user_id = recommend_feeds_sheet[i][13]
                download_head_url = recommend_feeds_sheet[i][14]
                download_cover_url = recommend_feeds_sheet[i][15]
                download_video_url = recommend_feeds_sheet[i][17]
                download_video_comment_cnt = 0
                download_video_share_cnt = 0
                Common.logger(log_type).info("正在判断第{}行", i + 1)
                Common.logger(log_type).info("download_video_title:{}", download_video_title)
                Common.logger(log_type).info("download_video_id:{}", download_video_id)
                Common.logger(log_type).info("download_video_play_cnt:{}", download_video_play_cnt)
                Common.logger(log_type).info("download_video_duration:{}", download_video_duration)
                Common.logger(log_type).info("download_video_send_time:{}", download_video_send_time)
                Common.logger(log_type).info("download_video_url:{}\n", download_video_url)
                # Common.logger(log_type).info("download_video_like_cnt:{}", download_video_like_cnt)
                # Common.logger(log_type).info("download_width_height:{}", download_width_height)
                # Common.logger(log_type).info("download_user_name:{}", download_user_name)
                # Common.logger(log_type).info("download_user_id:{}", download_user_id)
                # Common.logger(log_type).info("download_head_url:{}", download_head_url)
                # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
                # Skip empty rows
                if download_video_id is None or download_video_title is None or download_video_play_cnt is None:
                    Common.logger(log_type).warning("空行,略过\n")
                # # Filter sensitive words
                # elif any(word if word in download_video_title else False for word in
                #          cls.sensitive_words(log_type)) is True:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("视频已中敏感词,删除成功\n")
                #     return
                # # Download rules
                # elif cls.download_rule(download_video_share_cnt, download_video_play_cnt) is False:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("不满足下载规则,删除成功\n")
                #     return
                # Duration under 60s: delete the row
                elif int(download_video_duration) < 60:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("时长{}<60,删除成功\n", download_video_duration)
                    return
                # Dedupe against the downloaded-videos sheet (gzh)
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在公众号中已下载,删除成功\n")
                    return
                # Dedupe against the Kanyikan downloaded-videos sheet
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "kanyikan", "20ce0c")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在看一看中已下载,删除成功\n")
                    return
                else:
                    # Download the cover image
                    Common.download_method(log_type=log_type, text="cover",
                                           d_name=str(download_video_title), d_url=str(download_cover_url))
                    # Download the video
                    Common.download_method(log_type=log_type, text="video",
                                           d_name=str(download_video_title), d_url=str(download_video_url))
                    # Save the video info to "./videos/{download_video_title}/info.txt"
                    with open("./videos/" + download_video_title + "/" + "info.txt",
                              "a", encoding="UTF-8") as f_a:
                        f_a.write(str(download_video_id) + "\n" +
                                  str(download_video_title) + "\n" +
                                  str(download_video_duration) + "\n" +
                                  str(download_video_play_cnt) + "\n" +
                                  str(download_video_comment_cnt) + "\n" +
                                  str(download_video_like_cnt) + "\n" +
                                  str(download_video_share_cnt) + "\n" +
                                  str(download_width_height) + "\n" +
                                  str(int(time.mktime(
                                      time.strptime(download_video_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
                                  str(download_user_name) + "\n" +
                                  str(download_head_url) + "\n" +
                                  str(download_video_url) + "\n" +
                                  str(download_cover_url) + "\n" +
                                  "gzh")
                    Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
                    # Upload the video
                    Common.logger(log_type).info("开始上传视频:{}".format(download_video_title))
                    our_video_id = Publish.upload_and_publish(log_type, env, "play")
                    our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    Common.logger(log_type).info("视频上传完成:{}", download_video_title)
                    # Save the video ID to the cloud doc
                    Common.logger(log_type).info("保存视频ID至云文档:{}", download_video_title)
                    # Video-ID sheet: insert a new first row
                    Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
                    # Video-ID sheet: write the data into the first row
                    upload_time = int(time.time())
                    values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                               "推荐榜",
                               str(download_video_title),
                               str(download_video_id),
                               our_video_link,
                               download_video_play_cnt,
                               download_video_like_cnt,
                               download_video_duration,
                               str(download_width_height),
                               str(download_video_send_time),
                               str(download_user_name),
                               str(download_user_id),
                               str(download_head_url),
                               str(download_cover_url),
                               str(download_video_url)]]
                    time.sleep(1)
                    Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
                    # Delete rows or columns (ROWS or COLUMNS)
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("视频:{},下载/上传成功\n", download_video_title)
                    return
        except Exception as e:
            Common.logger(log_type).error("download_publish异常:{}", e)

    # Run download / upload
    @classmethod
    def run_download_publish(cls, log_type, env):
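        """Call download_publish repeatedly until the gzh "zWKFGb" feed sheet is
        down to a single row."""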
        try:
            while True:
                recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
                if len(recommend_feeds_sheet) == 1:
                    Common.logger(log_type).info("下载/上传完成")
                    break
                else:
                    cls.download_publish(log_type, env)
        except Exception as e:
            Common.logger(log_type).error("run_download_publish异常:{}", e)


if __name__ == "__main__":
    Recommend.get_recommend("recommend")
    # Recommend.download_publish("recommend")
    # Recommend.run_download_publish("recommend", "dev")
    # token = Recommend.get_token("recommend")
    # print(token)