# gzh_recommend.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/8/1
# import time
import base64
import json
import os
import sys
import time
# import urllib.parse
import requests
import urllib3

# Make project-local packages importable when this file is run as a script
sys.path.append(os.getcwd())
from crawler_gzh.main.common import Common
from crawler_gzh.main.feishu_lib import Feishu
from crawler_gzh.main.publish import Publish

# Explicitly disable system proxies for every request made by this module
proxies = {"http": None, "https": None}
  18. class Recommend:
  19. # 获取 token,保存至飞书云文档
  20. @classmethod
  21. def get_token(cls, log_type):
  22. # charles 抓包文件保存目录
  23. charles_file_dir = "./crawler-kanyikan-recommend/chlsfiles/"
  24. # charles_file_dir = "../chlsfiles/"
  25. if int(len(os.listdir(charles_file_dir))) == 1:
  26. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  27. time.sleep(60)
  28. else:
  29. try:
  30. # 目标文件夹下所有文件
  31. all_file = sorted(os.listdir(charles_file_dir))
  32. # 获取到目标文件
  33. old_file = all_file[-1]
  34. # 分离文件名与扩展名
  35. new_file = os.path.splitext(old_file)
  36. # 重命名文件后缀
  37. os.rename(os.path.join(charles_file_dir, old_file),
  38. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  39. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  40. contents = json.load(f, strict=False)
  41. Common.logger(log_type).info("chlsfile:{}", new_file)
  42. for content in contents:
  43. if "mp.weixin.qq.com" in content['host']:
  44. if content["path"] == r"/mp/getappmsgext":
  45. # query
  46. query = content["query"]
  47. Feishu.update_values("recommend", "gzh", "VzrN7E", "B9:B9", [[query]])
  48. # body
  49. headers = content["request"]["header"]["headers"]
  50. body = content["request"]["body"]["text"]
  51. # time.sleep(1)
  52. Feishu.update_values("recommend", "gzh", "VzrN7E", "B8:B8", [[body]])
  53. # title / vid
  54. title = content["request"]["body"]["text"].split("title=")[-1].split("&ct=")[0]
  55. vid = content["request"]["body"]["text"].split("vid=")[-1].split("&is_pay_subscribe")[0]
  56. # time.sleep(1)
  57. Feishu.update_values("recommend", "gzh", "VzrN7E", "B1:B1", [[title]])
  58. # time.sleep(1)
  59. Feishu.update_values("recommend", "gzh", "VzrN7E", "B2:B2", [[vid]])
  60. for h in headers:
  61. if h["name"] == "cookie" and "pass_ticket" in h["value"]:
  62. pass_ticket = h["value"].split("pass_ticket=")[-1]
  63. # print(f"pass_ticket:{pass_ticket}")
  64. Feishu.update_values("recommend", "gzh", "VzrN7E", "B5:B5", [[pass_ticket]])
  65. if h["name"] == "referer":
  66. referer = h["value"]
  67. # print(f"__biz:{referer}")
  68. Feishu.update_values("recommend", "gzh", "VzrN7E", "B7:B7", [[referer]])
  69. if h["name"] == "referer":
  70. __biz = h["value"].split("__biz=")[-1].split("&mid=")[0]
  71. # print(f"__biz:{__biz}")
  72. Feishu.update_values("recommend", "gzh", "VzrN7E", "B3:B3", [[__biz]])
  73. if h["name"] == "cookie" and "appmsg_token" in h["value"]:
  74. appmsg_token = h["value"].split("appmsg_token=")[-1]
  75. # print(f"appmsg_token:{appmsg_token}")
  76. Feishu.update_values("recommend", "gzh", "VzrN7E", "B4:B4", [[appmsg_token]])
  77. if h["name"] == "cookie" and "wap_sid2" in h["value"]:
  78. wap_sid2 = h["value"].split("wap_sid2=")[-1]
  79. # print(f"wap_sid2:{wap_sid2}")
  80. Feishu.update_values("recommend", "gzh", "VzrN7E", "B6:B6", [[wap_sid2]])
  81. except Exception as e:
  82. Common.logger(log_type).error("获取session异常,30s后重试:{}", e)
  83. time.sleep(30)
  84. cls.get_token(log_type)
  85. @classmethod
  86. def get_token_v2(cls, log_type):
  87. # charles 抓包文件保存目录
  88. charles_file_dir = "./crawler-kanyikan-recommend/chlsfiles/"
  89. # charles_file_dir = "../chlsfiles/"
  90. if int(len(os.listdir(charles_file_dir))) == 1:
  91. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  92. time.sleep(60)
  93. else:
  94. try:
  95. # 目标文件夹下所有文件
  96. all_file = sorted(os.listdir(charles_file_dir))
  97. # 获取到目标文件
  98. old_file = all_file[-1]
  99. # 分离文件名与扩展名
  100. new_file = os.path.splitext(old_file)
  101. # 重命名文件后缀
  102. os.rename(os.path.join(charles_file_dir, old_file),
  103. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  104. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  105. contents = json.load(f, strict=False)
  106. # Common.logger(log_type).info("chlsfile:{}\n", new_file)
  107. for content in contents:
  108. if content["host"] == "mp.weixin.qq.com" and content["path"] == r"/mp/getappmsgext":
  109. # query
  110. query = content["query"]
  111. Feishu.update_values("recommend", "gzh", "VzrN7E", "B9:B9", [[query]])
  112. Common.logger(log_type).info("保存query成功\n")
  113. headers = content["request"]["header"]["headers"]
  114. # body
  115. body = content["request"]["body"]["text"]
  116. # Common.logger(log_type).info("body:{}", body)
  117. Feishu.update_values("recommend", "gzh", "VzrN7E", "B8:B8", [[body]])
  118. Common.logger(log_type).info("保存body成功\n")
  119. # x-wechat-key
  120. for header in headers:
  121. if header["name"] == "x-wechat-key":
  122. x_wechat_key = header["value"]
  123. Feishu.update_values("recommend", "gzh", "VzrN7E", "B10:B10", [[x_wechat_key]])
  124. Common.logger(log_type).info("保存x_wechat_key成功\n")
  125. return True
  126. except Exception as e:
  127. Common.logger(log_type).error("get_token_v2异常:{}", e)
  128. # 获取推荐列表
  129. @classmethod
  130. def get_recommend(cls, log_type):
  131. try:
  132. token_sheet = Feishu.get_values_batch("recommend", "gzh", "VzrN7E")
  133. if token_sheet is None:
  134. Common.logger(log_type).info("未获取到token等信息,30s后重试")
  135. time.sleep(30)
  136. cls.get_recommend(log_type)
  137. else:
  138. # __biz = token_sheet[2][1]
  139. appmsg_token = token_sheet[3][1]
  140. pass_ticket = token_sheet[4][1]
  141. wap_sid2 = token_sheet[5][1]
  142. referer = token_sheet[6][1]
  143. body = token_sheet[7][1]
  144. query = token_sheet[8][1]
  145. url = "https://mp.weixin.qq.com/mp/getappmsgext?"
  146. headers = {
  147. # "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
  148. "content-type": 'text/plain',
  149. "accept": "*/*",
  150. "x-requested-with": "XMLHttpRequest",
  151. "accept-language": "zh-cn",
  152. "accept-encoding": "gzip, deflate, br",
  153. "origin": "https://mp.weixin.qq.com",
  154. "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 "
  155. "(KHTML, like Gecko) Mobile/15E148 MicroMessenger/8.0.26(0x18001a29)"
  156. " NetType/WIFI Language/zh_CN",
  157. "referer": referer
  158. }
  159. cookies = {
  160. "appmsg_token": appmsg_token,
  161. "devicetype": "iOS14.7.1",
  162. "lang": "zh_CN",
  163. "pass_ticket": pass_ticket,
  164. "rewardsn": "",
  165. "version": "18001a29",
  166. "wap_sid2": wap_sid2,
  167. "wxtokenkey": "777",
  168. "wxuin": "2010747860"
  169. }
  170. urllib3.disable_warnings()
  171. response = requests.post(url=url, headers=headers, cookies=cookies, params=query, data=body,
  172. verify=False, proxies=proxies)
  173. if "related_tag_video" not in response.json():
  174. Common.logger(log_type).warning("response:{}\n", response.text)
  175. elif len(response.json()["related_tag_video"]) == 0:
  176. Common.logger(log_type).warning("response:{}\n", response.text)
  177. time.sleep(10)
  178. cls.get_recommend(log_type)
  179. else:
  180. feeds = response.json()["related_tag_video"]
  181. for m in range(len(feeds)):
  182. # video_title
  183. if "title" not in feeds[m]:
  184. video_title = 0
  185. else:
  186. video_title = feeds[m]["title"]
  187. # video_title = base64.b64decode(video_title).decode("utf-8")
  188. # video_id
  189. if "vid" not in feeds[m]:
  190. video_id = 0
  191. else:
  192. video_id = feeds[m]["vid"]
  193. # play_cnt
  194. if "read_num" not in feeds[m]:
  195. play_cnt = 0
  196. else:
  197. play_cnt = feeds[m]["read_num"]
  198. # like_cnt
  199. if "like_num" not in feeds[m]:
  200. like_cnt = 0
  201. else:
  202. like_cnt = feeds[m]["like_num"]
  203. # duration
  204. if "duration" not in feeds[m]:
  205. duration = 0
  206. else:
  207. duration = feeds[m]["duration"]
  208. # video_width / video_height
  209. if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
  210. video_width = 0
  211. video_height = 0
  212. else:
  213. video_width = feeds[m]["videoWidth"]
  214. video_height = feeds[m]["videoHeight"]
  215. # send_time
  216. if "pubTime" not in feeds[m]:
  217. send_time = 0
  218. else:
  219. send_time = feeds[m]["pubTime"]
  220. # user_name
  221. if "srcDisplayName" not in feeds[m]:
  222. user_name = 0
  223. else:
  224. user_name = feeds[m]["srcDisplayName"]
  225. # user_name = base64.b64decode(user_name).decode("utf-8")
  226. # user_id
  227. if "srcUserName" not in feeds[m]:
  228. user_id = 0
  229. else:
  230. user_id = feeds[m]["srcUserName"]
  231. # head_url
  232. if "head_img_url" not in feeds[m]:
  233. head_url = 0
  234. else:
  235. head_url = feeds[m]["head_img_url"]
  236. # cover_url
  237. if "cover" not in feeds[m]:
  238. cover_url = 0
  239. else:
  240. cover_url = feeds[m]["cover"]
  241. # video_url
  242. if "url" not in feeds[m]:
  243. video_url = 0
  244. else:
  245. video_url = feeds[m]["url"]
  246. # 下载链接
  247. download_url = cls.get_url(log_type, video_url)
  248. Common.logger(log_type).info("video_title:{}", video_title)
  249. Common.logger(log_type).info("video_id:{}", video_id)
  250. Common.logger(log_type).info("play_cnt:{}", play_cnt)
  251. Common.logger(log_type).info("like_cnt:{}", like_cnt)
  252. Common.logger(log_type).info("duration:{}", duration)
  253. Common.logger(log_type).info("video_width:{}", video_width)
  254. Common.logger(log_type).info("video_height:{}", video_height)
  255. Common.logger(log_type).info("send_time:{}", send_time)
  256. Common.logger(log_type).info("user_name:{}", user_name)
  257. Common.logger(log_type).info("user_id:{}", user_id)
  258. Common.logger(log_type).info("head_url:{}", head_url)
  259. Common.logger(log_type).info("cover_url:{}", cover_url)
  260. Common.logger(log_type).info("video_url:{}", video_url)
  261. Common.logger(log_type).info("download_url:{}", download_url)
  262. if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
  263. Common.logger(log_type).info("无效视频\n")
  264. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in
  265. y]:
  266. Common.logger(log_type).info("该视频已下载\n")
  267. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb") for x in
  268. y]:
  269. Common.logger(log_type).info("该视频已在feeds中\n")
  270. else:
  271. Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
  272. get_feeds_time = int(time.time())
  273. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
  274. "推荐榜",
  275. video_title,
  276. str(video_id),
  277. play_cnt,
  278. like_cnt,
  279. duration,
  280. str(video_width) + "*" + str(video_height),
  281. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
  282. user_name,
  283. user_id,
  284. head_url,
  285. cover_url,
  286. video_url,
  287. download_url
  288. ]]
  289. time.sleep(1)
  290. Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
  291. Common.logger(log_type).info("添加至recommend_feeds成功\n")
  292. except Exception as e:
  293. Common.logger(log_type).error("get_recommend异常:{}", e)
  294. # 使用 token 获取推荐列表
  295. @classmethod
  296. def get_recommend_by_token(cls, log_type):
  297. try:
  298. get_token = cls.get_token_v2(log_type)
  299. if get_token is not True:
  300. Common.logger(log_type).warning("未获取到token,10s后重试")
  301. time.sleep(10)
  302. cls.get_recommend_by_token(log_type)
  303. else:
  304. # 获取公众号token
  305. token_sheet = Feishu.get_values_batch(log_type, "gzh", "VzrN7E")
  306. body = token_sheet[7][1]
  307. query = token_sheet[8][1]
  308. x_wechat_key = token_sheet[9][1]
  309. url = "https://mp.weixin.qq.com/mp/getappmsgext?"
  310. headers = {
  311. "content-type": "application/x-www-form-urlencoded",
  312. "x-wechat-uin": "MjAxMDc0Nzg2MA%3D%3D",
  313. "accept": "*/*",
  314. "accept-encoding": "gzip, deflate, br",
  315. "x-wechat-key": x_wechat_key,
  316. "x-wechat-acctmode": "0",
  317. "exportkey": "ASgNaiqfqTTPeQ%2BQ7X3yqzA%3D",
  318. "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) "
  319. "AppleWebKit/605.1.15 (KHTML, like Gecko) Mobile/15E148 "
  320. "MicroMessenger/8.0.26(0x18001a2b) NetType/WIFI Language/zh_CN",
  321. "accept-language": "zh-cn"
  322. }
  323. # query_string = {
  324. # "f": "json",
  325. # "mock": "",
  326. # "fasttmplajax": "1",
  327. # "uin": "",
  328. # "key": "",
  329. # "pass_ticket": "MPA2Yy1dnOo6JSfV1DNWyJcLBO9dwupvcgkQj6sXOo3puAQKD7t4Odst6kRxfmUc",
  330. # "wxtoken": "",
  331. # "devicetype": "iOS14.7.1",
  332. # "clientversion": "18001a2b",
  333. # "__biz": "MzkwMjM4OTYyMA==",
  334. # "enterid": "1659926777",
  335. # "appmsg_token": "",
  336. # "x5": "0",
  337. # "wx_header": "1"
  338. # }
  339. # form = {
  340. # "r": "0.2395852290889654",
  341. # "__biz": "MzkwMjM4OTYyMA==",
  342. # "appmsg_type": "9",
  343. # "mid": "2247483674",
  344. # "sn": "4719d4e269e8923f7cad6c8a1e43d14e",
  345. # "idx": "1",
  346. # "scene": "102",
  347. # "title": "%E4%B8%A4%E5%85%84%E5%BC%9F%E6%95%B4%E5%A4%A9%E5%A5%BD%E5%90%83%E6%87%92%E5%81%9A%EF%BC%8C%E6%97%A0%E6%89%80%E4%BA%8B%E4%BA%8B%E8%80%81%E6%83%B3%E7%9D%80%E4%B8%8D%E5%8A%B3%E8%80%8C%E8%8E%B7%EF%BC%8C%E5%A5%BD%E4%BA%86%E6%8A%A5%E5%BA%94%E6%9D%A5%E4%BA%86",
  348. # "ct": "1659803693",
  349. # "abtest_cookie": "",
  350. # "devicetype": "iOS14.7.1",
  351. # "version": "18001a2b",
  352. # "is_need_ticket": "0",
  353. # "is_need_ad": "1",
  354. # "comment_id": "0",
  355. # "is_need_reward": "0",
  356. # "both_ad": "0",
  357. # "reward_uin_count": "0",
  358. # "send_time": "",
  359. # "msg_daily_idx": "1",
  360. # "is_original": "0",
  361. # "is_only_read": "1",
  362. # "req_id": "",
  363. # "pass_ticket": "MPA2Yy1dnOo6JSfV1DNWyJcLBO9dwupvcgkQj6sXOo3puAQKD7t4Odst6kRxfmUc",
  364. # "is_temp_url": "0",
  365. # "item_show_type": "5",
  366. # "tmp_version": "1",
  367. # "more_read_type": "0",
  368. # "appmsg_like_type": "2",
  369. # "related_video_sn": "",
  370. # "related_video_num": "5",
  371. # "vid": "wxv_2520118281538846720",
  372. # "is_pay_subscribe": "0",
  373. # "pay_subscribe_uin_count": "0",
  374. # "has_red_packet_cover": "0",
  375. # "album_id": "1296223588617486300",
  376. # "album_video_num": "5",
  377. # "cur_album_id": "",
  378. # "is_public_related_video": "0",
  379. # "encode_info_by_base64": "1",
  380. # "exptype": ""
  381. # }
  382. urllib3.disable_warnings()
  383. response = requests.post(url=url, headers=headers, params=query, data=body, proxies=proxies,
  384. verify=False)
  385. if "related_tag_video" not in response.json():
  386. Common.logger(log_type).warning("response:{}\n", response.text)
  387. elif len(response.json()["related_tag_video"]) == 0:
  388. Common.logger(log_type).warning("response:{}\n", response.text)
  389. # time.sleep(10)
  390. # cls.get_recommend(log_type)
  391. else:
  392. feeds = response.json()["related_tag_video"]
  393. for m in range(len(feeds)):
  394. # video_title
  395. if "title" not in feeds[m]:
  396. video_title = 0
  397. else:
  398. video_title = feeds[m]["title"]
  399. video_title = base64.b64decode(video_title).decode("utf-8")
  400. # video_id
  401. if "vid" not in feeds[m]:
  402. video_id = 0
  403. else:
  404. video_id = feeds[m]["vid"]
  405. # play_cnt
  406. if "read_num" not in feeds[m]:
  407. play_cnt = 0
  408. else:
  409. play_cnt = feeds[m]["read_num"]
  410. # like_cnt
  411. if "like_num" not in feeds[m]:
  412. like_cnt = 0
  413. else:
  414. like_cnt = feeds[m]["like_num"]
  415. # duration
  416. if "duration" not in feeds[m]:
  417. duration = 0
  418. else:
  419. duration = feeds[m]["duration"]
  420. # video_width / video_height
  421. if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
  422. video_width = 0
  423. video_height = 0
  424. else:
  425. video_width = feeds[m]["videoWidth"]
  426. video_height = feeds[m]["videoHeight"]
  427. # send_time
  428. if "pubTime" not in feeds[m]:
  429. send_time = 0
  430. else:
  431. send_time = feeds[m]["pubTime"]
  432. # user_name
  433. if "srcDisplayName" not in feeds[m]:
  434. user_name = 0
  435. else:
  436. user_name = feeds[m]["srcDisplayName"]
  437. user_name = base64.b64decode(user_name).decode("utf-8")
  438. # user_id
  439. if "srcUserName" not in feeds[m]:
  440. user_id = 0
  441. else:
  442. user_id = feeds[m]["srcUserName"]
  443. # head_url
  444. if "head_img_url" not in feeds[m]:
  445. head_url = 0
  446. else:
  447. head_url = feeds[m]["head_img_url"]
  448. # cover_url
  449. if "cover" not in feeds[m]:
  450. cover_url = 0
  451. else:
  452. cover_url = feeds[m]["cover"]
  453. # video_url
  454. if "url" not in feeds[m]:
  455. video_url = 0
  456. else:
  457. video_url = feeds[m]["url"]
  458. # 下载链接
  459. download_url = cls.get_url(log_type, video_url)
  460. Common.logger(log_type).info("video_title:{}", video_title)
  461. Common.logger(log_type).info("video_id:{}", video_id)
  462. Common.logger(log_type).info("play_cnt:{}", play_cnt)
  463. Common.logger(log_type).info("like_cnt:{}", like_cnt)
  464. Common.logger(log_type).info("duration:{}", duration)
  465. Common.logger(log_type).info("video_width:{}", video_width)
  466. Common.logger(log_type).info("video_height:{}", video_height)
  467. Common.logger(log_type).info("send_time:{}", send_time)
  468. Common.logger(log_type).info("user_name:{}", user_name)
  469. Common.logger(log_type).info("user_id:{}", user_id)
  470. Common.logger(log_type).info("head_url:{}", head_url)
  471. Common.logger(log_type).info("cover_url:{}", cover_url)
  472. Common.logger(log_type).info("video_url:{}", video_url)
  473. Common.logger(log_type).info("download_url:{}", download_url)
  474. if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
  475. Common.logger(log_type).info("无效视频\n")
  476. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in
  477. y]:
  478. Common.logger(log_type).info("该视频已下载\n")
  479. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb") for x in
  480. y]:
  481. Common.logger(log_type).info("该视频已在feeds中\n")
  482. else:
  483. Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
  484. get_feeds_time = int(time.time())
  485. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
  486. "推荐榜",
  487. video_title,
  488. str(video_id),
  489. play_cnt,
  490. like_cnt,
  491. duration,
  492. str(video_width) + "*" + str(video_height),
  493. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
  494. user_name,
  495. user_id,
  496. head_url,
  497. cover_url,
  498. video_url,
  499. download_url
  500. ]]
  501. time.sleep(1)
  502. Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
  503. Common.logger(log_type).info("添加至recommend_feeds成功\n")
  504. except Exception as e:
  505. Common.logger(log_type).error("get_recommend_by_token异常:{}", e)
  506. # 获取视频下载链接
  507. @classmethod
  508. def get_url(cls, log_type, url):
  509. try:
  510. payload = {}
  511. headers = {
  512. 'Cookie': 'rewardsn=; wxtokenkey=777'
  513. }
  514. urllib3.disable_warnings()
  515. response = requests.get(url=url, headers=headers, data=payload, verify=False, proxies=proxies)
  516. response_list = response.text.splitlines()
  517. video_url_list = []
  518. for m in response_list:
  519. if "mpvideo.qpic.cn" in m:
  520. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  521. video_url_list.append(video_url)
  522. video_url = video_url_list[0]
  523. return video_url
  524. except Exception as e:
  525. Common.logger(log_type).error("get_url异常:{}", e)
    # Download and upload/publish one video per call
    @classmethod
    def download_publish(cls, log_type, env):
        """Process the first eligible row of the "zWKFGb" feeds sheet.

        Walks the sheet top-down; each row is either skipped (empty), deleted
        (too short / already downloaded elsewhere), or downloaded, published
        via Publish.upload_and_publish, recorded in the "fCs3BT" sheet and
        removed from the feeds sheet. The method returns after handling one
        actionable row, so the caller loops it (see run_download_publish).

        :param log_type: logger channel name used by Common.logger
        :param env: environment name forwarded to Publish.upload_and_publish
        """
        try:
            recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
            # Row 0 is the header; data rows start at index 1
            for i in range(1, len(recommend_feeds_sheet)):
                download_video_title = recommend_feeds_sheet[i][5]
                download_video_id = recommend_feeds_sheet[i][6]
                download_video_play_cnt = recommend_feeds_sheet[i][7]
                download_video_like_cnt = recommend_feeds_sheet[i][8]
                download_video_duration = recommend_feeds_sheet[i][9]
                download_width_height = recommend_feeds_sheet[i][10]
                download_video_send_time = recommend_feeds_sheet[i][11]
                download_user_name = recommend_feeds_sheet[i][12]
                download_user_id = recommend_feeds_sheet[i][13]
                download_head_url = recommend_feeds_sheet[i][14]
                download_cover_url = recommend_feeds_sheet[i][15]
                download_video_url = recommend_feeds_sheet[i][17]
                # Comment/share counts are not tracked by this source
                download_video_comment_cnt = 0
                download_video_share_cnt = 0
                Common.logger(log_type).info("正在判断第{}行", i + 1)
                Common.logger(log_type).info("download_video_title:{}", download_video_title)
                Common.logger(log_type).info("download_video_id:{}", download_video_id)
                Common.logger(log_type).info("download_video_play_cnt:{}", download_video_play_cnt)
                Common.logger(log_type).info("download_video_duration:{}", download_video_duration)
                Common.logger(log_type).info("download_video_send_time:{}", download_video_send_time)
                Common.logger(log_type).info("download_video_url:{}\n", download_video_url)
                # Common.logger(log_type).info("download_video_like_cnt:{}", download_video_like_cnt)
                # Common.logger(log_type).info("download_width_height:{}", download_width_height)
                # Common.logger(log_type).info("download_user_name:{}", download_user_name)
                # Common.logger(log_type).info("download_user_id:{}", download_user_id)
                # Common.logger(log_type).info("download_head_url:{}", download_head_url)
                # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
                # Skip empty rows
                if download_video_id is None or download_video_title is None or download_video_play_cnt is None:
                    Common.logger(log_type).warning("空行,略过\n")
                # # Filter sensitive words
                # elif any(word if word in download_video_title else False for word in
                #          cls.sensitive_words(log_type)) is True:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("视频已中敏感词,删除成功\n")
                #     return
                # # Download rules
                # elif cls.download_rule(download_video_share_cnt, download_video_play_cnt) is False:
                #     Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
                #     Common.logger(log_type).info("不满足下载规则,删除成功\n")
                #     return
                # Shorter than 60s: delete the row and stop this pass
                elif int(download_video_duration) < 60:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("时长{}<60,删除成功\n", download_video_duration)
                    return
                # De-dup against the gzh downloaded-videos sheet
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在公众号中已下载,删除成功\n")
                    return
                # De-dup against the kanyikan downloaded-videos sheet
                elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "kanyikan", "20ce0c")
                                                for n in m]:
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("该视频在看一看中已下载,删除成功\n")
                    return
                else:
                    # Download the cover image
                    Common.download_method(log_type=log_type, text="cover",
                                           d_name=str(download_video_title), d_url=str(download_cover_url))
                    # Download the video file
                    Common.download_method(log_type=log_type, text="video",
                                           d_name=str(download_video_title), d_url=str(download_video_url))
                    # Persist metadata to "./videos/{download_video_title}/info.txt";
                    # line order is presumably the contract expected by Publish — confirm
                    with open("./crawler_gzh/videos/" + download_video_title + "/" + "info.txt",
                              "a", encoding="UTF-8") as f_a:
                        f_a.write(str(download_video_id) + "\n" +
                                  str(download_video_title) + "\n" +
                                  str(download_video_duration) + "\n" +
                                  str(download_video_play_cnt) + "\n" +
                                  str(download_video_comment_cnt) + "\n" +
                                  str(download_video_like_cnt) + "\n" +
                                  str(download_video_share_cnt) + "\n" +
                                  str(download_width_height) + "\n" +
                                  str(int(time.mktime(
                                      time.strptime(download_video_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
                                  str(download_user_name) + "\n" +
                                  str(download_head_url) + "\n" +
                                  str(download_video_url) + "\n" +
                                  str(download_cover_url) + "\n" +
                                  "gzh")
                    Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
                    # Upload the video
                    Common.logger(log_type).info("开始上传视频:{}".format(download_video_title))
                    our_video_id = Publish.upload_and_publish(log_type, env, "play")
                    our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    Common.logger(log_type).info("视频上传完成:{}", download_video_title)
                    # Record the published video in the cloud sheet
                    Common.logger(log_type).info("保存视频ID至云文档:{}", download_video_title)
                    # Insert a fresh first row in the video-ID sheet
                    Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
                    # Fill the new first row
                    upload_time = int(time.time())
                    values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                               "推荐榜",
                               str(download_video_title),
                               str(download_video_id),
                               our_video_link,
                               download_video_play_cnt,
                               download_video_like_cnt,
                               download_video_duration,
                               str(download_width_height),
                               str(download_video_send_time),
                               str(download_user_name),
                               str(download_user_id),
                               str(download_head_url),
                               str(download_cover_url),
                               str(download_video_url)]]
                    time.sleep(1)
                    Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
                    # Remove the processed row from the feeds sheet (ROWS or COLUMNS)
                    Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("视频:{},下载/上传成功\n", download_video_title)
                    return
        except Exception as e:
            Common.logger(log_type).error("download_publish异常:{}", e)
  650. # 执行下载/上传
  651. @classmethod
  652. def run_download_publish(cls, log_type, env):
  653. try:
  654. while True:
  655. recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
  656. if len(recommend_feeds_sheet) == 1:
  657. Common.logger(log_type).info("下载/上传完成")
  658. break
  659. else:
  660. cls.download_publish(log_type, env)
  661. except Exception as e:
  662. Common.logger(log_type).error("run_download_publish异常:{}", e)
if __name__ == "__main__":
    # Entry point: fetch the recommendation feed using a freshly captured token
    Recommend.get_recommend_by_token("recommend")
    # Recommend.download_publish("recommend")
    # Recommend.run_download_publish("recommend", "dev")
    # print(Recommend.get_token_v2("recommend"))
    # print(token)