# gzh_recommend.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/8/1
  4. # import time
  5. import base64
  6. import json
  7. import os
  8. import sys
  9. import time
  10. # import urllib.parse
  11. import requests
  12. import urllib3
  13. sys.path.append(os.getcwd())
  14. from crawler_gzh.main.common import Common
  15. from crawler_gzh.main.feishu_lib import Feishu
  16. from crawler_gzh.main.publish import Publish
  17. proxies = {"http": None, "https": None}
  18. class Recommend:
  19. # 获取 token,保存至飞书云文档
  20. @classmethod
  21. def get_token(cls, log_type):
  22. # charles 抓包文件保存目录
  23. charles_file_dir = "./crawler-kanyikan-recommend/chlsfiles/"
  24. # charles_file_dir = "../chlsfiles/"
  25. if int(len(os.listdir(charles_file_dir))) == 1:
  26. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  27. time.sleep(60)
  28. else:
  29. try:
  30. # 目标文件夹下所有文件
  31. all_file = sorted(os.listdir(charles_file_dir))
  32. # 获取到目标文件
  33. old_file = all_file[-1]
  34. # 分离文件名与扩展名
  35. new_file = os.path.splitext(old_file)
  36. # 重命名文件后缀
  37. os.rename(os.path.join(charles_file_dir, old_file),
  38. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  39. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  40. contents = json.load(f, strict=False)
  41. Common.logger(log_type).info("chlsfile:{}", new_file)
  42. for content in contents:
  43. if "mp.weixin.qq.com" in content['host']:
  44. if content["path"] == r"/mp/getappmsgext":
  45. # query
  46. query = content["query"]
  47. Feishu.update_values("recommend", "gzh", "VzrN7E", "B9:B9", [[query]])
  48. # body
  49. headers = content["request"]["header"]["headers"]
  50. body = content["request"]["body"]["text"]
  51. # time.sleep(1)
  52. Feishu.update_values("recommend", "gzh", "VzrN7E", "B8:B8", [[body]])
  53. # title / vid
  54. title = content["request"]["body"]["text"].split("title=")[-1].split("&ct=")[0]
  55. vid = content["request"]["body"]["text"].split("vid=")[-1].split("&is_pay_subscribe")[0]
  56. # time.sleep(1)
  57. Feishu.update_values("recommend", "gzh", "VzrN7E", "B1:B1", [[title]])
  58. # time.sleep(1)
  59. Feishu.update_values("recommend", "gzh", "VzrN7E", "B2:B2", [[vid]])
  60. for h in headers:
  61. if h["name"] == "cookie" and "pass_ticket" in h["value"]:
  62. pass_ticket = h["value"].split("pass_ticket=")[-1]
  63. # print(f"pass_ticket:{pass_ticket}")
  64. Feishu.update_values("recommend", "gzh", "VzrN7E", "B5:B5", [[pass_ticket]])
  65. if h["name"] == "referer":
  66. referer = h["value"]
  67. # print(f"__biz:{referer}")
  68. Feishu.update_values("recommend", "gzh", "VzrN7E", "B7:B7", [[referer]])
  69. if h["name"] == "referer":
  70. __biz = h["value"].split("__biz=")[-1].split("&mid=")[0]
  71. # print(f"__biz:{__biz}")
  72. Feishu.update_values("recommend", "gzh", "VzrN7E", "B3:B3", [[__biz]])
  73. if h["name"] == "cookie" and "appmsg_token" in h["value"]:
  74. appmsg_token = h["value"].split("appmsg_token=")[-1]
  75. # print(f"appmsg_token:{appmsg_token}")
  76. Feishu.update_values("recommend", "gzh", "VzrN7E", "B4:B4", [[appmsg_token]])
  77. if h["name"] == "cookie" and "wap_sid2" in h["value"]:
  78. wap_sid2 = h["value"].split("wap_sid2=")[-1]
  79. # print(f"wap_sid2:{wap_sid2}")
  80. Feishu.update_values("recommend", "gzh", "VzrN7E", "B6:B6", [[wap_sid2]])
  81. except Exception as e:
  82. Common.logger(log_type).error("获取session异常,30s后重试:{}", e)
  83. time.sleep(30)
  84. cls.get_token(log_type)
  85. @classmethod
  86. def get_token_v2(cls, log_type):
  87. # charles 抓包文件保存目录
  88. # charles_file_dir = "./crawler-kanyikan-recommend/chlsfiles/"
  89. charles_file_dir = "../chlsfiles/"
  90. if int(len(os.listdir(charles_file_dir))) == 1:
  91. Common.logger(log_type).info("未找到chlsfile文件,等待60s")
  92. time.sleep(60)
  93. else:
  94. try:
  95. # 目标文件夹下所有文件
  96. all_file = sorted(os.listdir(charles_file_dir))
  97. # 获取到目标文件
  98. old_file = all_file[-1]
  99. # 分离文件名与扩展名
  100. new_file = os.path.splitext(old_file)
  101. # 重命名文件后缀
  102. os.rename(os.path.join(charles_file_dir, old_file),
  103. os.path.join(charles_file_dir, new_file[0] + ".txt"))
  104. with open(charles_file_dir + new_file[0] + ".txt", encoding='utf-8-sig', errors='ignore') as f:
  105. contents = json.load(f, strict=False)
  106. # Common.logger(log_type).info("chlsfile:{}\n", new_file)
  107. for content in contents:
  108. if content["host"] == "mp.weixin.qq.com" and content["path"] == r"/mp/getappmsgext":
  109. # Common.logger(log_type).info("content:{}\n", content)
  110. # query
  111. query = content["query"]
  112. # Common.logger(log_type).info("query:{}\n", query)
  113. Feishu.update_values("recommend", "gzh", "VzrN7E", "B9:B9", [[query]])
  114. Common.logger(log_type).info("保存query成功\n")
  115. # body
  116. body = content["request"]["body"]["text"]
  117. # Common.logger(log_type).info("body:{}", body)
  118. Feishu.update_values("recommend", "gzh", "VzrN7E", "B8:B8", [[body]])
  119. Common.logger(log_type).info("保存body成功\n")
  120. # referer
  121. headers = content["request"]["header"]["headers"]
  122. # Common.logger(log_type).info("headers:{}", headers)
  123. for header in headers:
  124. # referer
  125. if header["name"] == "referer":
  126. referer = header["value"]
  127. # Common.logger(log_type).info("referer:{}\n", referer)
  128. Feishu.update_values("recommend", "gzh", "VzrN7E", "B7:B7", [[referer]])
  129. Common.logger(log_type).info("保存referer成功\n")
  130. # wxuin
  131. if header["name"] == "cookie" and "wxuin" in header["value"]:
  132. wxuin = header["value"].split("wxuin=")[-1]
  133. # Common.logger(log_type).info("wxuin:{}\n", wxuin)
  134. Feishu.update_values("recommend", "gzh", "VzrN7E", "B10:B10", [[wxuin]])
  135. Common.logger(log_type).info("保存wxuin成功\n")
  136. # version
  137. if header["name"] == "cookie" and "version" in header["value"]:
  138. version = header["value"].split("version=")[-1]
  139. Common.logger(log_type).info("version:{}\n", version)
  140. Feishu.update_values("recommend", "gzh", "VzrN7E", "B11:B11", [[version]])
  141. Common.logger(log_type).info("保存version成功\n")
  142. # pass_ticket
  143. if header["name"] == "cookie" and "pass_ticket" in header["value"]:
  144. pass_ticket = header["value"].split("pass_ticket=")[-1]
  145. Common.logger(log_type).info("pass_ticket:{}\n", pass_ticket)
  146. Feishu.update_values("recommend", "gzh", "VzrN7E", "B5:B5", [[pass_ticket]])
  147. Common.logger(log_type).info("保存pass_ticket成功\n")
  148. # appmsg_token
  149. if header["name"] == "cookie" and "appmsg_token" in header["value"]:
  150. appmsg_token = header["value"].split("appmsg_token=")[-1]
  151. Common.logger(log_type).info("appmsg_token:{}\n", appmsg_token)
  152. Feishu.update_values("recommend", "gzh", "VzrN7E", "B4:B4", [[appmsg_token]])
  153. Common.logger(log_type).info("保存appmsg_token成功\n")
  154. # appmsg_token
  155. if header["name"] == "cookie" and "wap_sid2" in header["value"]:
  156. wap_sid2 = header["value"].split("wap_sid2=")[-1]
  157. Common.logger(log_type).info("wap_sid2:{}\n", wap_sid2)
  158. Feishu.update_values("recommend", "gzh", "VzrN7E", "B6:B6", [[wap_sid2]])
  159. Common.logger(log_type).info("保存wap_sid2成功\n")
  160. return True
  161. # x-wechat-key
  162. # for header in headers:
  163. # if header["name"] == "x-wechat-key":
  164. # x_wechat_key = header["value"]
  165. # Common.logger(log_type).info("x_wechat_key:{}\n", x_wechat_key)
  166. # Feishu.update_values("recommend", "gzh", "VzrN7E", "B12:B12", [[x_wechat_key]])
  167. # Common.logger(log_type).info("保存x_wechat_key成功\n")
  168. # return True
  169. except Exception as e:
  170. Common.logger(log_type).error("get_token_v2异常:{}", e)
  171. # 获取推荐列表
  172. @classmethod
  173. def get_recommend(cls, log_type):
  174. try:
  175. token_sheet = Feishu.get_values_batch("recommend", "gzh", "VzrN7E")
  176. if token_sheet is None:
  177. Common.logger(log_type).info("未获取到token等信息,30s后重试")
  178. time.sleep(30)
  179. cls.get_recommend(log_type)
  180. else:
  181. # __biz = token_sheet[2][1]
  182. appmsg_token = token_sheet[3][1]
  183. pass_ticket = token_sheet[4][1]
  184. wap_sid2 = token_sheet[5][1]
  185. referer = token_sheet[6][1]
  186. body = token_sheet[7][1]
  187. query = token_sheet[8][1]
  188. url = "https://mp.weixin.qq.com/mp/getappmsgext?"
  189. headers = {
  190. # "content-type": "application/x-www-form-urlencoded; charset=UTF-8",
  191. "content-type": 'text/plain',
  192. "accept": "*/*",
  193. "x-requested-with": "XMLHttpRequest",
  194. "accept-language": "zh-cn",
  195. "accept-encoding": "gzip, deflate, br",
  196. "origin": "https://mp.weixin.qq.com",
  197. "user-agent": "Mozilla/5.0 (iPhone; CPU iPhone OS 14_7_1 like Mac OS X) AppleWebKit/605.1.15 "
  198. "(KHTML, like Gecko) Mobile/15E148 MicroMessenger/8.0.26(0x18001a29)"
  199. " NetType/WIFI Language/zh_CN",
  200. "referer": referer
  201. }
  202. cookies = {
  203. "appmsg_token": appmsg_token,
  204. "devicetype": "iOS14.7.1",
  205. "lang": "zh_CN",
  206. "pass_ticket": pass_ticket,
  207. "rewardsn": "",
  208. "version": "18001a29",
  209. "wap_sid2": wap_sid2,
  210. "wxtokenkey": "777",
  211. "wxuin": "2010747860"
  212. }
  213. urllib3.disable_warnings()
  214. response = requests.post(url=url, headers=headers, cookies=cookies, params=query, data=body,
  215. verify=False, proxies=proxies)
  216. if "related_tag_video" not in response.json():
  217. Common.logger(log_type).warning("response:{}\n", response.text)
  218. elif len(response.json()["related_tag_video"]) == 0:
  219. Common.logger(log_type).warning("response:{}\n", response.text)
  220. time.sleep(10)
  221. cls.get_recommend(log_type)
  222. else:
  223. feeds = response.json()["related_tag_video"]
  224. for m in range(len(feeds)):
  225. # video_title
  226. if "title" not in feeds[m]:
  227. video_title = 0
  228. else:
  229. video_title = feeds[m]["title"]
  230. # video_title = base64.b64decode(video_title).decode("utf-8")
  231. # video_id
  232. if "vid" not in feeds[m]:
  233. video_id = 0
  234. else:
  235. video_id = feeds[m]["vid"]
  236. # play_cnt
  237. if "read_num" not in feeds[m]:
  238. play_cnt = 0
  239. else:
  240. play_cnt = feeds[m]["read_num"]
  241. # like_cnt
  242. if "like_num" not in feeds[m]:
  243. like_cnt = 0
  244. else:
  245. like_cnt = feeds[m]["like_num"]
  246. # duration
  247. if "duration" not in feeds[m]:
  248. duration = 0
  249. else:
  250. duration = feeds[m]["duration"]
  251. # video_width / video_height
  252. if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
  253. video_width = 0
  254. video_height = 0
  255. else:
  256. video_width = feeds[m]["videoWidth"]
  257. video_height = feeds[m]["videoHeight"]
  258. # send_time
  259. if "pubTime" not in feeds[m]:
  260. send_time = 0
  261. else:
  262. send_time = feeds[m]["pubTime"]
  263. # user_name
  264. if "srcDisplayName" not in feeds[m]:
  265. user_name = 0
  266. else:
  267. user_name = feeds[m]["srcDisplayName"]
  268. # user_name = base64.b64decode(user_name).decode("utf-8")
  269. # user_id
  270. if "srcUserName" not in feeds[m]:
  271. user_id = 0
  272. else:
  273. user_id = feeds[m]["srcUserName"]
  274. # head_url
  275. if "head_img_url" not in feeds[m]:
  276. head_url = 0
  277. else:
  278. head_url = feeds[m]["head_img_url"]
  279. # cover_url
  280. if "cover" not in feeds[m]:
  281. cover_url = 0
  282. else:
  283. cover_url = feeds[m]["cover"]
  284. # video_url
  285. if "url" not in feeds[m]:
  286. video_url = 0
  287. else:
  288. video_url = feeds[m]["url"]
  289. # 下载链接
  290. download_url = cls.get_url(log_type, video_url)
  291. Common.logger(log_type).info("video_title:{}", video_title)
  292. Common.logger(log_type).info("video_id:{}", video_id)
  293. Common.logger(log_type).info("play_cnt:{}", play_cnt)
  294. Common.logger(log_type).info("like_cnt:{}", like_cnt)
  295. Common.logger(log_type).info("duration:{}", duration)
  296. Common.logger(log_type).info("video_width:{}", video_width)
  297. Common.logger(log_type).info("video_height:{}", video_height)
  298. Common.logger(log_type).info("send_time:{}", send_time)
  299. Common.logger(log_type).info("user_name:{}", user_name)
  300. Common.logger(log_type).info("user_id:{}", user_id)
  301. Common.logger(log_type).info("head_url:{}", head_url)
  302. Common.logger(log_type).info("cover_url:{}", cover_url)
  303. Common.logger(log_type).info("video_url:{}", video_url)
  304. Common.logger(log_type).info("download_url:{}", download_url)
  305. if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
  306. Common.logger(log_type).info("无效视频\n")
  307. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in
  308. y]:
  309. Common.logger(log_type).info("该视频已下载\n")
  310. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb") for x in
  311. y]:
  312. Common.logger(log_type).info("该视频已在feeds中\n")
  313. else:
  314. Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
  315. get_feeds_time = int(time.time())
  316. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
  317. "推荐榜",
  318. video_title,
  319. str(video_id),
  320. play_cnt,
  321. like_cnt,
  322. duration,
  323. str(video_width) + "*" + str(video_height),
  324. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
  325. user_name,
  326. user_id,
  327. head_url,
  328. cover_url,
  329. video_url,
  330. download_url
  331. ]]
  332. time.sleep(1)
  333. Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
  334. Common.logger(log_type).info("添加至recommend_feeds成功\n")
  335. except Exception as e:
  336. Common.logger(log_type).error("get_recommend异常:{}", e)
  337. # 使用 token 获取推荐列表
  338. @classmethod
  339. def get_recommend_by_token(cls, log_type):
  340. try:
  341. get_token = cls.get_token_v2(log_type)
  342. if get_token is not True:
  343. Common.logger(log_type).warning("未获取到token,10s后重试")
  344. time.sleep(10)
  345. cls.get_recommend_by_token(log_type)
  346. else:
  347. # 获取公众号token
  348. token_sheet = Feishu.get_values_batch(log_type, "gzh", "VzrN7E")
  349. appmsg_token = token_sheet[3][1]
  350. pass_ticket = token_sheet[4][1]
  351. wap_sid2 = token_sheet[5][1]
  352. referer = token_sheet[6][1]
  353. body = token_sheet[7][1]
  354. query = token_sheet[8][1]
  355. wxuin = token_sheet[9][1]
  356. version = token_sheet[10][1]
  357. # x_wechat_key = token_sheet[11][1]
  358. url = "https://mp.weixin.qq.com/mp/getappmsgext?"
  359. headers = {
  360. "origin": "https://mp.weixin.qq.com",
  361. "x-requested-with": "XMLHttpRequest",
  362. "content-type": "application/x-www-form-urlencoded",
  363. "accept": "*/*",
  364. "sec-fetch-site": "same-origin",
  365. "sec-fetch-mode": "cors",
  366. "sec-fetch-dest": "empty",
  367. "accept-encoding": "gzip, deflate, br",
  368. "user-agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) "
  369. "Chrome/81.0.4044.138 Safari/537.36 NetType/WIFI "
  370. "MicroMessenger/7.0.20.1781(0x6700143B) WindowsWechat(0x6307001e)",
  371. "accept-language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
  372. "referer": str(referer)
  373. }
  374. cookies = {
  375. "rewardsn": "",
  376. "wxtokenkey": "777",
  377. "devicetype": "Windows10x64",
  378. "lang": "zh_CN",
  379. "wxuin": str(wxuin),
  380. "version": str(version),
  381. "pass_ticket": str(pass_ticket),
  382. "appmsg_token": str(appmsg_token),
  383. "wap_sid2": str(wap_sid2)
  384. }
  385. # query_string = {
  386. # "f": "json",
  387. # "mock": "",
  388. # "fasttmplajax": "1",
  389. # "uin": "",
  390. # "key": "",
  391. # "pass_ticket": "MPA2Yy1dnOo6JSfV1DNWyJcLBO9dwupvcgkQj6sXOo3puAQKD7t4Odst6kRxfmUc",
  392. # "wxtoken": "",
  393. # "devicetype": "iOS14.7.1",
  394. # "clientversion": "18001a2b",
  395. # "__biz": "MzkwMjM4OTYyMA==",
  396. # "enterid": "1659926777",
  397. # "appmsg_token": "",
  398. # "x5": "0",
  399. # "wx_header": "1"
  400. # }
  401. # form = {
  402. # "r": "0.2395852290889654",
  403. # "__biz": "MzkwMjM4OTYyMA==",
  404. # "appmsg_type": "9",
  405. # "mid": "2247483674",
  406. # "sn": "4719d4e269e8923f7cad6c8a1e43d14e",
  407. # "idx": "1",
  408. # "scene": "102",
  409. # "title": "%E4%B8%A4%E5%85%84%E5%BC%9F%E6%95%B4%E5%A4%A9%E5%A5%BD%E5%90%83%E6%87%92%E5%81%9A%EF%BC%8C%E6%97%A0%E6%89%80%E4%BA%8B%E4%BA%8B%E8%80%81%E6%83%B3%E7%9D%80%E4%B8%8D%E5%8A%B3%E8%80%8C%E8%8E%B7%EF%BC%8C%E5%A5%BD%E4%BA%86%E6%8A%A5%E5%BA%94%E6%9D%A5%E4%BA%86",
  410. # "ct": "1659803693",
  411. # "abtest_cookie": "",
  412. # "devicetype": "iOS14.7.1",
  413. # "version": "18001a2b",
  414. # "is_need_ticket": "0",
  415. # "is_need_ad": "1",
  416. # "comment_id": "0",
  417. # "is_need_reward": "0",
  418. # "both_ad": "0",
  419. # "reward_uin_count": "0",
  420. # "send_time": "",
  421. # "msg_daily_idx": "1",
  422. # "is_original": "0",
  423. # "is_only_read": "1",
  424. # "req_id": "",
  425. # "pass_ticket": "MPA2Yy1dnOo6JSfV1DNWyJcLBO9dwupvcgkQj6sXOo3puAQKD7t4Odst6kRxfmUc",
  426. # "is_temp_url": "0",
  427. # "item_show_type": "5",
  428. # "tmp_version": "1",
  429. # "more_read_type": "0",
  430. # "appmsg_like_type": "2",
  431. # "related_video_sn": "",
  432. # "related_video_num": "5",
  433. # "vid": "wxv_2520118281538846720",
  434. # "is_pay_subscribe": "0",
  435. # "pay_subscribe_uin_count": "0",
  436. # "has_red_packet_cover": "0",
  437. # "album_id": "1296223588617486300",
  438. # "album_video_num": "5",
  439. # "cur_album_id": "",
  440. # "is_public_related_video": "0",
  441. # "encode_info_by_base64": "1",
  442. # "exptype": ""
  443. # }
  444. urllib3.disable_warnings()
  445. response = requests.post(url=url, headers=headers, cookies=cookies, params=query, data=body,
  446. proxies=proxies, verify=False)
  447. if "related_tag_video" not in response.json():
  448. Common.logger(log_type).warning("response:{}\n", response.text)
  449. elif len(response.json()["related_tag_video"]) == 0:
  450. Common.logger(log_type).warning("response:{}\n", response.text)
  451. # time.sleep(10)
  452. # cls.get_recommend(log_type)
  453. else:
  454. feeds = response.json()["related_tag_video"]
  455. for m in range(len(feeds)):
  456. # video_title
  457. if "title" not in feeds[m]:
  458. video_title = 0
  459. else:
  460. video_title = feeds[m]["title"]
  461. # video_title = base64.b64decode(video_title).decode("utf-8")
  462. # video_id
  463. if "vid" not in feeds[m]:
  464. video_id = 0
  465. else:
  466. video_id = feeds[m]["vid"]
  467. # play_cnt
  468. if "read_num" not in feeds[m]:
  469. play_cnt = 0
  470. else:
  471. play_cnt = feeds[m]["read_num"]
  472. # like_cnt
  473. if "like_num" not in feeds[m]:
  474. like_cnt = 0
  475. else:
  476. like_cnt = feeds[m]["like_num"]
  477. # duration
  478. if "duration" not in feeds[m]:
  479. duration = 0
  480. else:
  481. duration = feeds[m]["duration"]
  482. # video_width / video_height
  483. if "videoWidth" not in feeds[m] or "videoHeight" not in feeds[m]:
  484. video_width = 0
  485. video_height = 0
  486. else:
  487. video_width = feeds[m]["videoWidth"]
  488. video_height = feeds[m]["videoHeight"]
  489. # send_time
  490. if "pubTime" not in feeds[m]:
  491. send_time = 0
  492. else:
  493. send_time = feeds[m]["pubTime"]
  494. # user_name
  495. if "srcDisplayName" not in feeds[m]:
  496. user_name = 0
  497. else:
  498. user_name = feeds[m]["srcDisplayName"]
  499. # user_name = base64.b64decode(user_name).decode("utf-8")
  500. # user_id
  501. if "srcUserName" not in feeds[m]:
  502. user_id = 0
  503. else:
  504. user_id = feeds[m]["srcUserName"]
  505. # head_url
  506. if "head_img_url" not in feeds[m]:
  507. head_url = 0
  508. else:
  509. head_url = feeds[m]["head_img_url"]
  510. # cover_url
  511. if "cover" not in feeds[m]:
  512. cover_url = 0
  513. else:
  514. cover_url = feeds[m]["cover"]
  515. # video_url
  516. if "url" not in feeds[m]:
  517. video_url = 0
  518. else:
  519. video_url = feeds[m]["url"]
  520. # 下载链接
  521. download_url = cls.get_url(log_type, video_url)
  522. Common.logger(log_type).info("video_title:{}", video_title)
  523. Common.logger(log_type).info("video_id:{}", video_id)
  524. Common.logger(log_type).info("play_cnt:{}", play_cnt)
  525. Common.logger(log_type).info("like_cnt:{}", like_cnt)
  526. Common.logger(log_type).info("duration:{}", duration)
  527. Common.logger(log_type).info("video_width:{}", video_width)
  528. Common.logger(log_type).info("video_height:{}", video_height)
  529. Common.logger(log_type).info("send_time:{}", send_time)
  530. Common.logger(log_type).info("user_name:{}", user_name)
  531. Common.logger(log_type).info("user_id:{}", user_id)
  532. Common.logger(log_type).info("head_url:{}", head_url)
  533. Common.logger(log_type).info("cover_url:{}", cover_url)
  534. Common.logger(log_type).info("video_url:{}", video_url)
  535. Common.logger(log_type).info("download_url:{}", download_url)
  536. if video_id == 0 or video_title == 0 or duration == 0 or video_url == 0:
  537. Common.logger(log_type).info("无效视频\n")
  538. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in
  539. y]:
  540. Common.logger(log_type).info("该视频已下载\n")
  541. elif str(video_id) in [x for y in Feishu.get_values_batch(log_type, "gzh", "zWKFGb") for x in
  542. y]:
  543. Common.logger(log_type).info("该视频已在feeds中\n")
  544. else:
  545. Feishu.insert_columns(log_type, "gzh", "zWKFGb", "ROWS", 1, 2)
  546. get_feeds_time = int(time.time())
  547. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(get_feeds_time)),
  548. "推荐榜",
  549. video_title,
  550. str(video_id),
  551. play_cnt,
  552. like_cnt,
  553. duration,
  554. str(video_width) + "*" + str(video_height),
  555. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(send_time)),
  556. user_name,
  557. user_id,
  558. head_url,
  559. cover_url,
  560. video_url,
  561. download_url
  562. ]]
  563. time.sleep(1)
  564. Feishu.update_values(log_type, "gzh", "zWKFGb", "D2:T2", values)
  565. Common.logger(log_type).info("添加至recommend_feeds成功\n")
  566. except Exception as e:
  567. Common.logger(log_type).error("get_recommend_by_token异常:{}", e)
  568. # 获取视频下载链接
  569. @classmethod
  570. def get_url(cls, log_type, url):
  571. try:
  572. payload = {}
  573. headers = {
  574. 'Cookie': 'rewardsn=; wxtokenkey=777'
  575. }
  576. urllib3.disable_warnings()
  577. response = requests.get(url=url, headers=headers, data=payload, verify=False, proxies=proxies)
  578. response_list = response.text.splitlines()
  579. video_url_list = []
  580. for m in response_list:
  581. if "mpvideo.qpic.cn" in m:
  582. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  583. video_url_list.append(video_url)
  584. video_url = video_url_list[0]
  585. return video_url
  586. except Exception as e:
  587. Common.logger(log_type).error("get_url异常:{}", e)
  588. # 下载/上传
  589. @classmethod
  590. def download_publish(cls, log_type, env):
  591. try:
  592. recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
  593. for i in range(1, len(recommend_feeds_sheet)):
  594. download_video_title = recommend_feeds_sheet[i][5]
  595. download_video_id = recommend_feeds_sheet[i][6]
  596. download_video_play_cnt = recommend_feeds_sheet[i][7]
  597. download_video_like_cnt = recommend_feeds_sheet[i][8]
  598. download_video_duration = recommend_feeds_sheet[i][9]
  599. download_width_height = recommend_feeds_sheet[i][10]
  600. download_video_send_time = recommend_feeds_sheet[i][11]
  601. download_user_name = recommend_feeds_sheet[i][12]
  602. download_user_id = recommend_feeds_sheet[i][13]
  603. download_head_url = recommend_feeds_sheet[i][14]
  604. download_cover_url = recommend_feeds_sheet[i][15]
  605. download_video_url = recommend_feeds_sheet[i][17]
  606. download_video_comment_cnt = 0
  607. download_video_share_cnt = 0
  608. Common.logger(log_type).info("正在判断第{}行", i + 1)
  609. Common.logger(log_type).info("download_video_title:{}", download_video_title)
  610. Common.logger(log_type).info("download_video_id:{}", download_video_id)
  611. Common.logger(log_type).info("download_video_play_cnt:{}", download_video_play_cnt)
  612. Common.logger(log_type).info("download_video_duration:{}", download_video_duration)
  613. Common.logger(log_type).info("download_video_send_time:{}", download_video_send_time)
  614. Common.logger(log_type).info("download_video_url:{}\n", download_video_url)
  615. # Common.logger(log_type).info("download_video_like_cnt:{}", download_video_like_cnt)
  616. # Common.logger(log_type).info("download_width_height:{}", download_width_height)
  617. # Common.logger(log_type).info("download_user_name:{}", download_user_name)
  618. # Common.logger(log_type).info("download_user_id:{}", download_user_id)
  619. # Common.logger(log_type).info("download_head_url:{}", download_head_url)
  620. # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
  621. # 过滤空行
  622. if download_video_id is None or download_video_title is None or download_video_play_cnt is None:
  623. Common.logger(log_type).warning("空行,略过\n")
  624. # # 过滤敏感词
  625. # elif any(word if word in download_video_title else False for word in
  626. # cls.sensitive_words(log_type)) is True:
  627. # Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
  628. # Common.logger(log_type).info("视频已中敏感词,删除成功\n")
  629. # return
  630. # # 下载规则
  631. # elif cls.download_rule(download_video_share_cnt, download_video_play_cnt) is False:
  632. # Feishu.dimension_range(log_type, "music_album", "69UxPo", "ROWS", i + 1, i + 1)
  633. # Common.logger(log_type).info("不满足下载规则,删除成功\n")
  634. # return
  635. # 时长小于 60s,删除
  636. elif int(download_video_duration) < 60:
  637. Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
  638. Common.logger(log_type).info("时长{}<60,删除成功\n", download_video_duration)
  639. return
  640. # 已下载视频表去重
  641. elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "gzh", "fCs3BT")
  642. for n in m]:
  643. Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
  644. Common.logger(log_type).info("该视频在公众号中已下载,删除成功\n")
  645. return
  646. # 看一看已下载表去重
  647. elif str(download_video_id) in [n for m in Feishu.get_values_batch(log_type, "kanyikan", "20ce0c")
  648. for n in m]:
  649. Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
  650. Common.logger(log_type).info("该视频在看一看中已下载,删除成功\n")
  651. return
  652. else:
  653. # 下载封面
  654. Common.download_method(log_type=log_type, text="cover",
  655. d_name=str(download_video_title), d_url=str(download_cover_url))
  656. # 下载视频
  657. Common.download_method(log_type=log_type, text="video",
  658. d_name=str(download_video_title), d_url=str(download_video_url))
  659. # 保存视频信息至 "./videos/{download_video_title}/info.txt"
  660. with open("./crawler_gzh/videos/" + download_video_title + "/" + "info.txt",
  661. "a", encoding="UTF-8") as f_a:
  662. f_a.write(str(download_video_id) + "\n" +
  663. str(download_video_title) + "\n" +
  664. str(download_video_duration) + "\n" +
  665. str(download_video_play_cnt) + "\n" +
  666. str(download_video_comment_cnt) + "\n" +
  667. str(download_video_like_cnt) + "\n" +
  668. str(download_video_share_cnt) + "\n" +
  669. str(download_width_height) + "\n" +
  670. str(int(time.mktime(
  671. time.strptime(download_video_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
  672. str(download_user_name) + "\n" +
  673. str(download_head_url) + "\n" +
  674. str(download_video_url) + "\n" +
  675. str(download_cover_url) + "\n" +
  676. "gzh")
  677. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  678. # 上传视频
  679. Common.logger(log_type).info("开始上传视频:{}".format(download_video_title))
  680. our_video_id = Publish.upload_and_publish(log_type, env, "play")
  681. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  682. Common.logger(log_type).info("视频上传完成:{}", download_video_title)
  683. # 保存视频 ID 到云文档
  684. Common.logger(log_type).info("保存视频ID至云文档:{}", download_video_title)
  685. # 视频ID工作表,插入首行
  686. Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
  687. # 视频ID工作表,首行写入数据
  688. upload_time = int(time.time())
  689. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  690. "推荐榜",
  691. str(download_video_title),
  692. str(download_video_id),
  693. our_video_link,
  694. download_video_play_cnt,
  695. download_video_like_cnt,
  696. download_video_duration,
  697. str(download_width_height),
  698. str(download_video_send_time),
  699. str(download_user_name),
  700. str(download_user_id),
  701. str(download_head_url),
  702. str(download_cover_url),
  703. str(download_video_url)]]
  704. time.sleep(1)
  705. Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
  706. # 删除行或列,可选 ROWS、COLUMNS
  707. Feishu.dimension_range(log_type, "gzh", "zWKFGb", "ROWS", i + 1, i + 1)
  708. Common.logger(log_type).info("视频:{},下载/上传成功\n", download_video_title)
  709. return
  710. except Exception as e:
  711. Common.logger(log_type).error("download_publish异常:{}", e)
  712. # 执行下载/上传
  713. @classmethod
  714. def run_download_publish(cls, log_type, env):
  715. try:
  716. while True:
  717. recommend_feeds_sheet = Feishu.get_values_batch(log_type, "gzh", "zWKFGb")
  718. if len(recommend_feeds_sheet) == 1:
  719. Common.logger(log_type).info("下载/上传完成")
  720. break
  721. else:
  722. cls.download_publish(log_type, env)
  723. except Exception as e:
  724. Common.logger(log_type).error("run_download_publish异常:{}", e)
  725. if __name__ == "__main__":
  726. Recommend.get_recommend_by_token("recommend")
  727. # Recommend.download_publish("recommend")
  728. # Recommend.run_download_publish("recommend", "dev")
  729. # print(Recommend.get_token_v2("recommend"))
  730. # print(token)