  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/8/16
  4. import os
  5. import random
  6. import sys
  7. import time
  8. import ffmpeg
  9. import requests
  10. import urllib3
  11. sys.path.append(os.getcwd())
  12. from main.common import Common
  13. from main.feishu_lib import Feishu
  14. from main.publish import Publish
  15. class GZH:
  16. # 翻页参数
  17. begin = 0
  18. # 每个用户抓取文章数量
  19. gzh_count = []
  20. # 获取已下载视频宽高、时长等信息
  21. @classmethod
  22. def get_video_info_from_local(cls, video_path):
  23. probe = ffmpeg.probe(video_path)
  24. # print('video_path: {}'.format(video_path))
  25. # format1 = probe['format']
  26. # bit_rate = int(format1['bit_rate']) / 1000
  27. # duration = format['duration']
  28. # size = int(format1['size']) / 1024 / 1024
  29. video_stream = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None)
  30. if video_stream is None:
  31. print('No video stream found!')
  32. return
  33. width = int(video_stream['width'])
  34. height = int(video_stream['height'])
  35. # num_frames = int(video_stream['nb_frames'])
  36. # fps = int(video_stream['r_frame_rate'].split('/')[0]) / int(video_stream['r_frame_rate'].split('/')[1])
  37. duration = float(video_stream['duration'])
  38. # print('width: {}'.format(width))
  39. # print('height: {}'.format(height))
  40. # print('num_frames: {}'.format(num_frames))
  41. # print('bit_rate: {}k'.format(bit_rate))
  42. # print('fps: {}'.format(fps))
  43. # print('size: {}MB'.format(size))
  44. # print('duration: {}'.format(duration))
  45. return width, height, duration
  46. # 获取 搜索词/token
  47. @classmethod
  48. def get_cookie_token(cls, log_type, text):
  49. try:
  50. sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
  51. token = sheet[0][1]
  52. cookie = sheet[1][1]
  53. if text == "cookie":
  54. return cookie
  55. elif text == "token":
  56. return token
  57. except Exception as e:
  58. Common.logger(log_type).error("get_cookie_token:{}\n", e)
  59. # 根据关键字搜索 UP 主信息,并写入电影票(勿动)
  60. @classmethod
  61. def search_user_by_word(cls, log_type):
  62. try:
  63. sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
  64. for i in range(3, len(sheet)):
  65. word = sheet[i][0]
  66. index = sheet[i][1]
  67. url = "https://mp.weixin.qq.com/cgi-bin/searchbiz?"
  68. headers = {
  69. "accept": "*/*",
  70. "accept-encoding": "gzip, deflate, br",
  71. "accept-language": "zh-CN,zh;q=0.9",
  72. "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  73. "t=media/appmsg_edit_v2&action=edit&isNew=1"
  74. "&type=77&createType=5&token=1011071554&lang=zh_CN",
  75. 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
  76. "sec-ch-ua-mobile": "?0",
  77. "sec-ch-ua-platform": '"Windows"',
  78. "sec-fetch-dest": "empty",
  79. "sec-fetch-mode": "cors",
  80. "sec-fetch-site": "same-origin",
  81. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
  82. " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
  83. "x-requested-with": "XMLHttpRequest",
  84. 'cookie': cls.get_cookie_token(log_type, "cookie"),
  85. }
  86. params = {
  87. "action": "search_biz",
  88. "begin": "0",
  89. "count": "5",
  90. "query": word,
  91. "token": cls.get_cookie_token(log_type, "token"),
  92. "lang": "zh_CN",
  93. "f": "json",
  94. "ajax": "1",
  95. }
  96. urllib3.disable_warnings()
  97. r = requests.get(url=url, headers=headers, params=params, verify=False)
  98. if "list" not in r.json() or len(r.json()["list"]) == 0:
  99. Common.logger(log_type).warning("search_user_by_word:{}", r.text)
  100. else:
  101. fakeid = r.json()["list"][int(index)-1]["fakeid"]
  102. head_url = r.json()["list"][int(index)-1]["round_head_img"]
  103. time.sleep(0.5)
  104. Common.logger(log_type).info("获取{}的fakeid成功", word)
  105. Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'C'+str(i+1)+':C'+str(i+1), [[fakeid]])
  106. time.sleep(0.5)
  107. Common.logger(log_type).info("获取{}的头像成功", word)
  108. Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'D'+str(i+1)+':D'+str(i+1), [[head_url]])
  109. time.sleep(random.randint(1, 3))
  110. Common.logger(log_type).info("获取所有用户及ID信息完成\n")
  111. except Exception as e:
  112. Common.logger(log_type).error("search_user_by_word异常:{}\n", e)
  113. # 获取视频下载链接
  114. @classmethod
  115. def get_url(cls, log_type, url):
  116. try:
  117. payload = {}
  118. headers = {
  119. 'Cookie': 'rewardsn=; wxtokenkey=777'
  120. }
  121. urllib3.disable_warnings()
  122. response = requests.get(url=url, headers=headers, data=payload, verify=False)
  123. # print(response.text)
  124. response_list = response.text.splitlines()
  125. video_url_list = []
  126. for m in response_list:
  127. if "mpvideo.qpic.cn" in m:
  128. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  129. video_url_list.append(video_url)
  130. video_url = video_url_list[0]
  131. return video_url
  132. except Exception as e:
  133. Common.logger(log_type).error("get_url异常:{}\n", e)
  134. # 获取公众号文章信息,并写入文章列表
  135. @classmethod
  136. def get_gzh_url(cls, log_type, username, userid, head_url):
  137. while True:
  138. try:
  139. url = "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  140. headers = {
  141. "accept": "*/*",
  142. "accept-encoding": "gzip, deflate, br",
  143. "accept-language": "zh-CN,zh;q=0.9",
  144. "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  145. "t=media/appmsg_edit_v2&action=edit&isNew=1"
  146. "&type=77&createType=5&token="+str(cls.get_cookie_token(log_type, "token"))+"&lang=zh_CN",
  147. 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
  148. "sec-ch-ua-mobile": "?0",
  149. "sec-ch-ua-platform": '"Windows"',
  150. "sec-fetch-dest": "empty",
  151. "sec-fetch-mode": "cors",
  152. "sec-fetch-site": "same-origin",
  153. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
  154. " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
  155. "x-requested-with": "XMLHttpRequest",
  156. 'cookie': cls.get_cookie_token(log_type, "cookie"),
  157. }
  158. params = {
  159. "action": "list_ex",
  160. "begin": str(cls.begin),
  161. "count": "5",
  162. "fakeid": userid,
  163. "type": "9",
  164. "query": "",
  165. "token": str(cls.get_cookie_token(log_type, "token")),
  166. "lang": "zh_CN",
  167. "f": "json",
  168. "ajax": "1",
  169. }
  170. urllib3.disable_warnings()
  171. r = requests.get(url=url, headers=headers, params=params, verify=False)
  172. cls.begin += 5
  173. if 'app_msg_list' not in r.json() or len(r.json()['app_msg_list']) == 0:
  174. Common.logger(log_type).warning("get_gzh_url:response:{}\n", r.text)
  175. Common.logger(log_type).info('休眠 60 秒')
  176. break
  177. else:
  178. app_msg_list = r.json()['app_msg_list']
  179. for gzh_url in app_msg_list:
  180. # print(gzh_url)
  181. # title
  182. if 'title' in gzh_url:
  183. title = gzh_url['title']
  184. else:
  185. title = 0
  186. # aid
  187. if 'aid' in gzh_url:
  188. aid = gzh_url['aid']
  189. else:
  190. aid = 0
  191. # create_time
  192. if 'create_time' in gzh_url:
  193. create_time = gzh_url['create_time']
  194. else:
  195. create_time = 0
  196. # duration
  197. if 'duration' in gzh_url:
  198. duration = gzh_url['duration']
  199. else:
  200. duration = 0
  201. # cover_url
  202. if 'cover' in gzh_url:
  203. cover_url = gzh_url['cover']
  204. else:
  205. cover_url = 0
  206. # gzh_url
  207. if 'link' in gzh_url:
  208. gzh_url = gzh_url['link']
  209. else:
  210. gzh_url = 0
  211. play_cnt = 0
  212. like_cnt = 0
  213. video_url = cls.get_url(log_type, gzh_url)
  214. Common.logger(log_type).info("title:{}", title)
  215. Common.logger(log_type).info("aid:{}", aid)
  216. Common.logger(log_type).info("create_time:{}", create_time)
  217. Common.logger(log_type).info("duration:{}", duration)
  218. Common.logger(log_type).info("cover_url:{}", cover_url)
  219. Common.logger(log_type).info("gzh_url:{}", gzh_url)
  220. # 判断无效文章
  221. if gzh_url == 0:
  222. Common.logger(log_type).info("无效文章\n")
  223. elif int(time.time()) - int(create_time) > 3600*24*3:
  224. Common.logger(log_type).info(
  225. "发布时间{}超过 3 天\n", time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(create_time)))
  226. return
  227. # 时长判断
  228. elif int(duration) < 60:
  229. Common.logger(log_type).info("时长:{}<60秒\n", duration)
  230. # 已下载表去重
  231. elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in y]:
  232. Common.logger(log_type).info("文章已下载\n")
  233. # 文章去重
  234. elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "P6GKb3") for x in y]:
  235. Common.logger(log_type).info("文章已存在\n")
  236. else:
  237. # 已抓取文章列表添加当前文章ID
  238. cls.gzh_count.append(aid)
  239. # 公众号文章表插入行
  240. upload_time = time.time()
  241. Feishu.insert_columns(log_type, 'gzh', 'P6GKb3', 'ROWS', 1, 2)
  242. # 抓取到的文章写入飞书表
  243. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  244. '公众号',
  245. title,
  246. str(aid),
  247. play_cnt,
  248. like_cnt,
  249. duration,
  250. "宽*高",
  251. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(create_time)),
  252. username,
  253. userid,
  254. head_url,
  255. cover_url,
  256. gzh_url,
  257. video_url]]
  258. time.sleep(1)
  259. Feishu.update_values(log_type, 'gzh', 'P6GKb3', 'F2:W2', values)
  260. Common.logger(log_type).info("文章写入文档成功\n")
  261. if len(cls.gzh_count) >= 1:
  262. Common.logger(log_type).info("当前用户已抓取:{}条数据\n", len(cls.gzh_count))
  263. cls.gzh_count = []
  264. return
  265. time.sleep(10)
  266. except Exception as e:
  267. Common.logger(log_type).error("get_gzh_url异常:{}\n", e)
  268. # 获取所有用户的公众号文章信息
  269. @classmethod
  270. def get_all_gzh(cls, log_type, env):
  271. try:
  272. user_sheet = Feishu.get_values_batch(log_type, 'gzh', 'pxHL2C')
  273. for i in range(3, len(user_sheet)):
  274. username = user_sheet[i][0]
  275. userid = user_sheet[i][2]
  276. head_url = user_sheet[i][3]
  277. Common.logger(log_type).info("获取 {} 公众号文章\n", username)
  278. cls.get_gzh_url(log_type, username, userid, head_url)
  279. Common.logger(log_type).info("下载/上传 {} 公众号视频\n", username)
  280. cls.run_download_publish(log_type, env)
  281. Common.logger(log_type).info("休眠 3 - 15 分钟")
  282. time.sleep(random.randint(60*3, 60*15))
  283. except Exception as e:
  284. Common.logger(log_type).error("get_all_gzh异常:{}\n", e)
  285. # 下载/上传
  286. @classmethod
  287. def download_publish(cls, log_type, env):
  288. try:
  289. gzh_sheet = Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')
  290. for i in range(1, len(gzh_sheet)):
  291. download_title = gzh_sheet[i][7].strip().replace('"', '')\
  292. .replace('“', '').replace('“', '…').replace("\n", "") \
  293. .replace("/", "").replace("\r", "").replace("#", "") \
  294. .replace(".", "。").replace("\\", "").replace("&NBSP", "") \
  295. .replace(":", "").replace("*", "").replace("?", "") \
  296. .replace("?", "").replace('"', "").replace("<", "") \
  297. .replace(">", "").replace("|", "").replace(" ", "")
  298. download_vid = gzh_sheet[i][8]
  299. download_play_cnt = gzh_sheet[i][9]
  300. download_like_cnt = gzh_sheet[i][10]
  301. download_duration = gzh_sheet[i][11]
  302. download_send_time = gzh_sheet[i][13]
  303. download_username = gzh_sheet[i][14]
  304. download_userid = gzh_sheet[i][15]
  305. download_head_url = gzh_sheet[i][16]
  306. download_cover_url = gzh_sheet[i][17]
  307. download_video_url = gzh_sheet[i][19]
  308. download_video_comment_cnt = 0
  309. download_video_share_cnt = 0
  310. Common.logger(log_type).info("download_title:{}", download_title)
  311. Common.logger(log_type).info("download_send_time:{}", download_send_time)
  312. Common.logger(log_type).info("download_username:{}", download_username)
  313. Common.logger(log_type).info("download_video_url:{}", download_video_url)
  314. # Common.logger(log_type).info("download_vid:{}", download_vid)
  315. # Common.logger(log_type).info("download_play_cnt:{}", download_play_cnt)
  316. # Common.logger(log_type).info("download_like_cnt:{}", download_like_cnt)
  317. # Common.logger(log_type).info("download_duration:{}", download_duration)
  318. # Common.logger(log_type).info("download_userid:{}", download_userid)
  319. # Common.logger(log_type).info("download_head_url:{}", download_head_url)
  320. # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
  321. # 判断空行
  322. if download_video_url is None or download_title is None:
  323. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i+1, i+1)
  324. Common.logger(log_type).info("空行,删除成功\n")
  325. return
  326. # 已下载判断
  327. elif str(download_vid) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
  328. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
  329. Common.logger(log_type).info("视频已下载\n")
  330. return
  331. # 已下载判断
  332. elif str(download_title) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
  333. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
  334. Common.logger(log_type).info("视频已下载\n")
  335. return
  336. else:
  337. # 下载封面
  338. Common.download_method(log_type=log_type, text="cover",
  339. d_name=str(download_title), d_url=str(download_cover_url))
  340. # 下载视频
  341. Common.download_method(log_type=log_type, text="video",
  342. d_name=str(download_title), d_url=str(download_video_url))
  343. # 获取视频宽高
  344. video_info = cls.get_video_info_from_local("./videos/" + download_title + "/video.mp4")
  345. download_video_resolution = str(video_info[0]) + "*" + str(video_info[1])
  346. # 保存视频信息至 "./videos/{download_video_title}/info.txt"
  347. with open("./videos/" + download_title
  348. + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
  349. f_a.write(str(download_vid) + "\n" +
  350. str(download_title) + "\n" +
  351. str(int(download_duration)) + "\n" +
  352. str(download_play_cnt) + "\n" +
  353. str(download_video_comment_cnt) + "\n" +
  354. str(download_like_cnt) + "\n" +
  355. str(download_video_share_cnt) + "\n" +
  356. str(download_video_resolution) + "\n" +
  357. str(int(time.mktime(
  358. time.strptime(download_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
  359. str(download_username) + "\n" +
  360. str(download_head_url) + "\n" +
  361. str(download_video_url) + "\n" +
  362. str(download_cover_url) + "\n" +
  363. "gongzhonghao\n")
  364. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  365. # 上传视频
  366. Common.logger(log_type).info("开始上传视频:{}".format(download_title))
  367. our_video_id = Publish.upload_and_publish(log_type, env, "play")
  368. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  369. Common.logger(log_type).info("视频上传完成:{}", download_title)
  370. # 保存视频 ID 到云文档
  371. Common.logger(log_type).info("保存视频ID至云文档:{}", download_title)
  372. # 视频ID工作表,插入首行
  373. Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
  374. # 视频ID工作表,首行写入数据
  375. upload_time = int(time.time())
  376. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  377. "公众号",
  378. str(download_title),
  379. str(download_vid),
  380. our_video_link,
  381. download_play_cnt,
  382. download_like_cnt,
  383. download_duration,
  384. str(download_video_resolution),
  385. str(download_send_time),
  386. str(download_username),
  387. str(download_userid),
  388. str(download_head_url),
  389. str(download_cover_url),
  390. str(download_video_url)]]
  391. time.sleep(1)
  392. Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
  393. # 删除行或列,可选 ROWS、COLUMNS
  394. Feishu.dimension_range(log_type, "gzh", "P6GKb3", "ROWS", i + 1, i + 1)
  395. Common.logger(log_type).info("视频:{},下载/上传成功\n", download_title)
  396. return
  397. except Exception as e:
  398. Common.logger(log_type).error("download_publish异常:{}\n", e)
  399. Feishu.dimension_range(log_type, "gzh", "P6GKb3", "ROWS", 2, 2)
  400. # 执行下载/上传
  401. @classmethod
  402. def run_download_publish(cls, log_type, env):
  403. try:
  404. while True:
  405. time.sleep(1)
  406. if len(Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')) == 1:
  407. Common.logger(log_type).info("下载/上传完成\n")
  408. break
  409. else:
  410. cls.download_publish(log_type, env)
  411. except Exception as e:
  412. Common.logger(log_type).error("run_download_publish异常:{}\n", e)
  413. if __name__ == "__main__":
  414. # GZH.search_user_by_word("gzh")
  415. # GZH.get_all_gzh('gzh')
  416. # GZH.download_publish('gzh', 'dev')
  417. # print(GZH.get_cookie_token('gzh', 'token'))
  418. GZH.get_gzh_url('gzh', '何静同学', 'MzkyODMzODQ2Mg==', 'http://mmbiz.qpic.cn/mmbiz_png/go7km0I9Dg3NTxRdMs8MIC6DricCibEdH3OVnEFLmspaVB67iaLdje4lCHFsdjqdXpelf5EicPwHfLWibHWCg5R5urg/0?wx_fmt=png')