# gzh.py (22 KB)

# (extraction artifact removed: concatenated line-number index from the paste tool)
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/8/16
  4. import os
  5. import random
  6. import sys
  7. import time
  8. import ffmpeg
  9. import requests
  10. import urllib3
  11. sys.path.append(os.getcwd())
  12. from main.common import Common
  13. from main.feishu_lib import Feishu
  14. from main.publish import Publish
class GZH:
    # Pagination offset for the article-list API (sent as the "begin" param,
    # advanced by 5 per page in get_gzh_url).
    begin = 0
    # Article IDs collected for the current user; caps the per-user crawl
    # at 3 articles (see get_gzh_url), then reset to [].
    gzh_count = []
  20. # 获取已下载视频宽高、时长等信息
  21. @classmethod
  22. def get_video_info_from_local(cls, video_path):
  23. probe = ffmpeg.probe(video_path)
  24. # print('video_path: {}'.format(video_path))
  25. # format1 = probe['format']
  26. # bit_rate = int(format1['bit_rate']) / 1000
  27. # duration = format['duration']
  28. # size = int(format1['size']) / 1024 / 1024
  29. video_stream = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None)
  30. if video_stream is None:
  31. print('No video stream found!')
  32. return
  33. width = int(video_stream['width'])
  34. height = int(video_stream['height'])
  35. # num_frames = int(video_stream['nb_frames'])
  36. # fps = int(video_stream['r_frame_rate'].split('/')[0]) / int(video_stream['r_frame_rate'].split('/')[1])
  37. duration = float(video_stream['duration'])
  38. # print('width: {}'.format(width))
  39. # print('height: {}'.format(height))
  40. # print('num_frames: {}'.format(num_frames))
  41. # print('bit_rate: {}k'.format(bit_rate))
  42. # print('fps: {}'.format(fps))
  43. # print('size: {}MB'.format(size))
  44. # print('duration: {}'.format(duration))
  45. return width, height, duration
  46. # 获取 搜索词/token
  47. @classmethod
  48. def get_cookie_token(cls, log_type, text):
  49. try:
  50. sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
  51. token = sheet[0][1]
  52. cookie = sheet[1][1]
  53. if text == "cookie":
  54. return cookie
  55. elif text == "token":
  56. return token
  57. except Exception as e:
  58. Common.logger(log_type).error("get_cookie_token:{}\n", e)
    # Search official-account info by keyword and write the results back to
    # the user sheet (original note: "勿动" — do not change the sheet layout).
    @classmethod
    def search_user_by_word(cls, log_type):
        """For each keyword row in sheet "pxHL2C", query the WeChat MP
        searchbiz API and write the matched account's fakeid (col C) and
        avatar URL (col D) back into the same row."""
        try:
            sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
            # Rows 0-2 hold credentials/headers; keyword rows start at index 3.
            for i in range(3, len(sheet)):
                word = sheet[i][0]
                # 1-based position of the desired match in the search results.
                index = sheet[i][1]
                url = "https://mp.weixin.qq.com/cgi-bin/searchbiz?"
                headers = {
                    "accept": "*/*",
                    "accept-encoding": "gzip, deflate, br",
                    "accept-language": "zh-CN,zh;q=0.9",
                    "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
                               "t=media/appmsg_edit_v2&action=edit&isNew=1"
                               "&type=77&createType=5&token=1011071554&lang=zh_CN",
                    'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
                    "sec-ch-ua-mobile": "?0",
                    "sec-ch-ua-platform": '"Windows"',
                    "sec-fetch-dest": "empty",
                    "sec-fetch-mode": "cors",
                    "sec-fetch-site": "same-origin",
                    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                                  " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
                    "x-requested-with": "XMLHttpRequest",
                    'cookie': cls.get_cookie_token(log_type, "cookie"),
                }
                params = {
                    "action": "search_biz",
                    "begin": "0",
                    "count": "5",
                    "query": word,
                    "token": cls.get_cookie_token(log_type, "token"),
                    "lang": "zh_CN",
                    "f": "json",
                    "ajax": "1",
                }
                urllib3.disable_warnings()
                r = requests.get(url=url, headers=headers, params=params, verify=False)
                if "list" not in r.json() or len(r.json()["list"]) == 0:
                    Common.logger(log_type).warning("search_user_by_word:{}", r.text)
                else:
                    fakeid = r.json()["list"][int(index)-1]["fakeid"]
                    head_url = r.json()["list"][int(index)-1]["round_head_img"]
                    time.sleep(0.5)
                    Common.logger(log_type).info("获取{}的fakeid成功", word)
                    # Sheet rows are 1-based, hence i+1.
                    Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'C'+str(i+1)+':C'+str(i+1), [[fakeid]])
                    time.sleep(0.5)
                    Common.logger(log_type).info("获取{}的头像成功", word)
                    Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'D'+str(i+1)+':D'+str(i+1), [[head_url]])
            Common.logger(log_type).info("获取所有用户及ID信息完成\n")
        except Exception as e:
            Common.logger(log_type).error("search_user_by_word异常:{}\n", e)
  112. # 获取视频下载链接
  113. @classmethod
  114. def get_url(cls, log_type, url):
  115. try:
  116. payload = {}
  117. headers = {
  118. 'Cookie': 'rewardsn=; wxtokenkey=777'
  119. }
  120. urllib3.disable_warnings()
  121. response = requests.get(url=url, headers=headers, data=payload, verify=False)
  122. # print(response.text)
  123. response_list = response.text.splitlines()
  124. video_url_list = []
  125. for m in response_list:
  126. if "mpvideo.qpic.cn" in m:
  127. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  128. video_url_list.append(video_url)
  129. video_url = video_url_list[0]
  130. return video_url
  131. except Exception as e:
  132. Common.logger(log_type).error("get_url异常:{}\n", e)
    # Fetch the account's article list page by page and append new video
    # articles to the article sheet ("P6GKb3").
    @classmethod
    def get_gzh_url(cls, log_type, username, userid, head_url):
        """Crawl articles for one official account (*userid* is its fakeid).

        Loops over list_ex pages (5 per page, offset ``cls.begin``) until
        either the API returns no items (then sleeps 3-60 min and breaks) or
        3 articles have been collected for this user (then returns).

        NOTE(review): on an exception the loop neither breaks nor returns,
        so a persistent failure retries forever — confirm intended.
        """
        while True:
            try:
                url = "https://mp.weixin.qq.com/cgi-bin/appmsg?"
                headers = {
                    "accept": "*/*",
                    "accept-encoding": "gzip, deflate, br",
                    "accept-language": "zh-CN,zh;q=0.9",
                    "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
                               "t=media/appmsg_edit_v2&action=edit&isNew=1"
                               "&type=77&createType=5&token="+str(cls.get_cookie_token(log_type, "token"))+"&lang=zh_CN",
                    'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
                    "sec-ch-ua-mobile": "?0",
                    "sec-ch-ua-platform": '"Windows"',
                    "sec-fetch-dest": "empty",
                    "sec-fetch-mode": "cors",
                    "sec-fetch-site": "same-origin",
                    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
                                  " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
                    "x-requested-with": "XMLHttpRequest",
                    'cookie': cls.get_cookie_token(log_type, "cookie"),
                }
                params = {
                    "action": "list_ex",
                    # Page offset; advanced by 5 after every request below.
                    "begin": str(cls.begin),
                    "count": "5",
                    "fakeid": userid,
                    "type": "9",
                    "query": "",
                    "token": str(cls.get_cookie_token(log_type, "token")),
                    "lang": "zh_CN",
                    "f": "json",
                    "ajax": "1",
                }
                urllib3.disable_warnings()
                r = requests.get(url=url, headers=headers, params=params, verify=False)
                cls.begin += 5
                if 'app_msg_list' not in r.json() or len(r.json()['app_msg_list']) == 0:
                    # No more results (or throttled): back off 3-60 minutes.
                    Common.logger(log_type).warning("get_gzh_url:response:{}\n", r.text)
                    Common.logger(log_type).info('休眠 3 - 60 分钟')
                    time.sleep(random.randint(60*3, 60*60))
                    break
                else:
                    app_msg_list = r.json()['app_msg_list']
                    for gzh_url in app_msg_list:
                        # Missing fields default to 0 (sentinel for "absent").
                        # title
                        if 'title' in gzh_url:
                            title = gzh_url['title']
                        else:
                            title = 0
                        # aid — article ID, used for dedupe below
                        if 'aid' in gzh_url:
                            aid = gzh_url['aid']
                        else:
                            aid = 0
                        # create_time — epoch seconds
                        if 'create_time' in gzh_url:
                            create_time = gzh_url['create_time']
                        else:
                            create_time = 0
                        # duration — seconds
                        if 'duration' in gzh_url:
                            duration = gzh_url['duration']
                        else:
                            duration = 0
                        # cover_url
                        if 'cover' in gzh_url:
                            cover_url = gzh_url['cover']
                        else:
                            cover_url = 0
                        # gzh_url — NOTE(review): the loop variable (a dict)
                        # is rebound here to the article's link string.
                        if 'link' in gzh_url:
                            gzh_url = gzh_url['link']
                        else:
                            gzh_url = 0
                        play_cnt = 0
                        like_cnt = 0
                        video_url = cls.get_url(log_type, gzh_url)
                        Common.logger(log_type).info("title:{}", title)
                        Common.logger(log_type).info("aid:{}", aid)
                        Common.logger(log_type).info("create_time:{}", create_time)
                        Common.logger(log_type).info("duration:{}", duration)
                        Common.logger(log_type).info("cover_url:{}", cover_url)
                        Common.logger(log_type).info("gzh_url:{}", gzh_url)
                        # Skip articles without a link
                        if gzh_url == 0:
                            Common.logger(log_type).info("无效文章\n")
                        # Skip videos shorter than 60 seconds
                        elif int(duration) < 60:
                            Common.logger(log_type).info("时长:{}<60秒\n", duration)
                        # Dedupe against the downloaded-videos sheet
                        elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in y]:
                            Common.logger(log_type).info("文章已下载\n")
                        # Dedupe against the article sheet
                        elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "P6GKb3") for x in y]:
                            Common.logger(log_type).info("文章已存在\n")
                        else:
                            # Count this article toward the per-user cap
                            cls.gzh_count.append(aid)
                            # Insert a fresh row at the top of the sheet
                            upload_time = time.time()
                            Feishu.insert_columns(log_type, 'gzh', 'P6GKb3', 'ROWS', 1, 2)
                            # Write the crawled article into the new row
                            values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                                       '公众号',
                                       title,
                                       str(aid),
                                       play_cnt,
                                       like_cnt,
                                       duration,
                                       "宽*高",
                                       time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(create_time)),
                                       username,
                                       userid,
                                       head_url,
                                       cover_url,
                                       gzh_url,
                                       video_url]]
                            time.sleep(1)
                            Feishu.update_values(log_type, 'gzh', 'P6GKb3', 'F2:W2', values)
                            Common.logger(log_type).info("文章写入文档成功\n")
                            # Per-user cap: stop after 3 collected articles
                            if len(cls.gzh_count) >= 3:
                                Common.logger(log_type).info("当前用户已抓取:{}条数据\n", len(cls.gzh_count))
                                cls.gzh_count = []
                                return
                    # Throttle between pages
                    time.sleep(60*3)
            except Exception as e:
                Common.logger(log_type).error("get_gzh_url异常:{}\n", e)
  264. # 获取所有用户的公众号文章信息
  265. @classmethod
  266. def get_all_gzh(cls, log_type, env):
  267. try:
  268. user_sheet = Feishu.get_values_batch(log_type, 'gzh', 'pxHL2C')
  269. for i in range(3, len(user_sheet)):
  270. username = user_sheet[i][0]
  271. userid = user_sheet[i][2]
  272. head_url = user_sheet[i][3]
  273. Common.logger(log_type).info("获取 {} 公众号文章\n", username)
  274. cls.get_gzh_url(log_type, username, userid, head_url)
  275. Common.logger(log_type).info("下载/上传 {} 公众号视频\n", username)
  276. cls.run_download_publish(log_type, env)
  277. Common.logger(log_type).info("休眠 3 - 30 分钟")
  278. time.sleep(random.randint(60*3, 60*30))
  279. except Exception as e:
  280. Common.logger(log_type).error("get_all_gzh异常:{}\n", e)
    # Download and publish one article from the article sheet.
    @classmethod
    def download_publish(cls, log_type, env):
        """Process one row of sheet "P6GKb3": download its cover and video,
        write info.txt, publish via Publish, record the result in the
        downloaded sheet ("fCs3BT") and delete the processed row.

        Every branch returns, so at most one row is handled per call; the
        caller (run_download_publish) loops until the sheet is drained.
        """
        try:
            gzh_sheet = Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')
            for i in range(1, len(gzh_sheet)):
                # Sanitize the title for use as a directory name.
                # NOTE(review): '“' is replaced twice — the second
                # .replace('“', '…') is dead code; presumably '”' was meant.
                download_title = gzh_sheet[i][7].strip().replace('"', '')\
                    .replace('“', '').replace('“', '…').replace("\n", "") \
                    .replace("/", "").replace("\r", "").replace("#", "") \
                    .replace(".", "。").replace("\\", "").replace("&NBSP", "") \
                    .replace(":", "").replace("*", "").replace("?", "") \
                    .replace("?", "").replace('"', "").replace("<", "") \
                    .replace(">", "").replace("|", "").replace(" ", "")
                download_vid = gzh_sheet[i][8]
                download_play_cnt = gzh_sheet[i][9]
                download_like_cnt = gzh_sheet[i][10]
                download_duration = gzh_sheet[i][11]
                download_send_time = gzh_sheet[i][13]
                download_username = gzh_sheet[i][14]
                download_userid = gzh_sheet[i][15]
                download_head_url = gzh_sheet[i][16]
                download_cover_url = gzh_sheet[i][17]
                download_video_url = gzh_sheet[i][19]
                # Comment/share counts are not crawled; recorded as 0.
                download_video_comment_cnt = 0
                download_video_share_cnt = 0
                Common.logger(log_type).info("download_title:{}", download_title)
                Common.logger(log_type).info("download_send_time:{}", download_send_time)
                Common.logger(log_type).info("download_username:{}", download_username)
                Common.logger(log_type).info("download_video_url:{}", download_video_url)
                # Empty row: delete it and stop this pass.
                if download_video_url is None or download_title is None:
                    Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i+1, i+1)
                    Common.logger(log_type).info("空行,删除成功\n")
                    return
                # Already downloaded (by video ID): delete the row.
                elif str(download_vid) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
                    Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
                    Common.logger(log_type).info("视频已下载\n")
                    return
                # Already downloaded (by title): delete the row.
                elif str(download_title) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
                    Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
                    Common.logger(log_type).info("视频已下载\n")
                    return
                else:
                    # Download cover image
                    Common.download_method(log_type=log_type, text="cover",
                                           d_name=str(download_title), d_url=str(download_cover_url))
                    # Download video file
                    Common.download_method(log_type=log_type, text="video",
                                           d_name=str(download_title), d_url=str(download_video_url))
                    # Probe the downloaded file for width/height
                    video_info = cls.get_video_info_from_local("./videos/" + download_title + "/video.mp4")
                    download_video_resolution = str(video_info[0]) + "*" + str(video_info[1])
                    # Write video metadata to "./videos/{title}/info.txt"
                    # (one field per line, in the order Publish expects).
                    with open("./videos/" + download_title
                              + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
                        f_a.write(str(download_vid) + "\n" +
                                  str(download_title) + "\n" +
                                  str(int(download_duration)) + "\n" +
                                  str(download_play_cnt) + "\n" +
                                  str(download_video_comment_cnt) + "\n" +
                                  str(download_like_cnt) + "\n" +
                                  str(download_video_share_cnt) + "\n" +
                                  str(download_video_resolution) + "\n" +
                                  str(int(time.mktime(
                                      time.strptime(download_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
                                  str(download_username) + "\n" +
                                  str(download_head_url) + "\n" +
                                  str(download_video_url) + "\n" +
                                  str(download_cover_url) + "\n" +
                                  "gongzhonghao\n")
                    Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
                    # Publish the video
                    Common.logger(log_type).info("开始上传视频:{}".format(download_title))
                    our_video_id = Publish.upload_and_publish(log_type, env, "play")
                    our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
                    Common.logger(log_type).info("视频上传完成:{}", download_title)
                    # Record the published video in the downloaded sheet
                    Common.logger(log_type).info("保存视频ID至云文档:{}", download_title)
                    # Insert a fresh top row in the downloaded sheet
                    Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
                    # Fill the new row
                    upload_time = int(time.time())
                    values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                               "公众号",
                               str(download_title),
                               str(download_vid),
                               our_video_link,
                               download_play_cnt,
                               download_like_cnt,
                               download_duration,
                               str(download_video_resolution),
                               str(download_send_time),
                               str(download_username),
                               str(download_userid),
                               str(download_head_url),
                               str(download_cover_url),
                               str(download_video_url)]]
                    time.sleep(1)
                    Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
                    # Delete the processed row from the article sheet
                    Feishu.dimension_range(log_type, "gzh", "P6GKb3", "ROWS", i + 1, i + 1)
                    Common.logger(log_type).info("视频:{},下载/上传成功\n", download_title)
                    return
        except Exception as e:
            Common.logger(log_type).error("download_publish异常:{}\n", e)
            # NOTE(review): best-effort recovery — unconditionally deletes
            # sheet row 2, assuming the failing row is the top data row.
            Feishu.dimension_range(log_type, "gzh", "P6GKb3", "ROWS", 2, 2)
  396. # 执行下载/上传
  397. @classmethod
  398. def run_download_publish(cls, log_type, env):
  399. try:
  400. while True:
  401. time.sleep(1)
  402. if len(Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')) == 1:
  403. Common.logger(log_type).info("下载/上传完成\n")
  404. break
  405. else:
  406. cls.download_publish(log_type, env)
  407. except Exception as e:
  408. Common.logger(log_type).error("run_download_publish异常:{}\n", e)
if __name__ == "__main__":
    # Ad-hoc entry point; the other crawl stages are left commented out for
    # manual testing of individual steps.
    # GZH.search_user_by_word("gzh")
    # GZH.get_all_gzh('gzh')
    # GZH.download_publish('gzh', 'dev')
    # print(GZH.get_cookie_token('gzh', 'token'))
    GZH.get_gzh_url('gzh', '何静同学', 'MzkyODMzODQ2Mg==', 'http://mmbiz.qpic.cn/mmbiz_png/go7km0I9Dg3NTxRdMs8MIC6DricCibEdH3OVnEFLmspaVB67iaLdje4lCHFsdjqdXpelf5EicPwHfLWibHWCg5R5urg/0?wx_fmt=png')