# gzh.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/8/16
  4. import os
  5. import random
  6. import sys
  7. import time
  8. import ffmpeg
  9. import requests
  10. import urllib3
  11. sys.path.append(os.getcwd())
  12. from main.common import Common
  13. from main.feishu_lib import Feishu
  14. from main.publish import Publish
  15. class GZH:
  16. # 翻页参数
  17. begin = 0
  18. # 每个用户抓取文章数量
  19. gzh_count = []
  20. # 获取已下载视频宽高、时长等信息
  21. @classmethod
  22. def get_video_info_from_local(cls, video_path):
  23. probe = ffmpeg.probe(video_path)
  24. # print('video_path: {}'.format(video_path))
  25. # format1 = probe['format']
  26. # bit_rate = int(format1['bit_rate']) / 1000
  27. # duration = format['duration']
  28. # size = int(format1['size']) / 1024 / 1024
  29. video_stream = next((stream for stream in probe['streams'] if stream['codec_type'] == 'video'), None)
  30. if video_stream is None:
  31. print('No video stream found!')
  32. return
  33. width = int(video_stream['width'])
  34. height = int(video_stream['height'])
  35. # num_frames = int(video_stream['nb_frames'])
  36. # fps = int(video_stream['r_frame_rate'].split('/')[0]) / int(video_stream['r_frame_rate'].split('/')[1])
  37. duration = float(video_stream['duration'])
  38. # print('width: {}'.format(width))
  39. # print('height: {}'.format(height))
  40. # print('num_frames: {}'.format(num_frames))
  41. # print('bit_rate: {}k'.format(bit_rate))
  42. # print('fps: {}'.format(fps))
  43. # print('size: {}MB'.format(size))
  44. # print('duration: {}'.format(duration))
  45. return width, height, duration
  46. # 获取 搜索词/token
  47. @classmethod
  48. def get_cookie_token(cls, log_type, text):
  49. try:
  50. sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
  51. token = sheet[0][1]
  52. cookie = sheet[1][1]
  53. if text == "cookie":
  54. return cookie
  55. elif text == "token":
  56. return token
  57. except Exception as e:
  58. Common.logger(log_type).error("get_cookie_token:{}\n", e)
  59. # 根据关键字搜索 UP 主信息,并写入电影票(勿动)
  60. @classmethod
  61. def search_user_by_word(cls, log_type):
  62. try:
  63. sheet = Feishu.get_values_batch(log_type, "gzh", "pxHL2C")
  64. for i in range(3, len(sheet)):
  65. word = sheet[i][0]
  66. index = sheet[i][1]
  67. url = "https://mp.weixin.qq.com/cgi-bin/searchbiz?"
  68. headers = {
  69. "accept": "*/*",
  70. "accept-encoding": "gzip, deflate, br",
  71. "accept-language": "zh-CN,zh;q=0.9",
  72. "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  73. "t=media/appmsg_edit_v2&action=edit&isNew=1"
  74. "&type=77&createType=5&token=1011071554&lang=zh_CN",
  75. 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
  76. "sec-ch-ua-mobile": "?0",
  77. "sec-ch-ua-platform": '"Windows"',
  78. "sec-fetch-dest": "empty",
  79. "sec-fetch-mode": "cors",
  80. "sec-fetch-site": "same-origin",
  81. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
  82. " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
  83. "x-requested-with": "XMLHttpRequest",
  84. 'cookie': cls.get_cookie_token(log_type, "cookie"),
  85. }
  86. params = {
  87. "action": "search_biz",
  88. "begin": "0",
  89. "count": "5",
  90. "query": word,
  91. "token": cls.get_cookie_token(log_type, "token"),
  92. "lang": "zh_CN",
  93. "f": "json",
  94. "ajax": "1",
  95. }
  96. urllib3.disable_warnings()
  97. r = requests.get(url=url, headers=headers, params=params, verify=False)
  98. if "list" not in r.json() or len(r.json()["list"]) == 0:
  99. Common.logger(log_type).warning("search_user_by_word:{}", r.text)
  100. else:
  101. fakeid = r.json()["list"][int(index)-1]["fakeid"]
  102. head_url = r.json()["list"][int(index)-1]["round_head_img"]
  103. time.sleep(0.5)
  104. Common.logger(log_type).info("获取{}的fakeid成功", word)
  105. Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'C'+str(i+1)+':C'+str(i+1), [[fakeid]])
  106. time.sleep(0.5)
  107. Common.logger(log_type).info("获取{}的头像成功", word)
  108. Feishu.update_values(log_type, 'gzh', 'pxHL2C', 'D'+str(i+1)+':D'+str(i+1), [[head_url]])
  109. Common.logger(log_type).info("获取所有用户及ID信息完成\n")
  110. except Exception as e:
  111. Common.logger(log_type).error("search_user_by_word异常:{}\n", e)
  112. # 获取视频下载链接
  113. @classmethod
  114. def get_url(cls, log_type, url):
  115. try:
  116. payload = {}
  117. headers = {
  118. 'Cookie': 'rewardsn=; wxtokenkey=777'
  119. }
  120. urllib3.disable_warnings()
  121. response = requests.get(url=url, headers=headers, data=payload, verify=False)
  122. # print(response.text)
  123. response_list = response.text.splitlines()
  124. video_url_list = []
  125. for m in response_list:
  126. if "mpvideo.qpic.cn" in m:
  127. video_url = m.split("url: '")[1].split("',")[0].replace(r"\x26amp;", "&")
  128. video_url_list.append(video_url)
  129. video_url = video_url_list[0]
  130. return video_url
  131. except Exception as e:
  132. Common.logger(log_type).error("get_url异常:{}\n", e)
  133. # 获取公众号文章信息,并写入文章列表
  134. @classmethod
  135. def get_gzh_url(cls, log_type, username, userid, head_url):
  136. while True:
  137. try:
  138. url = "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  139. headers = {
  140. "accept": "*/*",
  141. "accept-encoding": "gzip, deflate, br",
  142. "accept-language": "zh-CN,zh;q=0.9",
  143. "referer": "https://mp.weixin.qq.com/cgi-bin/appmsg?"
  144. "t=media/appmsg_edit_v2&action=edit&isNew=1"
  145. "&type=77&createType=5&token=1011071554&lang=zh_CN",
  146. 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="100", "Google Chrome";v="100"',
  147. "sec-ch-ua-mobile": "?0",
  148. "sec-ch-ua-platform": '"Windows"',
  149. "sec-fetch-dest": "empty",
  150. "sec-fetch-mode": "cors",
  151. "sec-fetch-site": "same-origin",
  152. "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
  153. " (KHTML, like Gecko) Chrome/100.0.4896.127 Safari/537.36",
  154. "x-requested-with": "XMLHttpRequest",
  155. 'cookie': cls.get_cookie_token(log_type, "cookie"),
  156. }
  157. params = {
  158. "action": "list_ex",
  159. "begin": str(cls.begin),
  160. "count": "5",
  161. "fakeid": userid,
  162. "type": "9",
  163. "query": "",
  164. "token": cls.get_cookie_token(log_type, "token"),
  165. "lang": "zh_CN",
  166. "f": "json",
  167. "ajax": "1",
  168. }
  169. urllib3.disable_warnings()
  170. r = requests.get(url=url, headers=headers, params=params, verify=False)
  171. cls.begin += 5
  172. if 'app_msg_list' not in r.json() or len(r.json()['app_msg_list']) == 0:
  173. Common.logger(log_type).warning("get_gzh_url:response:{}\n", r.text)
  174. break
  175. else:
  176. app_msg_list = r.json()['app_msg_list']
  177. for gzh_url in app_msg_list:
  178. # print(gzh_url)
  179. # title
  180. if 'title' in gzh_url:
  181. title = gzh_url['title']
  182. else:
  183. title = 0
  184. # aid
  185. if 'aid' in gzh_url:
  186. aid = gzh_url['aid']
  187. else:
  188. aid = 0
  189. # create_time
  190. if 'create_time' in gzh_url:
  191. create_time = gzh_url['create_time']
  192. else:
  193. create_time = 0
  194. # duration
  195. if 'duration' in gzh_url:
  196. duration = gzh_url['duration']
  197. else:
  198. duration = 0
  199. # cover_url
  200. if 'cover' in gzh_url:
  201. cover_url = gzh_url['cover']
  202. else:
  203. cover_url = 0
  204. # gzh_url
  205. if 'link' in gzh_url:
  206. gzh_url = gzh_url['link']
  207. else:
  208. gzh_url = 0
  209. play_cnt = 0
  210. like_cnt = 0
  211. video_url = cls.get_url(log_type, gzh_url)
  212. Common.logger(log_type).info("title:{}", title)
  213. Common.logger(log_type).info("aid:{}", aid)
  214. Common.logger(log_type).info("create_time:{}", create_time)
  215. Common.logger(log_type).info("duration:{}", duration)
  216. Common.logger(log_type).info("cover_url:{}", cover_url)
  217. Common.logger(log_type).info("gzh_url:{}", gzh_url)
  218. # 判断无效文章
  219. if gzh_url == 0:
  220. Common.logger(log_type).info("无效文章\n")
  221. # 时长判断
  222. elif int(duration) < 60:
  223. Common.logger(log_type).info("时长:{}<60秒\n", duration)
  224. # 已下载表去重
  225. elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "fCs3BT") for x in y]:
  226. Common.logger(log_type).info("文章已下载\n")
  227. # 文章去重
  228. elif str(aid) in [x for y in Feishu.get_values_batch(log_type, "gzh", "P6GKb3") for x in y]:
  229. Common.logger(log_type).info("文章已存在\n")
  230. else:
  231. # 已抓取文章列表添加当前文章ID
  232. cls.gzh_count.append(aid)
  233. # 公众号文章表插入行
  234. upload_time = time.time()
  235. Feishu.insert_columns(log_type, 'gzh', 'P6GKb3', 'ROWS', 1, 2)
  236. # 抓取到的文章写入飞书表
  237. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  238. '公众号',
  239. title,
  240. str(aid),
  241. play_cnt,
  242. like_cnt,
  243. duration,
  244. "宽*高",
  245. time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(create_time)),
  246. username,
  247. userid,
  248. head_url,
  249. cover_url,
  250. gzh_url,
  251. video_url]]
  252. time.sleep(1)
  253. Feishu.update_values(log_type, 'gzh', 'P6GKb3', 'F2:W2', values)
  254. Common.logger(log_type).info("文章写入文档成功\n")
  255. if len(cls.gzh_count) >= 10:
  256. Common.logger(log_type).info("当前用户已抓取:{}条数据\n", len(cls.gzh_count))
  257. cls.gzh_count = []
  258. return
  259. except Exception as e:
  260. Common.logger(log_type).error("get_gzh_url异常:{}\n", e)
  261. # 获取所有用户的公众号文章信息
  262. @classmethod
  263. def get_all_gzh(cls, log_type):
  264. try:
  265. user_sheet = Feishu.get_values_batch(log_type, 'gzh', 'pxHL2C')
  266. for i in range(3, len(user_sheet)):
  267. username = user_sheet[i][0]
  268. userid = user_sheet[i][2]
  269. head_url = user_sheet[i][3]
  270. Common.logger(log_type).info("获取 {} 公众号文章\n", username)
  271. cls.get_gzh_url(log_type, username, userid, head_url)
  272. except Exception as e:
  273. Common.logger(log_type).error("get_all_gzh异常:{}\n", e)
  274. # 下载/上传
  275. @classmethod
  276. def download_publish(cls, log_type, env):
  277. try:
  278. gzh_sheet = Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')
  279. for i in range(1, len(gzh_sheet)):
  280. download_title = gzh_sheet[i][7]
  281. download_vid = gzh_sheet[i][8]
  282. download_play_cnt = gzh_sheet[i][9]
  283. download_like_cnt = gzh_sheet[i][10]
  284. download_duration = gzh_sheet[i][11]
  285. download_send_time = gzh_sheet[i][13]
  286. download_username = gzh_sheet[i][14]
  287. download_userid = gzh_sheet[i][15]
  288. download_head_url = gzh_sheet[i][16]
  289. download_cover_url = gzh_sheet[i][17]
  290. download_video_url = gzh_sheet[i][19]
  291. download_video_comment_cnt = 0
  292. download_video_share_cnt = 0
  293. Common.logger(log_type).info("download_title:{}", download_title)
  294. Common.logger(log_type).info("download_send_time:{}", download_send_time)
  295. Common.logger(log_type).info("download_username:{}", download_username)
  296. Common.logger(log_type).info("download_video_url:{}", download_video_url)
  297. # Common.logger(log_type).info("download_vid:{}", download_vid)
  298. # Common.logger(log_type).info("download_play_cnt:{}", download_play_cnt)
  299. # Common.logger(log_type).info("download_like_cnt:{}", download_like_cnt)
  300. # Common.logger(log_type).info("download_duration:{}", download_duration)
  301. # Common.logger(log_type).info("download_userid:{}", download_userid)
  302. # Common.logger(log_type).info("download_head_url:{}", download_head_url)
  303. # Common.logger(log_type).info("download_cover_url:{}", download_cover_url)
  304. # 判断空行
  305. if download_video_url is None or download_title is None:
  306. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i+1, i+1)
  307. Common.logger(log_type).info("空行,删除成功\n")
  308. return
  309. # 已下载判断
  310. elif str(download_vid) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
  311. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
  312. Common.logger(log_type).info("视频已下载\n")
  313. return
  314. # 已下载判断
  315. elif str(download_title) in [x for y in Feishu.get_values_batch(log_type, 'gzh', 'fCs3BT') for x in y]:
  316. Feishu.dimension_range(log_type, 'gzh', 'P6GKb3', 'ROWS', i + 1, i + 1)
  317. Common.logger(log_type).info("视频已下载\n")
  318. return
  319. else:
  320. # 下载封面
  321. Common.download_method(log_type=log_type, text="cover",
  322. d_name=str(download_title), d_url=str(download_cover_url))
  323. # 下载视频
  324. Common.download_method(log_type=log_type, text="video",
  325. d_name=str(download_title), d_url=str(download_video_url))
  326. # 获取视频宽高
  327. video_info = cls.get_video_info_from_local("./videos/" + download_title + "/video.mp4")
  328. download_video_resolution = str(video_info[0]) + "*" + str(video_info[1])
  329. # 保存视频信息至 "./videos/{download_video_title}/info.txt"
  330. with open("./videos/" + download_title
  331. + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
  332. f_a.write(str(download_vid) + "\n" +
  333. str(download_title) + "\n" +
  334. str(int(download_duration)) + "\n" +
  335. str(download_play_cnt) + "\n" +
  336. str(download_video_comment_cnt) + "\n" +
  337. str(download_like_cnt) + "\n" +
  338. str(download_video_share_cnt) + "\n" +
  339. str(download_video_resolution) + "\n" +
  340. str(int(time.mktime(
  341. time.strptime(download_send_time, "%Y/%m/%d %H:%M:%S")))) + "\n" +
  342. str(download_username) + "\n" +
  343. str(download_head_url) + "\n" +
  344. str(download_video_url) + "\n" +
  345. str(download_cover_url) + "\n" +
  346. "benshanzhufu")
  347. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  348. # 上传视频
  349. Common.logger(log_type).info("开始上传视频:{}".format(download_title))
  350. our_video_id = Publish.upload_and_publish(log_type, env, "play")
  351. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  352. Common.logger(log_type).info("视频上传完成:{}", download_title)
  353. # 保存视频 ID 到云文档
  354. Common.logger(log_type).info("保存视频ID至云文档:{}", download_title)
  355. # 视频ID工作表,插入首行
  356. Feishu.insert_columns(log_type, "gzh", "fCs3BT", "ROWS", 1, 2)
  357. # 视频ID工作表,首行写入数据
  358. upload_time = int(time.time())
  359. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  360. "公众号",
  361. str(download_title),
  362. str(download_vid),
  363. our_video_link,
  364. download_play_cnt,
  365. download_like_cnt,
  366. download_duration,
  367. str(download_video_resolution),
  368. str(download_send_time),
  369. str(download_username),
  370. str(download_userid),
  371. str(download_head_url),
  372. str(download_cover_url),
  373. str(download_video_url)]]
  374. time.sleep(1)
  375. Feishu.update_values(log_type, "gzh", "fCs3BT", "D2:W2", values)
  376. # 删除行或列,可选 ROWS、COLUMNS
  377. Feishu.dimension_range(log_type, "gzh", "P6GKb3", "ROWS", i + 1, i + 1)
  378. Common.logger(log_type).info("视频:{},下载/上传成功\n", download_title)
  379. return
  380. except Exception as e:
  381. Common.logger(log_type).error("download_publish异常:{}\n", e)
  382. # 执行下载/上传
  383. @classmethod
  384. def run_download_publish(cls, log_type, env):
  385. try:
  386. while True:
  387. time.sleep(1)
  388. if len(Feishu.get_values_batch(log_type, 'gzh', 'P6GKb3')) == 1:
  389. Common.logger(log_type).info("下载/上传完成\n")
  390. break
  391. else:
  392. cls.download_publish(log_type, env)
  393. except Exception as e:
  394. Common.logger(log_type).error("run_download_publish异常:{}\n", e)
  395. if __name__ == "__main__":
  396. # GZH.search_user_by_word("gzh")
  397. # GZH.get_all_gzh('gzh')
  398. # GZH.download_publish('gzh', 'dev')
  399. # print(GZH.get_cookie_token('gzh', 'token'))
  400. GZH.get_gzh_url('gzh', '何静同学', 'MzkyODMzODQ2Mg==', 'http://mmbiz.qpic.cn/mmbiz_png/go7km0I9Dg3NTxRdMs8MIC6DricCibEdH3OVnEFLmspaVB67iaLdje4lCHFsdjqdXpelf5EicPwHfLWibHWCg5R5urg/0?wx_fmt=png')