haokan_channel.py 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/11/23
import os
import sys
import time

import requests
import urllib3

sys.path.append(os.getcwd())
from main.common import Common
from main.feishu_lib import Feishu
from main.get_cookies import GetCookies
from main.haokan_publish import Publish
  14. class Channel:
  15. @classmethod
  16. def filter_words(cls, log_type):
  17. try:
  18. filter_words_sheet = Feishu.get_values_batch(log_type, 'haokan', 'nKgHzp')
  19. filter_words_list = []
  20. for x in filter_words_sheet:
  21. for y in x:
  22. if y is None:
  23. pass
  24. else:
  25. filter_words_list.append(y)
  26. return filter_words_list
  27. except Exception as e:
  28. Common.logger(log_type).error(f'filter_words异常:{e}')
  29. @classmethod
  30. def download_rule(cls, play_cnt, duration):
  31. if int(play_cnt) >= 10000:
  32. if int(duration) >= 30:
  33. return True
  34. else:
  35. return False
  36. else:
  37. return False
  38. @classmethod
  39. def get_channel_from_feishu(cls, log_type):
  40. try:
  41. user_sheet = Feishu.get_values_batch(log_type, 'haokan', 'TaQXk3')
  42. user_dict = {}
  43. for i in range(1, len(user_sheet)):
  44. user_name = user_sheet[i][0]
  45. out_id = user_sheet[i][1]
  46. our_id = user_sheet[i][3]
  47. if user_name is None or out_id is None or our_id is None or i == 13:
  48. pass
  49. else:
  50. user_dict[user_name] = str(out_id) + ',' + str(our_id)
  51. return user_dict
  52. except Exception as e:
  53. Common.logger(log_type).error(f'get_tab_from_feishu异常:{e}\n')
  54. @classmethod
  55. def get_channel_feeds(cls, log_type, tab, cookies):
  56. try:
  57. url = "https://haokan.baidu.com/web/video/feed?"
  58. params = {
  59. 'tab': str(tab),
  60. 'act': 'pcFeed',
  61. 'pd': 'pc',
  62. 'num': '20',
  63. 'shuaxin_id': '16698987960000',
  64. }
  65. headers = {
  66. 'Accept': '*/*',
  67. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  68. 'Cache-Control': 'no-cache',
  69. 'Connection': 'keep-alive',
  70. 'Content-Type': 'application/x-www-form-urlencoded',
  71. # 'Cookie': str(cookies).strip().replace('\n', ''),
  72. 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
  73. 'Pragma': 'no-cache',
  74. 'Referer': 'https://haokan.baidu.com/tab/recommend',
  75. 'Sec-Fetch-Dest': 'empty',
  76. 'Sec-Fetch-Mode': 'cors',
  77. 'Sec-Fetch-Site': 'same-origin',
  78. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
  79. 'AppleWebKit/537.36 (KHTML, like Gecko) '
  80. 'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.52',
  81. 'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
  82. 'sec-ch-ua-mobile': '?0',
  83. 'sec-ch-ua-platform': '"macOS"'
  84. }
  85. urllib3.disable_warnings()
  86. r = requests.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False)
  87. if r.json()['errno'] != 0 or r.json()['errmsg'] != '成功':
  88. Common.logger(log_type).error(f'feeds_response:{r.json()}\n')
  89. elif len(r.json()['data']['response']['videos']) == 0:
  90. Common.logger(log_type).warning(f'feeds_response:{r.json()}\n')
  91. else:
  92. feeds = r.json()['data']['response']['videos']
  93. return feeds
  94. except Exception as e:
  95. Common.logger(log_type).error(f'get_channel_feeds异常:{e}\n')
  96. @classmethod
  97. def get_video_url(cls, log_type, video_id, cookies):
  98. try:
  99. url = 'https://haokan.hao123.com/v?'
  100. params = {
  101. 'vid': str(video_id),
  102. '_format': 'json',
  103. }
  104. headers = {
  105. 'Accept': '*/*',
  106. 'Accept-Encoding': 'gzip, deflate, br',
  107. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  108. 'Cache-Control': 'no-cache',
  109. 'Connection': 'keep-alive',
  110. 'Content-Type': 'application/x-www-form-urlencoded',
  111. 'Cookie': str(cookies).strip().replace('\n', ''),
  112. # 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
  113. 'Pragma': 'no-cache',
  114. 'Referer': 'https://haokan.hao123.com/v?vid='+str(video_id)+'&pd=pc&context=',
  115. 'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
  116. 'sec-ch-ua-mobile': '?0',
  117. 'sec-ch-ua-platform': '"macOS"',
  118. 'Sec-Fetch-Dest': 'empty',
  119. 'Sec-Fetch-Mode': 'cors',
  120. 'Sec-Fetch-Site': 'same-origin',
  121. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
  122. 'AppleWebKit/537.36 (KHTML, like Gecko) '
  123. 'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.62',
  124. }
  125. urllib3.disable_warnings()
  126. r = requests.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False)
  127. if r.status_code != 200:
  128. Common.logger(log_type).info(f'get_video_url_response:{r.text}')
  129. elif r.json()['errno'] != 0 or len(r.json()['data']) == 0:
  130. Common.logger(log_type).info(f'get_video_url_response:{r.json()}')
  131. else:
  132. clarityUrl = r.json()['data']['apiData']['curVideoMeta']['clarityUrl']
  133. video_url = r.json()['data']['apiData']['curVideoMeta']['clarityUrl'][len(clarityUrl) - 1]['url']
  134. return video_url
  135. except Exception as e:
  136. Common.logger(log_type).info(f'get_video_url异常:{e}\n')
  137. @classmethod
  138. def get_channel_videos(cls, log_type, tab, our_id, env, cookies):
  139. try:
  140. feeds = cls.get_channel_feeds(log_type, tab, cookies)
  141. for i in range(len(feeds)):
  142. # video_title
  143. if 'title' not in feeds[i]:
  144. video_title = 0
  145. else:
  146. video_title = feeds[i]['title']
  147. # video_id
  148. if 'id' not in feeds[i]:
  149. video_id = 0
  150. else:
  151. video_id = feeds[i]['id']
  152. # play_cnt
  153. if 'playcnt' not in feeds[i]:
  154. play_cnt = 0
  155. else:
  156. play_cnt = feeds[i]['playcnt']
  157. # duration
  158. if 'duration' not in feeds[i]:
  159. duration = 0
  160. else:
  161. duration = int(feeds[i]['duration'].split(':')[0])*60 + int(feeds[i]['duration'].split(':')[-1])
  162. # publish_time
  163. if 'publish_time' not in feeds[i]:
  164. publish_time = 0
  165. else:
  166. publish_time = feeds[i]['publish_time']
  167. # user_name
  168. if 'source_name' not in feeds[i]:
  169. user_name = 0
  170. else:
  171. user_name = feeds[i]['source_name']
  172. # head_url
  173. if 'author_avatar' not in feeds[i]:
  174. head_url = 0
  175. else:
  176. head_url = feeds[i]['author_avatar']
  177. # cover_url
  178. if 'poster_big' in feeds[i]:
  179. cover_url = feeds[i]['poster_big']
  180. elif 'poster_pc' in feeds[i]:
  181. cover_url = feeds[i]['poster_pc']
  182. elif 'poster_small' in feeds[i]:
  183. cover_url = feeds[i]['poster_small']
  184. else:
  185. cover_url = 0
  186. # video_url
  187. get_video_url = cls.get_video_url(log_type, video_id, cookies)
  188. if get_video_url is not None:
  189. video_url = get_video_url
  190. elif 'play_url' in feeds[i]:
  191. video_url = feeds[i]['play_url']
  192. else:
  193. video_url = 0
  194. Common.logger(log_type).info(f'video_title:{video_title}')
  195. Common.logger(log_type).info(f'play_cnt:{play_cnt}')
  196. Common.logger(log_type).info(f'duration:{duration}')
  197. Common.logger(log_type).info(f'video_url:{video_url}')
  198. video_dict = {'video_title': video_title,
  199. 'video_id': video_id,
  200. 'play_cnt': play_cnt,
  201. 'duration': duration,
  202. 'publish_time': publish_time,
  203. 'user_name': user_name,
  204. 'head_url': head_url,
  205. 'cover_url': cover_url,
  206. 'video_url': video_url}
  207. cls.download_publish(log_type, tab, our_id, video_dict, env)
  208. except Exception as e:
  209. Common.logger(log_type).error(f'get_channel_videos异常:{e}\n')
  210. @classmethod
  211. def download_publish(cls, log_type, tab, our_id, video_dict, env):
  212. try:
  213. if video_dict['video_title'] == 0 or video_dict['video_url'] == 0:
  214. Common.logger(log_type).info('无效视频\n')
  215. elif cls.download_rule(video_dict['play_cnt'], video_dict['duration']) is False:
  216. Common.logger(log_type).info('不满足抓取规则\n')
  217. elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type)) is True:
  218. Common.logger(log_type).info('已中过滤词库\n')
  219. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '5pWipX') for x in y]:
  220. Common.logger(log_type).info('视频已下载\n')
  221. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '7f05d8') for x in y]:
  222. Common.logger(log_type).info('视频已下载\n')
  223. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'A5VCbq') for x in y]:
  224. Common.logger(log_type).info('视频已下载\n')
  225. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'kVaSjf') for x in y]:
  226. Common.logger(log_type).info('视频已下载\n')
  227. else:
  228. # 下载
  229. Common.download_method(log_type, 'cover', video_dict['video_title'], video_dict['cover_url'])
  230. Common.download_method(log_type, 'video', video_dict['video_title'], video_dict['video_url'])
  231. with open("./videos/" + video_dict['video_title']
  232. + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
  233. f_a.write(str(video_dict['video_id']) + "\n" +
  234. str(video_dict['video_title']) + "\n" +
  235. str(video_dict['duration']) + "\n" +
  236. '0' + "\n" +
  237. '0' + "\n" +
  238. '0' + "\n" +
  239. '0' + "\n" +
  240. '1920*1080' + "\n" +
  241. str(int(time.time())) + "\n" +
  242. str(video_dict['user_name']) + "\n" +
  243. str(video_dict['head_url']) + "\n" +
  244. str(video_dict['video_url']) + "\n" +
  245. str(video_dict['cover_url']) + "\n" +
  246. "HAOKAN" + str(int(time.time())))
  247. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  248. # 上传
  249. Common.logger(log_type).info(f"开始上传视频:{video_dict['video_title']}")
  250. if env == 'dev':
  251. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  252. our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  253. else:
  254. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  255. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  256. Common.logger(log_type).info(f"视频上传完成:{video_dict['video_title']}\n")
  257. # 保存视频信息至云文档
  258. Common.logger(log_type).info(f"保存视频至已下载表:{video_dict['video_title']}")
  259. Feishu.insert_columns(log_type, "haokan", "7f05d8", "ROWS", 1, 2)
  260. upload_time = int(time.time())
  261. if tab == 'recommend':
  262. tab = '播放量榜_首页频道'
  263. elif tab == 'yinyue_new':
  264. tab = '播放量榜_音乐频道'
  265. elif tab == 'gaoxiao_new':
  266. tab = '播放量榜_搞笑频道'
  267. elif tab == 'zongyi_new':
  268. tab = '播放量榜_综艺频道'
  269. elif tab == 'shenghuo_new':
  270. tab = '播放量榜_生活频道'
  271. elif tab == 'meishi_new':
  272. tab = '播放量榜_美食频道'
  273. elif tab == 'sannong_new':
  274. tab = '播放量榜_三农频道'
  275. elif tab == 'junshi_new':
  276. tab = '播放量榜_军事频道'
  277. elif tab == 'shehui_new':
  278. tab = '播放量榜_社会频道'
  279. elif tab == 'keji_new':
  280. tab = '播放量榜_科技频道'
  281. elif tab == 'wenhua_new':
  282. tab = '播放量榜_文化频道'
  283. elif tab == 'lvyou_new':
  284. tab = '播放量榜_旅游频道'
  285. else:
  286. tab = '播放量榜'
  287. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  288. tab,
  289. video_dict['video_title'],
  290. video_dict['video_id'],
  291. our_video_link,
  292. int(video_dict['play_cnt']),
  293. video_dict['duration'],
  294. video_dict['publish_time'],
  295. video_dict['user_name'],
  296. video_dict['head_url'],
  297. video_dict['cover_url'],
  298. video_dict['video_url']]]
  299. time.sleep(1)
  300. Feishu.update_values(log_type, "haokan", "7f05d8", "F2:Z2", values)
  301. Common.logger(log_type).info(f"视频:{video_dict['video_title']},下载/上传成功\n")
  302. except Exception as e:
  303. Common.logger(log_type).error(f'download_publish异常:{e}\n')
  304. @classmethod
  305. def get_all_channel_videos(cls, log_type, env):
  306. try:
  307. channel_dict = cls.get_channel_from_feishu(log_type)
  308. if len(channel_dict) == 0:
  309. Common.logger(log_type).warning('频道数量为空\n')
  310. else:
  311. for k, v in channel_dict.items():
  312. Common.logger(log_type).info(f'正在获取 {k} 频道视频\n')
  313. cookies = GetCookies.get_cookies(v.split(',')[0])
  314. Common.logger(log_type).info(f'cookies:{cookies}\n')
  315. cls.get_channel_videos(log_type, v.split(',')[0], v.split(',')[1], env, cookies)
  316. time.sleep(10)
  317. except Exception as e:
  318. Common.logger(log_type).error(f'get_all_channel_videos异常:{e}\n')
  319. if __name__ == '__main__':
  320. channel_cookies = GetCookies.get_cookies('recommend')
  321. Channel.get_channel_videos('channel', 'lvyou_new', '6267140', 'dev', channel_cookies)
  322. pass