# haokan_channel.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2022/11/23
  4. import os
  5. import sys
  6. import time
  7. import requests
  8. import urllib3
  9. sys.path.append(os.getcwd())
  10. from main.common import Common
  11. from main.feishu_lib import Feishu
  12. from main.haokan_publish import Publish
  13. from main.get_cookies import GetCookies
  14. class Channel:
  15. @classmethod
  16. def download_rule(cls, play_cnt, duration):
  17. if int(play_cnt) >= 10000:
  18. if int(duration) >= 30:
  19. return True
  20. else:
  21. return False
  22. else:
  23. return False
  24. @classmethod
  25. def get_channel_from_feishu(cls, log_type):
  26. try:
  27. user_sheet = Feishu.get_values_batch(log_type, 'haokan', 'TaQXk3')
  28. user_dict = {}
  29. for i in range(1, len(user_sheet)):
  30. user_name = user_sheet[i][0]
  31. out_id = user_sheet[i][1]
  32. our_id = user_sheet[i][3]
  33. if user_name is None or out_id is None or our_id is None or i == 13:
  34. pass
  35. else:
  36. user_dict[user_name] = str(out_id) + ',' + str(our_id)
  37. return user_dict
  38. except Exception as e:
  39. Common.logger(log_type).error(f'get_tab_from_feishu异常:{e}\n')
  40. @classmethod
  41. def get_channel_feeds(cls, log_type, tab, cookies):
  42. try:
  43. url = "https://haokan.baidu.com/web/video/feed?"
  44. params = {
  45. 'tab': str(tab),
  46. 'act': 'pcFeed',
  47. 'pd': 'pc',
  48. 'num': '20',
  49. 'shuaxin_id': '16698987960000',
  50. }
  51. headers = {
  52. 'Accept': '*/*',
  53. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  54. 'Cache-Control': 'no-cache',
  55. 'Connection': 'keep-alive',
  56. 'Content-Type': 'application/x-www-form-urlencoded',
  57. # 'Cookie': str(cookies).strip().replace('\n', ''),
  58. 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
  59. 'Pragma': 'no-cache',
  60. 'Referer': 'https://haokan.baidu.com/tab/recommend',
  61. 'Sec-Fetch-Dest': 'empty',
  62. 'Sec-Fetch-Mode': 'cors',
  63. 'Sec-Fetch-Site': 'same-origin',
  64. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
  65. 'AppleWebKit/537.36 (KHTML, like Gecko) '
  66. 'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.52',
  67. 'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
  68. 'sec-ch-ua-mobile': '?0',
  69. 'sec-ch-ua-platform': '"macOS"'
  70. }
  71. urllib3.disable_warnings()
  72. r = requests.get(url=url, headers=headers, params=params, verify=False)
  73. if r.json()['errno'] != 0 or r.json()['errmsg'] != '成功':
  74. Common.logger(log_type).error(f'feeds_response:{r.json()}\n')
  75. elif len(r.json()['data']['response']['videos']) == 0:
  76. Common.logger(log_type).warning(f'feeds_response:{r.json()}\n')
  77. else:
  78. feeds = r.json()['data']['response']['videos']
  79. return feeds
  80. except Exception as e:
  81. Common.logger(log_type).error(f'get_channel_feeds异常:{e}\n')
  82. @classmethod
  83. def get_video_url(cls, log_type, video_id, cookies):
  84. try:
  85. url = 'https://haokan.hao123.com/v?'
  86. params = {
  87. 'vid': str(video_id),
  88. '_format': 'json',
  89. }
  90. headers = {
  91. 'Accept': '*/*',
  92. 'Accept-Encoding': 'gzip, deflate, br',
  93. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  94. 'Cache-Control': 'no-cache',
  95. 'Connection': 'keep-alive',
  96. 'Content-Type': 'application/x-www-form-urlencoded',
  97. 'Cookie': str(cookies).strip().replace('\n', ''),
  98. # 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
  99. 'Pragma': 'no-cache',
  100. 'Referer': 'https://haokan.hao123.com/v?vid='+str(video_id)+'&pd=pc&context=',
  101. 'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
  102. 'sec-ch-ua-mobile': '?0',
  103. 'sec-ch-ua-platform': '"macOS"',
  104. 'Sec-Fetch-Dest': 'empty',
  105. 'Sec-Fetch-Mode': 'cors',
  106. 'Sec-Fetch-Site': 'same-origin',
  107. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
  108. 'AppleWebKit/537.36 (KHTML, like Gecko) '
  109. 'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.62',
  110. }
  111. urllib3.disable_warnings()
  112. r = requests.get(url=url, headers=headers, params=params, verify=False)
  113. if r.status_code != 200:
  114. Common.logger(log_type).info(f'get_video_url_response:{r.text}')
  115. elif r.json()['errno'] != 0 or len(r.json()['data']) == 0:
  116. Common.logger(log_type).info(f'get_video_url_response:{r.json()}')
  117. else:
  118. clarityUrl = r.json()['data']['apiData']['curVideoMeta']['clarityUrl']
  119. video_url = r.json()['data']['apiData']['curVideoMeta']['clarityUrl'][len(clarityUrl) - 1]['url']
  120. return video_url
  121. except Exception as e:
  122. Common.logger(log_type).info(f'get_video_url异常:{e}\n')
  123. @classmethod
  124. def get_channel_videos(cls, log_type, tab, our_id, env, cookies):
  125. try:
  126. feeds = cls.get_channel_feeds(log_type, tab, cookies)
  127. for i in range(len(feeds)):
  128. # video_title
  129. if 'title' not in feeds[i]:
  130. video_title = 0
  131. else:
  132. video_title = feeds[i]['title']
  133. # video_id
  134. if 'id' not in feeds[i]:
  135. video_id = 0
  136. else:
  137. video_id = feeds[i]['id']
  138. # play_cnt
  139. if 'playcnt' not in feeds[i]:
  140. play_cnt = 0
  141. else:
  142. play_cnt = feeds[i]['playcnt']
  143. # duration
  144. if 'duration' not in feeds[i]:
  145. duration = 0
  146. else:
  147. duration = int(feeds[i]['duration'].split(':')[0])*60 + int(feeds[i]['duration'].split(':')[-1])
  148. # publish_time
  149. if 'publish_time' not in feeds[i]:
  150. publish_time = 0
  151. else:
  152. publish_time = feeds[i]['publish_time']
  153. # user_name
  154. if 'source_name' not in feeds[i]:
  155. user_name = 0
  156. else:
  157. user_name = feeds[i]['source_name']
  158. # head_url
  159. if 'author_avatar' not in feeds[i]:
  160. head_url = 0
  161. else:
  162. head_url = feeds[i]['author_avatar']
  163. # cover_url
  164. if 'poster_big' in feeds[i]:
  165. cover_url = feeds[i]['poster_big']
  166. elif 'poster_pc' in feeds[i]:
  167. cover_url = feeds[i]['poster_pc']
  168. elif 'poster_small' in feeds[i]:
  169. cover_url = feeds[i]['poster_small']
  170. else:
  171. cover_url = 0
  172. # video_url
  173. get_video_url = cls.get_video_url(log_type, video_id, cookies)
  174. if get_video_url is not None:
  175. video_url = get_video_url
  176. elif 'play_url' in feeds[i]:
  177. video_url = feeds[i]['play_url']
  178. else:
  179. video_url = 0
  180. Common.logger(log_type).info(f'video_title:{video_title}')
  181. Common.logger(log_type).info(f'play_cnt:{play_cnt}')
  182. Common.logger(log_type).info(f'duration:{duration}')
  183. Common.logger(log_type).info(f'video_url:{video_url}')
  184. video_dict = {'video_title': video_title,
  185. 'video_id': video_id,
  186. 'play_cnt': play_cnt,
  187. 'duration': duration,
  188. 'publish_time': publish_time,
  189. 'user_name': user_name,
  190. 'head_url': head_url,
  191. 'cover_url': cover_url,
  192. 'video_url': video_url}
  193. cls.download_publish(log_type, tab, our_id, video_dict, env)
  194. except Exception as e:
  195. Common.logger(log_type).error(f'get_channel_videos异常:{e}\n')
  196. @classmethod
  197. def download_publish(cls, log_type, tab, our_id, video_dict, env):
  198. try:
  199. if video_dict['video_title'] == 0 or video_dict['video_url'] == 0:
  200. Common.logger(log_type).info('无效视频\n')
  201. elif cls.download_rule(video_dict['play_cnt'], video_dict['duration']) is False:
  202. Common.logger(log_type).info('不满足抓取规则\n')
  203. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '5pWipX') for x in y]:
  204. Common.logger(log_type).info('视频已下载\n')
  205. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '7f05d8') for x in y]:
  206. Common.logger(log_type).info('视频已下载\n')
  207. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'A5VCbq') for x in y]:
  208. Common.logger(log_type).info('视频已下载\n')
  209. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'kVaSjf') for x in y]:
  210. Common.logger(log_type).info('视频已下载\n')
  211. else:
  212. # 下载
  213. Common.download_method(log_type, 'cover', video_dict['video_title'], video_dict['cover_url'])
  214. Common.download_method(log_type, 'video', video_dict['video_title'], video_dict['video_url'])
  215. with open("./videos/" + video_dict['video_title']
  216. + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
  217. f_a.write(str(video_dict['video_id']) + "\n" +
  218. str(video_dict['video_title']) + "\n" +
  219. str(video_dict['duration']) + "\n" +
  220. '0' + "\n" +
  221. '0' + "\n" +
  222. '0' + "\n" +
  223. '0' + "\n" +
  224. '1920*1080' + "\n" +
  225. str(int(time.time())) + "\n" +
  226. str(video_dict['user_name']) + "\n" +
  227. str(video_dict['head_url']) + "\n" +
  228. str(video_dict['video_url']) + "\n" +
  229. str(video_dict['cover_url']) + "\n" +
  230. "HAOKAN" + str(int(time.time())))
  231. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  232. # 上传
  233. Common.logger(log_type).info(f"开始上传视频:{video_dict['video_title']}")
  234. if env == 'dev':
  235. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  236. our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  237. else:
  238. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  239. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  240. Common.logger(log_type).info(f"视频上传完成:{video_dict['video_title']}\n")
  241. # 保存视频信息至云文档
  242. Common.logger(log_type).info(f"保存视频至已下载表:{video_dict['video_title']}")
  243. Feishu.insert_columns(log_type, "haokan", "7f05d8", "ROWS", 1, 2)
  244. upload_time = int(time.time())
  245. if tab == 'recommend':
  246. tab = '播放量榜_首页频道'
  247. elif tab == 'yinyue_new':
  248. tab = '播放量榜_音乐频道'
  249. elif tab == 'gaoxiao_new':
  250. tab = '播放量榜_搞笑频道'
  251. elif tab == 'zongyi_new':
  252. tab = '播放量榜_综艺频道'
  253. elif tab == 'shenghuo_new':
  254. tab = '播放量榜_生活频道'
  255. elif tab == 'meishi_new':
  256. tab = '播放量榜_美食频道'
  257. elif tab == 'sannong_new':
  258. tab = '播放量榜_三农频道'
  259. elif tab == 'junshi_new':
  260. tab = '播放量榜_军事频道'
  261. elif tab == 'shehui_new':
  262. tab = '播放量榜_社会频道'
  263. elif tab == 'keji_new':
  264. tab = '播放量榜_科技频道'
  265. elif tab == 'wenhua_new':
  266. tab = '播放量榜_文化频道'
  267. elif tab == 'lvyou_new':
  268. tab = '播放量榜_旅游频道'
  269. else:
  270. tab = '播放量榜'
  271. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  272. tab,
  273. video_dict['video_title'],
  274. video_dict['video_id'],
  275. our_video_link,
  276. int(video_dict['play_cnt']),
  277. video_dict['duration'],
  278. video_dict['publish_time'],
  279. video_dict['user_name'],
  280. video_dict['head_url'],
  281. video_dict['cover_url'],
  282. video_dict['video_url']]]
  283. time.sleep(1)
  284. Feishu.update_values(log_type, "haokan", "7f05d8", "F2:Z2", values)
  285. Common.logger(log_type).info(f"视频:{video_dict['video_title']},下载/上传成功\n")
  286. except Exception as e:
  287. Common.logger(log_type).error(f'download_publish异常:{e}\n')
  288. @classmethod
  289. def get_all_channel_videos(cls, log_type, env):
  290. try:
  291. channel_dict = cls.get_channel_from_feishu(log_type)
  292. if len(channel_dict) == 0:
  293. Common.logger(log_type).warning('频道数量为空\n')
  294. else:
  295. for k, v in channel_dict.items():
  296. Common.logger(log_type).info(f'正在获取 {k} 频道视频\n')
  297. cookies = GetCookies.get_cookies(v.split(',')[0])
  298. Common.logger(log_type).info(f'cookies:{cookies}\n')
  299. cls.get_channel_videos(log_type, v.split(',')[0], v.split(',')[1], env, cookies)
  300. time.sleep(10)
  301. except Exception as e:
  302. Common.logger(log_type).error(f'get_all_channel_videos异常:{e}\n')
  303. if __name__ == '__main__':
  304. channel_cookies = GetCookies.get_cookies('recommend')
  305. Channel.get_channel_videos('channel', 'lvyou_new', '6267140', 'dev', channel_cookies)
  306. pass