haokan_hot.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/11/23
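"""Haokan (好看视频) hot-list crawler.

Pages through the PC top-list feed, resolves a playable URL for each video,
downloads the cover and video files, publishes them via Publish.upload_and_publish,
and records every processed item in a Feishu sheet.
"""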
import os
import sys
import time
import requests
import urllib3
sys.path.append(os.getcwd())
from main.common import Common
from main.feishu_lib import Feishu
from main.haokan_publish import Publish


class Hot:
    page = 0

    @classmethod
    def filter_words(cls, log_type):
        """Read the filter-word sheet and return all non-empty cells as a flat list."""
        try:
            filter_words_sheet = Feishu.get_values_batch(log_type, 'haokan', 'nKgHzp')
            filter_words_list = []
            for x in filter_words_sheet:
                for y in x:
                    if y is None:
                        pass
                    else:
                        filter_words_list.append(y)
            return filter_words_list
        except Exception as e:
            Common.logger(log_type).error(f'filter_words异常:{e}')

    @classmethod
    def get_hot_feeds(cls, log_type, our_id, env):
        """Page through the hot-video top list and hand each item to download_publish."""
        try:
            while True:
                cls.page += 1
                url = 'https://haokan.baidu.com/videoui/page/pc/toplist?'
                headers = {
                    'Accept-Encoding': 'gzip, deflate, br',
                    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                    'Cache-Control': 'no-cache',
                    'Content-Type': 'application/x-www-form-urlencoded',
                    'Cookie': 'BIDUPSID=0C817797C726E2312710D870ECDAE8A2; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; PSTM=1669001132; BAIDUID=AB7069CAF9ECB7AA43E400D164119733:FG=1; BDSFRCVID=H54OJexroG0GmHrjfnewtKf1EeKK0gOTDYLEJs2qYShnrsPVJeC6EG0PtoWQkz--EHtdogKK0mOTHv8F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF=tR333R7oKRu_HRjYbb__-P4DePAttURZ56bHWh0M3b61qRcIh4ob5MPEDto-BMPj52OnKUT13lc5h4jX0P7_KRtr346-35543bRTLn76LRv0Kj6HybOfhP-UyN3LWh37bJblMKoaMp78jR093JO4y4Ldj4oxJpOJ5JbMopCafJOKHICGDTA-jMK; Hm_lvt_4aadd610dfd2f5972f1efee2653a2bc5=1669029805; PC_TAB_LOG=video_details_page; COMMON_LID=88bc9b0fbce964fbb6a76cfd7927d02b; hkpcvideolandquery=%u4E2D%u56FD%u7537%u513F%u5218%u5F3A%u6012%u70E7%u9756%u56FD%u795E%u793E%uFF0C%u518D%u70E7%u65E5%u672C%u9A7B%u97E9%u5927%u4F7F%u9986%uFF0C%u540E%u6765%u600E%u4E48%u6837%u4E86%uFF1F; BDSFRCVID_BFESS=H54OJexroG0GmHrjfnewtKf1EeKK0gOTDYLEJs2qYShnrsPVJeC6EG0PtoWQkz--EHtdogKK0mOTHv8F_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF_BFESS=tR333R7oKRu_HRjYbb__-P4DePAttURZ56bHWh0M3b61qRcIh4ob5MPEDto-BMPj52OnKUT13lc5h4jX0P7_KRtr346-35543bRTLn76LRv0Kj6HybOfhP-UyN3LWh37bJblMKoaMp78jR093JO4y4Ldj4oxJpOJ5JbMopCafJOKHICGDTA-jMK; H_PS_PSSID=36557_37769_34813_37778_37728_36806_37719_37743_26350_37787; PSINO=2; delPer=0; BA_HECTOR=8la125a48ka00la4agal0he91hnre381e; BAIDUID_BFESS=AB7069CAF9ECB7AA43E400D164119733:FG=1; ZFY=e8fgayH:A:A8S2QAjzqo6RRIm7t3wLMhAdtluiJeCoSc4:C; Hm_lpvt_4aadd610dfd2f5972f1efee2653a2bc5=1669206505; ariaDefaultTheme=undefined; RT="z=1&dm=baidu.com&si=7jsviskg99&ss=latmgdwn&sl=1&tt=10y&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf&ld=1tl"',
                    'Pragma': 'no-cache',
                    'Referer': 'https://haokan.baidu.com/',
                    'Sec-Fetch-Dest': 'empty',
                    'Sec-Fetch-Mode': 'cors',
                    'Sec-Fetch-Site': 'same-origin',
                    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.52',
                    'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
                    'sec-ch-ua-mobile': '?0',
                    'sec-ch-ua-platform': '"macOS"'
                }
                params = {
                    'type': 'hotvideo',
                    'sfrom': 'haokan_web_banner',
                    'pageSize': '20',
                    '_format': 'json',
                    'page': str(cls.page)
                }
                urllib3.disable_warnings()
                r = requests.get(url=url, headers=headers, params=params, verify=False)
                if r.json()['errno'] != 0 or r.json()['error'] != '成功':
                    Common.logger(log_type).error(f'feeds_response:{r.text}\n')
                elif len(r.json()['apiData']['response']['video']) == 0:
                    Common.logger(log_type).info(f'没有新数据了\n')
                    return
                else:
                    feeds = r.json()['apiData']['response']['video']
                    Common.logger(log_type).info(f'正在抓取第{cls.page}页\n')
                    for i in range(len(feeds)):
                        # video_title
                        if 'title' not in feeds[i]:
                            video_title = 0
                        else:
                            video_title = feeds[i]['title']
                        # video_id
                        if 'vid' not in feeds[i]:
                            video_id = 0
                        else:
                            video_id = feeds[i]['vid']
                        # duration
                        if 'duration' not in feeds[i]:
                            duration = 0
                        else:
                            duration = feeds[i]['duration']
                        # publish_time
                        if 'publish_time' not in feeds[i]:
                            publish_time = 0
                        else:
                            publish_time = feeds[i]['publish_time'] \
                                .replace('发布时间:', '').replace('年', '/').replace('月', '/').replace('日', '')
                        # user_name
                        if 'author' not in feeds[i]:
                            user_name = 0
                        else:
                            user_name = feeds[i]['author']
                        # head_url
                        if 'author_icon' not in feeds[i]:
                            head_url = 0
                        else:
                            head_url = feeds[i]['author_icon']
                        # cover_url
                        if 'poster' not in feeds[i]:
                            cover_url = 0
                        else:
                            cover_url = feeds[i]['poster']
                        # video_url
                        get_video_url = cls.get_video_url(log_type, video_id)
                        if get_video_url is not None:
                            video_url = get_video_url
                        elif 'videoUrl' in feeds[i]:
                            video_url = feeds[i]['videoUrl']
                        else:
                            video_url = 0
                        Common.logger(log_type).info(f'video_title:{video_title}')
                        Common.logger(log_type).info(f'video_id:{video_id}')
                        Common.logger(log_type).info(f'duration:{duration}')
                        Common.logger(log_type).info(f'publish_time:{publish_time}')
                        Common.logger(log_type).info(f'user_name:{user_name}')
                        Common.logger(log_type).info(f'head_url:{head_url}')
                        Common.logger(log_type).info(f'cover_url:{cover_url}')
                        Common.logger(log_type).info(f'video_url:{video_url}\n')
                        video_dict = {'video_title': video_title,
                                      'video_id': video_id,
                                      'duration': duration,
                                      'publish_time': publish_time,
                                      'user_name': user_name,
                                      'head_url': head_url,
                                      'cover_url': cover_url,
                                      'video_url': video_url}
                        cls.download_publish(log_type, video_dict, our_id, env)
                        time.sleep(5)
        except Exception as e:
            Common.logger(log_type).error(f'get_hot_feeds异常:{e}\n')

    @classmethod
    def get_video_url(cls, log_type, video_id):
        """Fetch the video detail API and return a playable URL (last clarityUrl entry)."""
        try:
            url = 'https://haokan.hao123.com/v?'
            params = {
                'vid': str(video_id),
                '_format': 'json',
            }
            headers = {
                'Accept': '*/*',
                'Accept-Encoding': 'gzip, deflate, br',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                'Cache-Control': 'no-cache',
                'Connection': 'keep-alive',
                'Content-Type': 'application/x-www-form-urlencoded',
                'Cookie': 'PC_TAB_LOG=video_details_page; COMMON_LID=b0be69dd9fcae328d06935bd40f615cd; Hm_lvt_4aadd610dfd2f5972f1efee2653a2bc5=1669029953; hkpcvideolandquery=%u82CF%u5DDE%u6700%u5927%u7684%u4E8C%u624B%u8F66%u8D85%u5E02%uFF0C%u8F6C%u4E00%u8F6C%u91CC%u8FB9%u8C6A%u8F66%u592A%u591A%u4E86%uFF0C%u4EF7%u683C%u66F4%u8BA9%u6211%u5403%u60CA%uFF01; Hm_lpvt_4aadd610dfd2f5972f1efee2653a2bc5=1669875695; ariaDefaultTheme=undefined; reptileData=%7B%22data%22%3A%22636c55e0319da5169a60acec4a264a35c10862f8abfe2f2cc32c55eb6b0ab4de0efdfa115ea522d6d4d361dea07feae2831d3e2c16ed6b051c611ffe5aded6c9f852501759497b9fbd2132a2160e1e40e5845b41f78121ddcc3288bd077ae4e8%22%2C%22key_id%22%3A%2230%22%2C%22sign%22%3A%22f6752aac%22%7D; RT="z=1&dm=hao123.com&si=uc0q7wnm4w&ss=lb4otu71&sl=j&tt=av0&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf&ld=1rdw&cl=7v6c"',
                'Pragma': 'no-cache',
                'Referer': 'https://haokan.hao123.com/v?vid=10623278258033022286&pd=pc&context=',
                'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-platform': '"macOS"',
                'Sec-Fetch-Dest': 'empty',
                'Sec-Fetch-Mode': 'cors',
                'Sec-Fetch-Site': 'same-origin',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.62',
            }
            urllib3.disable_warnings()
            r = requests.get(url=url, headers=headers, params=params, verify=False)
            if r.status_code != 200:
                Common.logger(log_type).info(f'get_video_url_response:{r.text}')
            elif r.json()['errno'] != 0 or len(r.json()['data']) == 0:
                Common.logger(log_type).info(f'get_video_url_response:{r.json()}')
            else:
                # Take the last entry of the clarityUrl list, as the original logic does
                clarityUrl = r.json()['data']['apiData']['curVideoMeta']['clarityUrl']
                video_url = clarityUrl[-1]['url']
                return video_url
        except Exception as e:
            Common.logger(log_type).error(f'get_video_url异常:{e}\n')

    @classmethod
    def download_publish(cls, log_type, video_dict, our_id, env):
        """Filter, download, publish and record a single video."""
        if video_dict['video_title'] == 0 or video_dict['video_url'] == 0:
            Common.logger(log_type).info('无效视频\n')
        elif any(word in video_dict['video_title'] for word in cls.filter_words(log_type)):
            Common.logger(log_type).info('已中过滤词库\n')
        # Skip videos whose id already appears in any of the downloaded-video sheets
        elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '5pWipX') for x in y]:
            Common.logger(log_type).info('视频已下载\n')
        elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '7f05d8') for x in y]:
            Common.logger(log_type).info('视频已下载\n')
        elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'A5VCbq') for x in y]:
            Common.logger(log_type).info('视频已下载\n')
        elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'kVaSjf') for x in y]:
            Common.logger(log_type).info('视频已下载\n')
        else:
            # Download cover and video
            Common.download_method(log_type, 'cover', video_dict['video_title'], video_dict['cover_url'])
            Common.download_method(log_type, 'video', video_dict['video_title'], video_dict['video_url'])
            with open("./videos/" + video_dict['video_title']
                      + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
                f_a.write(str(video_dict['video_id']) + "\n" +
                          str(video_dict['video_title']) + "\n" +
                          str(video_dict['duration']) + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '1920*1080' + "\n" +
                          str(int(time.mktime(time.strptime(video_dict['publish_time'], "%Y/%m/%d")))) + "\n" +
                          str(video_dict['user_name']) + "\n" +
                          str(video_dict['head_url']) + "\n" +
                          str(video_dict['video_url']) + "\n" +
                          str(video_dict['cover_url']) + "\n" +
                          "HAOKAN" + str(int(time.time())))
            Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
            # Upload
            Common.logger(log_type).info(f"开始上传视频:{video_dict['video_title']}")
            if env == 'dev':
                our_video_id = Publish.upload_and_publish(log_type, our_id, env)
                our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
            else:
                our_video_id = Publish.upload_and_publish(log_type, our_id, env)
                our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
            Common.logger(log_type).info(f"视频上传完成:{video_dict['video_title']}\n")
            # Save the video info to the cloud sheet
            Common.logger(log_type).info(f"保存视频至已下载表:{video_dict['video_title']}")
            Feishu.insert_columns(log_type, "haokan", "5pWipX", "ROWS", 1, 2)
            upload_time = int(time.time())
            values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                       "今日热播榜",
                       video_dict['video_title'],
                       video_dict['video_id'],
                       our_video_link,
                       video_dict['duration'],
                       video_dict['publish_time'],
                       video_dict['user_name'],
                       video_dict['head_url'],
                       video_dict['cover_url'],
                       video_dict['video_url']]]
            time.sleep(1)
            Feishu.update_values(log_type, "haokan", "5pWipX", "F2:Z2", values)
            Common.logger(log_type).info(f"视频:{video_dict['video_title']},下载/上传成功\n")


if __name__ == '__main__':
    Hot.get_hot_feeds('hot', '6267140', 'dev')
    pass