# haokan_follow.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/1/13
  4. import datetime
  5. import os
  6. import sys
  7. import time
  8. import requests
  9. import urllib3
  10. sys.path.append(os.getcwd())
  11. from main.common import Common
  12. from main.feishu_lib import Feishu
  13. from main.haokan_publish import Publish
  14. class Follow:
  15. ctime = ''
  16. @classmethod
  17. def filter_words(cls, log_type):
  18. try:
  19. filter_words_sheet = Feishu.get_values_batch(log_type, 'haokan', 'nKgHzp')
  20. filter_words_list = []
  21. for x in filter_words_sheet:
  22. for y in x:
  23. if y is None:
  24. pass
  25. else:
  26. filter_words_list.append(y)
  27. return filter_words_list
  28. except Exception as e:
  29. Common.logger(log_type).error(f'filter_words异常:{e}')
  30. @classmethod
  31. def get_users_from_feishu(cls, log_type):
  32. try:
  33. user_sheet = Feishu.get_values_batch(log_type, 'haokan', 'x4nb7H')
  34. user_dict = {}
  35. for i in range(1, len(user_sheet)):
  36. user_name = user_sheet[i][0]
  37. out_id = user_sheet[i][1]
  38. our_id = user_sheet[i][3]
  39. if user_name is None or out_id is None or our_id is None:
  40. pass
  41. else:
  42. user_dict[user_name] = str(out_id) + ',' + str(our_id)
  43. return user_dict
  44. except Exception as e:
  45. Common.logger(log_type).error(f'get_users_from_feishu异常:{e}\n')
  46. @classmethod
  47. def follow_download_rule(cls, duration, width, height):
  48. if int(duration) >= 40:
  49. if int(width) >= 0 or int(height) >= 0:
  50. return True
  51. else:
  52. return False
  53. else:
  54. return False
  55. @classmethod
  56. def get_follow_feed(cls, log_type, out_id, our_id, user_name, env):
  57. try:
  58. while True:
  59. url = 'https://haokan.baidu.com/web/author/listall?'
  60. headers = {
  61. 'Accept': '*/*',
  62. 'Accept-Encoding': 'gzip, deflate, br',
  63. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  64. 'Cache-Control': 'no-cache',
  65. 'Connection': 'keep-alive',
  66. 'Content-Type': 'application/x-www-form-urlencoded',
  67. 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
  68. 'Referer': 'https://haokan.baidu.com/author/'+str(out_id),
  69. 'Pragma': 'no-cache',
  70. 'Host': 'haokan.baidu.com',
  71. 'sec-ch-ua': '"Not?A_Brand";v="8", "Chromium";v="108", "Microsoft Edge";v="108"',
  72. 'sec-ch-ua-mobile': '?0',
  73. 'sec-ch-ua-platform': '"macOS"',
  74. 'Sec-Fetch-Dest': 'empty',
  75. 'Sec-Fetch-Mode': 'cors',
  76. 'Sec-Fetch-Site': 'same-origin',
  77. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36 Edg/108.0.1462.76'
  78. }
  79. params = {
  80. 'app_id': str(out_id),
  81. 'ctime': cls.ctime,
  82. 'rn': '10',
  83. 'searchAfter': '',
  84. '_api': '1'
  85. }
  86. response = requests.get(url=url, headers=headers, params=params, verify=False)
  87. if '"errno":0,' not in response.text:
  88. Common.logger(log_type).warning(f'get_follow_feed:{response.text}\n')
  89. return
  90. elif len(response.json()['data']['results']) == 0:
  91. Common.logger(log_type).info(f'get_follow_feed:{response.json()}\n')
  92. cls.ctime = 0
  93. return
  94. else:
  95. cls.ctime = response.json()['data']['ctime']
  96. follow_feeds = response.json()['data']['results']
  97. for i in range(len(follow_feeds)):
  98. # video_title
  99. if 'title' not in follow_feeds[i]['content']:
  100. video_title = ''
  101. else:
  102. video_title = follow_feeds[i]['content']['title']
  103. # video_id
  104. if 'vid' not in follow_feeds[i]['content']:
  105. video_id = ''
  106. else:
  107. video_id = follow_feeds[i]['content']['vid']
  108. # is_top
  109. if 'is_show_feature' not in follow_feeds[i]['content']:
  110. is_top = ''
  111. else:
  112. is_top = follow_feeds[i]['content']['is_show_feature']
  113. # play_cnt
  114. if 'playcnt' not in follow_feeds[i]['content']:
  115. play_cnt = ''
  116. else:
  117. play_cnt = follow_feeds[i]['content']['playcnt']
  118. # duration
  119. if 'duration' not in follow_feeds[i]['content']:
  120. duration = ''
  121. duration_stamp = ''
  122. else:
  123. duration = follow_feeds[i]['content']['duration']
  124. duration_stamp = int(duration.split(':')[0])*60 + int(duration.split(':')[-1])
  125. # publish_time
  126. if 'publish_time' not in follow_feeds[i]['content']:
  127. publish_time = ''
  128. else:
  129. publish_time = follow_feeds[i]['content']['publish_time']
  130. # publish_time_stamp
  131. if '刚刚' in publish_time:
  132. publish_time_stamp = int(time.time())
  133. elif '分钟前' in publish_time:
  134. publish_time_stamp = int(time.time()) - int(publish_time[0]) * 60
  135. elif '小时前' in publish_time:
  136. publish_time_stamp = int(time.time()) - int(publish_time[0]) * 3600
  137. elif '昨天' in publish_time:
  138. publish_time_str = (datetime.date.today() + datetime.timedelta(days=-1)).strftime("%Y/%m/%d")
  139. publish_time_stamp = int(time.mktime(time.strptime(publish_time_str, "%Y/%m/%d")))
  140. elif '天前' in publish_time:
  141. today = datetime.date.today()
  142. publish_time_str = today - datetime.timedelta(days=int(publish_time[0]))
  143. publish_time_stamp = int(time.mktime(publish_time_str.timetuple()))
  144. elif '年' in publish_time:
  145. publish_time_str = publish_time.replace('年', '/').replace('月', '/').replace('日', '')
  146. publish_time_stamp = int(time.mktime(time.strptime(publish_time_str, "%Y/%m/%d")))
  147. else:
  148. publish_time_str = publish_time.replace('月', '/').replace('日', '')
  149. this_year = datetime.datetime.now().year
  150. publish_time_stamp = int(time.mktime(time.strptime(f"{this_year}/{publish_time_str}", "%Y/%m/%d")))
  151. # cover_url
  152. if 'cover_src' in follow_feeds[i]['content']:
  153. cover_url = follow_feeds[i]['content']['cover_src']
  154. elif 'cover_src_pc' in follow_feeds[i]['content']:
  155. cover_url = follow_feeds[i]['content']['cover_src_pc']
  156. elif 'poster' in follow_feeds[i]['content']:
  157. cover_url = follow_feeds[i]['content']['poster']
  158. else:
  159. cover_url = ''
  160. if is_top is True and int(time.time()) - publish_time_stamp >= 3600*24*30:
  161. Common.logger(log_type).info(f'video_title:{video_title}')
  162. Common.logger(log_type).info(f'置顶视频,发布时间超过30天:{publish_time}\n')
  163. elif int(time.time()) - publish_time_stamp >= 3600*24*30:
  164. Common.logger(log_type).info(f'video_title:{video_title}')
  165. Common.logger(log_type).info(f'发布时间超过30天:{publish_time}\n')
  166. cls.ctime = ''
  167. return
  168. else:
  169. video_info_dict = cls.get_video_url(log_type, video_id)
  170. # video_url
  171. video_url = video_info_dict['video_url']
  172. # video_width
  173. video_width = video_info_dict['video_width']
  174. # video_height
  175. video_height = video_info_dict['video_height']
  176. Common.logger(log_type).info(f'video_title:{video_title}')
  177. # Common.logger(log_type).info(f'user_name:{user_name}')
  178. # Common.logger(log_type).info(f'out_id:{out_id}')
  179. # Common.logger(log_type).info(f'our_id:{our_id}')
  180. # Common.logger(log_type).info(f'duration_stamp:{duration_stamp}')
  181. Common.logger(log_type).info(f'duration:{duration}')
  182. Common.logger(log_type).info(f'video_width:{video_width}')
  183. Common.logger(log_type).info(f'video_height:{video_height}')
  184. Common.logger(log_type).info(f'publish_time:{publish_time}')
  185. Common.logger(log_type).info(f'video_url:{video_url}\n')
  186. video_dict = {
  187. 'video_title': video_title,
  188. 'video_id': video_id,
  189. 'play_cnt': play_cnt,
  190. 'duration': duration,
  191. 'duration_stamp': duration_stamp,
  192. 'publish_time': publish_time,
  193. 'video_width': video_width,
  194. 'video_height': video_height,
  195. 'user_name': user_name,
  196. 'cover_url': cover_url,
  197. 'video_url': video_url
  198. }
  199. cls.download_publish(log_type, video_dict, our_id, env)
  200. except Exception as e:
  201. Common.logger(log_type).error(f'get_follow_feed异常:{e}\n')
  202. @classmethod
  203. def get_video_url(cls, log_type, video_id):
  204. try:
  205. url = 'https://haokan.hao123.com/v?'
  206. params = {
  207. 'vid': str(video_id),
  208. '_format': 'json',
  209. }
  210. headers = {
  211. 'Accept': '*/*',
  212. 'Accept-Encoding': 'gzip, deflate, br',
  213. 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  214. 'Cache-Control': 'no-cache',
  215. 'Connection': 'keep-alive',
  216. 'Content-Type': 'application/x-www-form-urlencoded',
  217. 'Cookie': 'PC_TAB_LOG=video_details_page; COMMON_LID=b0be69dd9fcae328d06935bd40f615cd; Hm_lvt_4aadd610dfd2f5972f1efee2653a2bc5=1669029953; hkpcvideolandquery=%u82CF%u5DDE%u6700%u5927%u7684%u4E8C%u624B%u8F66%u8D85%u5E02%uFF0C%u8F6C%u4E00%u8F6C%u91CC%u8FB9%u8C6A%u8F66%u592A%u591A%u4E86%uFF0C%u4EF7%u683C%u66F4%u8BA9%u6211%u5403%u60CA%uFF01; Hm_lpvt_4aadd610dfd2f5972f1efee2653a2bc5=1669875695; ariaDefaultTheme=undefined; reptileData=%7B%22data%22%3A%22636c55e0319da5169a60acec4a264a35c10862f8abfe2f2cc32c55eb6b0ab4de0efdfa115ea522d6d4d361dea07feae2831d3e2c16ed6b051c611ffe5aded6c9f852501759497b9fbd2132a2160e1e40e5845b41f78121ddcc3288bd077ae4e8%22%2C%22key_id%22%3A%2230%22%2C%22sign%22%3A%22f6752aac%22%7D; RT="z=1&dm=hao123.com&si=uc0q7wnm4w&ss=lb4otu71&sl=j&tt=av0&bcn=https%3A%2F%2Ffclog.baidu.com%2Flog%2Fweirwood%3Ftype%3Dperf&ld=1rdw&cl=7v6c"',
  218. 'Pragma': 'no-cache',
  219. 'Referer': 'https://haokan.hao123.com/v?vid=10623278258033022286&pd=pc&context=',
  220. 'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
  221. 'sec-ch-ua-mobile': '?0',
  222. 'sec-ch-ua-platform': '"macOS"',
  223. 'Sec-Fetch-Dest': 'empty',
  224. 'Sec-Fetch-Mode': 'cors',
  225. 'Sec-Fetch-Site': 'same-origin',
  226. 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.62',
  227. }
  228. urllib3.disable_warnings()
  229. r = requests.get(url=url, headers=headers, params=params, verify=False)
  230. if r.status_code != 200:
  231. video_url = ''
  232. video_width = ''
  233. video_height = ''
  234. Common.logger(log_type).info(f'get_video_url_response:{r.text}')
  235. elif r.json()['errno'] != 0 or len(r.json()['data']) == 0:
  236. video_url = ''
  237. video_width = ''
  238. video_height = ''
  239. Common.logger(log_type).info(f'get_video_url_response:{r.json()}')
  240. else:
  241. clarityUrl = r.json()['data']['apiData']['curVideoMeta']['clarityUrl']
  242. video_url = r.json()['data']['apiData']['curVideoMeta']['clarityUrl'][len(clarityUrl) - 1]['url']
  243. video_width = r.json()['data']['apiData']['curVideoMeta']['clarityUrl'][len(clarityUrl) - 1]['vodVideoHW'].split('$$')[-1]
  244. video_height = r.json()['data']['apiData']['curVideoMeta']['clarityUrl'][len(clarityUrl) - 1]['vodVideoHW'].split('$$')[0]
  245. video_info_dict = {
  246. 'video_url': video_url,
  247. 'video_width': video_width,
  248. 'video_height': video_height
  249. }
  250. return video_info_dict
  251. except Exception as e:
  252. Common.logger(log_type).error(f'get_video_url异常:{e}\n')
  253. @classmethod
  254. def download_publish(cls, log_type, video_dict, our_id, env):
  255. try:
  256. if video_dict['video_title'] == '' or video_dict['video_id'] == '' or video_dict['video_url'] == '':
  257. Common.logger(log_type).info('无效视频\n')
  258. elif int(video_dict['duration_stamp']) < 60:
  259. Common.logger(log_type).info(f'时长:{int(video_dict["duration"])} < 60s\n')
  260. elif int(video_dict['video_width']) < 720 or int(video_dict['video_height']) < 720:
  261. Common.logger(log_type).info(f'{int(video_dict["video_width"])}*{int(video_dict["video_height"])} < 720P\n')
  262. elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type)) is True:
  263. Common.logger(log_type).info('已中过滤词库\n')
  264. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '5pWipX') for x in y]:
  265. Common.logger(log_type).info('视频已下载\n')
  266. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', '7f05d8') for x in y]:
  267. Common.logger(log_type).info('视频已下载\n')
  268. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'kVaSjf') for x in y]:
  269. Common.logger(log_type).info('视频已下载\n')
  270. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, 'haokan', 'A5VCbq') for x in y]:
  271. Common.logger(log_type).info('视频已下载\n')
  272. else:
  273. # 下载
  274. Common.download_method(log_type, 'cover', video_dict['video_title'], video_dict['cover_url'])
  275. Common.download_method(log_type, 'video', video_dict['video_title'], video_dict['video_url'])
  276. with open("./videos/" + video_dict['video_title']
  277. + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
  278. f_a.write(str(video_dict['video_id']) + "\n" +
  279. str(video_dict['video_title']) + "\n" +
  280. str(video_dict['duration_stamp']) + "\n" +
  281. '100000' + "\n" +
  282. '100000' + "\n" +
  283. '100000' + "\n" +
  284. '100000' + "\n" +
  285. '1920*1080' + "\n" +
  286. str(int(time.time())) + "\n" +
  287. str(video_dict['user_name']) + "\n" +
  288. str(video_dict['cover_url']) + "\n" +
  289. str(video_dict['video_url']) + "\n" +
  290. str(video_dict['cover_url']) + "\n" +
  291. "HAOKAN" + str(int(time.time())))
  292. Common.logger(log_type).info("==========视频信息已保存至info.txt==========")
  293. # 上传
  294. Common.logger(log_type).info(f"开始上传视频:{video_dict['video_title']}")
  295. if env == 'dev':
  296. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  297. our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  298. else:
  299. our_video_id = Publish.upload_and_publish(log_type, our_id, env)
  300. our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
  301. Common.logger(log_type).info(f"视频上传完成:{video_dict['video_title']}\n")
  302. # 保存视频信息至云文档
  303. Common.logger(log_type).info(f"保存视频至已下载表:{video_dict['video_title']}")
  304. Feishu.insert_columns(log_type, "haokan", "kVaSjf", "ROWS", 1, 2)
  305. upload_time = int(time.time())
  306. values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
  307. '定向榜',
  308. video_dict['video_title'],
  309. video_dict['video_id'],
  310. our_video_link,
  311. int(video_dict['play_cnt']),
  312. video_dict['duration'],
  313. video_dict['publish_time'],
  314. video_dict['video_width']+"*"+video_dict['video_height'],
  315. video_dict['user_name'],
  316. video_dict['cover_url'],
  317. video_dict['video_url']]]
  318. time.sleep(1)
  319. Feishu.update_values(log_type, "haokan", "kVaSjf", "F2:Z2", values)
  320. Common.logger(log_type).info(f"视频:{video_dict['video_title']},下载/上传成功\n")
  321. except Exception as e:
  322. Common.logger(log_type).error(f'download_publish异常:{e}\n')
  323. @classmethod
  324. def get_user_videos(cls, log_type, env):
  325. try:
  326. user_dict = cls.get_users_from_feishu(log_type)
  327. if len(user_dict) == 0:
  328. Common.logger(log_type).warning('用户ID列表为空\n')
  329. else:
  330. for k, v in user_dict.items():
  331. user_name = k
  332. out_id = v.split(',')[0]
  333. our_id = v.split(',')[1]
  334. Common.logger(log_type).info(f'抓取{user_name}主页视频\n')
  335. cls.get_follow_feed(log_type, out_id, our_id, user_name, env)
  336. Common.logger(log_type).info('休眠 30 秒\n')
  337. time.sleep(30)
  338. cls.ctime = ''
  339. except Exception as e:
  340. Common.logger(log_type).error(f'get_user_videos异常:{e}\n')
  341. if __name__ == '__main__':
  342. print(Follow.get_users_from_feishu('follow'))
  343. pass