# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2022/11/23
"""Crawler for haokan.baidu.com: fetch channel feeds, filter by rules and
word lists, download qualifying videos and publish them via Publish."""
import os
import sys
import time

import requests
import urllib3

sys.path.append(os.getcwd())
from main.common import Common
from main.feishu_lib import Feishu
from main.haokan_publish import Publish
from main.get_cookies import GetCookies


class Channel:
    """Per-channel crawl pipeline: feed fetch -> rule/dedupe checks -> download -> upload -> bookkeeping sheet."""

    # Feed tab id -> label written to the "downloaded" Feishu sheet.
    # Unknown tabs fall back to '播放量榜' in download_publish.
    TAB_NAMES = {
        'recommend': '播放量榜_首页频道',
        'yinyue_new': '播放量榜_音乐频道',
        'gaoxiao_new': '播放量榜_搞笑频道',
        'zongyi_new': '播放量榜_综艺频道',
        'shenghuo_new': '播放量榜_生活频道',
        'meishi_new': '播放量榜_美食频道',
        'sannong_new': '播放量榜_三农频道',
        'junshi_new': '播放量榜_军事频道',
        'shehui_new': '播放量榜_社会频道',
        'keji_new': '播放量榜_科技频道',
        'wenhua_new': '播放量榜_文化频道',
        'lvyou_new': '播放量榜_旅游频道',
    }

    @classmethod
    def filter_words(cls, log_type):
        """Return the flat list of non-None filter words from the Feishu sheet.

        Bug fix: previously returned None on any exception, which made the
        `any(...)` membership check in download_publish raise; now returns [].
        """
        try:
            filter_words_sheet = Feishu.get_values_batch(log_type, 'haokan', 'nKgHzp')
            return [cell for row in filter_words_sheet for cell in row if cell is not None]
        except Exception as e:
            Common.logger(log_type).error(f'filter_words异常:{e}')
            return []

    @classmethod
    def download_rule(cls, play_cnt, duration):
        """A video qualifies for download when play count >= 10000 and duration >= 30 seconds."""
        return int(play_cnt) >= 10000 and int(duration) >= 30

    @classmethod
    def get_channel_from_feishu(cls, log_type):
        """Read the channel sheet and map user_name -> "out_id,our_id".

        Skips the header row, rows with any missing field, and row index 13
        (deliberate exclusion carried over from the original).  Bug fix:
        returns {} on error instead of None, which crashed len() in
        get_all_channel_videos.
        """
        try:
            user_sheet = Feishu.get_values_batch(log_type, 'haokan', 'TaQXk3')
            user_dict = {}
            for i in range(1, len(user_sheet)):  # row 0 is the header
                user_name = user_sheet[i][0]
                out_id = user_sheet[i][1]
                our_id = user_sheet[i][3]
                if user_name is None or out_id is None or our_id is None or i == 13:
                    continue
                user_dict[user_name] = str(out_id) + ',' + str(our_id)
            return user_dict
        except Exception as e:
            Common.logger(log_type).error(f'get_tab_from_feishu异常:{e}\n')
            return {}

    @classmethod
    def get_channel_feeds(cls, log_type, tab, cookies):
        """Fetch one page (20 items) of the PC feed for the given tab.

        The Cookie header is read from a Feishu sheet; the `cookies` argument
        is kept only for interface compatibility (see the commented line).
        Bug fix: returns [] on any error/empty response instead of None, which
        crashed range(len(feeds)) in get_channel_videos.
        """
        try:
            url = "https://haokan.baidu.com/web/video/feed?"
            params = {
                'tab': str(tab),
                'act': 'pcFeed',
                'pd': 'pc',
                'num': '20',
                'shuaxin_id': '16698987960000',
            }
            headers = {
                'Accept': '*/*',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                'Cache-Control': 'no-cache',
                # 'Connection': 'keep-alive',
                'Content-Type': 'application/x-www-form-urlencoded',
                # 'Cookie': str(cookies).strip().replace('\n', ''),
                'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
                'Pragma': 'no-cache',
                'Referer': 'https://haokan.baidu.com/tab/recommend',
                'Sec-Fetch-Dest': 'empty',
                'Sec-Fetch-Mode': 'cors',
                'Sec-Fetch-Site': 'same-origin',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
                              'AppleWebKit/537.36 (KHTML, like Gecko) '
                              'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.52',
                'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-platform': '"macOS"'
            }
            urllib3.disable_warnings()
            r = requests.get(url=url, headers=headers, params=params,
                             proxies=Common.tunnel_proxies(), verify=False)
            response = r.json()
            if response['errno'] != 0 or response['errmsg'] != '成功':
                Common.logger(log_type).error(f'feeds_response:{response}\n')
            elif len(response['data']['response']['videos']) == 0:
                Common.logger(log_type).warning(f'feeds_response:{response}\n')
            else:
                return response['data']['response']['videos']
            return []
        except Exception as e:
            Common.logger(log_type).error(f'get_channel_feeds异常:{e}\n')
            return []

    @classmethod
    def get_video_url(cls, log_type, video_id, cookies):
        """Resolve a playable URL for `video_id` via haokan.hao123.com.

        Picks the last entry of clarityUrl.  Returns None on any failure so
        the caller can fall back to the feed's own play_url.
        """
        try:
            url = 'https://haokan.hao123.com/v?'
            params = {
                'vid': str(video_id),
                '_format': 'json',
            }
            headers = {
                'Accept': '*/*',
                'Accept-Encoding': 'gzip, deflate',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                'Cache-Control': 'no-cache',
                # 'Connection': 'keep-alive',
                'Content-Type': 'application/x-www-form-urlencoded',
                'Cookie': str(cookies).strip().replace('\n', ''),
                # 'Cookie': Feishu.get_values_batch(log_type, 'haokan', '5LksMx')[0][0],
                'Pragma': 'no-cache',
                'Referer': 'https://haokan.hao123.com/v?vid=' + str(video_id) + '&pd=pc&context=',
                'sec-ch-ua': '"Microsoft Edge";v="107", "Chromium";v="107", "Not=A?Brand";v="24"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-platform': '"macOS"',
                'Sec-Fetch-Dest': 'empty',
                'Sec-Fetch-Mode': 'cors',
                'Sec-Fetch-Site': 'same-origin',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) '
                              'AppleWebKit/537.36 (KHTML, like Gecko) '
                              'Chrome/107.0.0.0 Safari/537.36 Edg/107.0.1418.62',
            }
            urllib3.disable_warnings()
            r = requests.get(url=url, headers=headers, params=params,
                             proxies=Common.tunnel_proxies(), verify=False)
            if r.status_code != 200:
                Common.logger(log_type).info(f'get_video_url_response:{r.text}')
            elif r.json()['errno'] != 0 or len(r.json()['data']) == 0:
                Common.logger(log_type).info(f'get_video_url_response:{r.json()}')
            else:
                clarity_list = r.json()['data']['apiData']['curVideoMeta']['clarityUrl']
                # last entry of the clarity list, as in the original
                return clarity_list[-1]['url']
        except Exception as e:
            Common.logger(log_type).info(f'get_video_url异常:{e}\n')

    @classmethod
    def get_channel_videos(cls, log_type, tab, our_id, env, cookies):
        """Fetch one feed page for `tab` and push every item through download_publish.

        Missing fields default to 0 (sentinel checked downstream).  Bug fix:
        guards against an empty/failed feed instead of iterating None.
        """
        try:
            feeds = cls.get_channel_feeds(log_type, tab, cookies)
            if not feeds:
                return
            for feed in feeds:
                video_title = feed.get('title', 0)
                video_id = feed.get('id', 0)
                play_cnt = feed.get('playcnt', 0)
                # duration arrives as "mm:ss"; convert to seconds
                if 'duration' not in feed:
                    duration = 0
                else:
                    parts = feed['duration'].split(':')
                    duration = int(parts[0]) * 60 + int(parts[-1])
                publish_time = feed.get('publish_time', 0)
                user_name = feed.get('source_name', 0)
                head_url = feed.get('author_avatar', 0)
                # prefer the largest poster available
                if 'poster_big' in feed:
                    cover_url = feed['poster_big']
                elif 'poster_pc' in feed:
                    cover_url = feed['poster_pc']
                elif 'poster_small' in feed:
                    cover_url = feed['poster_small']
                else:
                    cover_url = 0
                # prefer the freshly-resolved URL; fall back to the feed's play_url
                video_url = cls.get_video_url(log_type, video_id, cookies)
                if video_url is None:
                    video_url = feed.get('play_url', 0)
                Common.logger(log_type).info(f'video_title:{video_title}')
                Common.logger(log_type).info(f'play_cnt:{play_cnt}')
                Common.logger(log_type).info(f'duration:{duration}')
                Common.logger(log_type).info(f'video_url:{video_url}')
                video_dict = {'video_title': video_title,
                              'video_id': video_id,
                              'play_cnt': play_cnt,
                              'duration': duration,
                              'publish_time': publish_time,
                              'user_name': user_name,
                              'head_url': head_url,
                              'cover_url': cover_url,
                              'video_url': video_url}
                cls.download_publish(log_type, tab, our_id, video_dict, env)
        except Exception as e:
            Common.logger(log_type).error(f'get_channel_videos异常:{e}\n')

    @classmethod
    def download_publish(cls, log_type, tab, our_id, video_dict, env):
        """Validate one video against rules/filters/dedupe sheets, then download,
        upload via Publish, and record it in the "downloaded" Feishu sheet.

        Fixes over the original: the 4 copy-pasted dedupe checks are a loop,
        the duplicated upload call in the env branches is hoisted, and the
        13-branch tab-name chain is a table lookup (same outputs).
        """
        try:
            if video_dict['video_title'] == 0 or video_dict['video_url'] == 0:
                Common.logger(log_type).info('无效视频\n')
                return
            if cls.download_rule(video_dict['play_cnt'], video_dict['duration']) is False:
                Common.logger(log_type).info('不满足抓取规则\n')
                return
            # `word and` keeps the original behavior of ignoring falsy cells
            if any(word and word in video_dict['video_title'] for word in cls.filter_words(log_type)):
                Common.logger(log_type).info('已中过滤词库\n')
                return
            # dedupe against all four "downloaded" sheets
            for sheet_id in ('5pWipX', '7f05d8', 'A5VCbq', 'kVaSjf'):
                seen_ids = [x for y in Feishu.get_values_batch(log_type, 'haokan', sheet_id) for x in y]
                if video_dict['video_id'] in seen_ids:
                    Common.logger(log_type).info('视频已下载\n')
                    return

            # 下载 -> download cover and video to ./videos/<title>/
            Common.download_method(log_type, 'cover', video_dict['video_title'], video_dict['cover_url'])
            Common.download_method(log_type, 'video', video_dict['video_title'], video_dict['video_url'])
            with open("./videos/" + video_dict['video_title'] + "/" + "info.txt", "a", encoding="UTF-8") as f_a:
                f_a.write(str(video_dict['video_id']) + "\n" +
                          str(video_dict['video_title']) + "\n" +
                          str(video_dict['duration']) + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '0' + "\n" +
                          '1920*1080' + "\n" +
                          str(int(time.time())) + "\n" +
                          str(video_dict['user_name']) + "\n" +
                          str(video_dict['head_url']) + "\n" +
                          str(video_dict['video_url']) + "\n" +
                          str(video_dict['cover_url']) + "\n" +
                          "HAOKAN" + str(int(time.time())))
            Common.logger(log_type).info("==========视频信息已保存至info.txt==========")

            # 上传 -> upload; only the admin link differs between envs
            Common.logger(log_type).info(f"开始上传视频:{video_dict['video_title']}")
            our_video_id = Publish.upload_and_publish(log_type, our_id, env)
            if env == 'dev':
                our_video_link = "https://testadmin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
            else:
                our_video_link = "https://admin.piaoquantv.com/cms/post-detail/" + str(our_video_id) + "/info"
            Common.logger(log_type).info(f"视频上传完成:{video_dict['video_title']}\n")

            # record in the "downloaded" sheet
            Common.logger(log_type).info(f"保存视频至已下载表:{video_dict['video_title']}")
            Feishu.insert_columns(log_type, "haokan", "7f05d8", "ROWS", 1, 2)
            upload_time = int(time.time())
            tab_name = cls.TAB_NAMES.get(tab, '播放量榜')
            values = [[time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(upload_time)),
                       tab_name,
                       video_dict['video_title'],
                       video_dict['video_id'],
                       our_video_link,
                       int(video_dict['play_cnt']),
                       video_dict['duration'],
                       video_dict['publish_time'],
                       video_dict['user_name'],
                       video_dict['head_url'],
                       video_dict['cover_url'],
                       video_dict['video_url']]]
            time.sleep(1)
            Feishu.update_values(log_type, "haokan", "7f05d8", "F2:Z2", values)
            Common.logger(log_type).info(f"视频:{video_dict['video_title']},下载/上传成功\n")
        except Exception as e:
            Common.logger(log_type).error(f'download_publish异常:{e}\n')

    @classmethod
    def get_all_channel_videos(cls, log_type, env):
        """Crawl every channel listed in the Feishu channel sheet, one page each."""
        try:
            channel_dict = cls.get_channel_from_feishu(log_type)
            if len(channel_dict) == 0:
                Common.logger(log_type).warning('频道数量为空\n')
                return
            for k, v in channel_dict.items():
                Common.logger(log_type).info(f'正在获取 {k} 频道视频\n')
                tab, our_id = v.split(',')[0], v.split(',')[1]
                cookies = GetCookies.get_cookies(tab)
                Common.logger(log_type).info(f'cookies:{cookies}\n')
                cls.get_channel_videos(log_type, tab, our_id, env, cookies)
                time.sleep(1)
        except Exception as e:
            Common.logger(log_type).error(f'get_all_channel_videos异常:{e}\n')


if __name__ == '__main__':
    channel_cookies = GetCookies.get_cookies('recommend')
    Channel.get_channel_videos('channel', 'lvyou_new', '6267140', 'dev', channel_cookies)