# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/17
import base64
import json
import os
import random
import shutil
import string
import sys
import time
from datetime import date, timedelta
import requests
import urllib3
from requests.adapters import HTTPAdapter
sys.path.append(os.getcwd())
from common.scheduling_db import MysqlHelper
from common.common import Common
from common.feishu import Feishu
from common.publish import Publish


class SchedulingFollow:
    # Pagination offset for the author-homepage video list
    offset = 0
    platform = "西瓜视频"

    @classmethod
    def get_users(cls, log_type, crawler, task, env, machine):
        link_list = task['spider_link']
        user_list = []
        for link in link_list:
            out_uid = int(link.split("https://www.ixigua.com/home/")[-1].replace("/", "").strip())
            sql = f""" select * from crawler_author_map where spider_link="{link}" """
            our_user_info = MysqlHelper.get_values(log_type=log_type, crawler=crawler, sql=sql, env=env, machine=machine)
            if len(our_user_info) == 0:
                our_uid = 0
                Common.logger(log_type, crawler).info(f"没有站内虚拟账号: {link}\n")
            else:
                our_uid = our_user_info[0]["media_id"]
            user_dict = {
                "out_uid": out_uid,
                "our_uid": our_uid
            }
            user_list.append(user_dict)
        Common.logger(log_type, crawler).info(f"user_list:{user_list}")
        return user_list

    # Download rules: every metric of the video must meet the thresholds in the task
    @classmethod
    def download_rule_scheduling(cls, video_info_dict, task):
        try:
            play_cnt_min = int(task['play_cnt']['min'])
        except (KeyError, TypeError, ValueError):
            play_cnt_min = 0
        try:
            video_like_min = int(task['video_like']['min'])
        except (KeyError, TypeError, ValueError):
            video_like_min = 0
        try:
            share_cnt_min = int(task['share_cnt']['min'])
        except (KeyError, TypeError, ValueError):
            share_cnt_min = 0
        try:
            video_width_min = int(task['video_width']['min'])
        except (KeyError, TypeError, ValueError):
            video_width_min = 0
        try:
            video_height_min = int(task['video_height']['min'])
        except (KeyError, TypeError, ValueError):
            video_height_min = 0
        try:
            duration_min = int(task['duration_min'])
        except (KeyError, TypeError, ValueError):
            duration_min = 0
        try:
            duration_max = int(task['duration_max'])
        except (KeyError, TypeError, ValueError):
            duration_max = 1000000000
        return (int(video_info_dict['play_cnt']) >= play_cnt_min
                and int(video_info_dict['like_cnt']) >= video_like_min
                and int(video_info_dict['share_cnt']) >= share_cnt_min
                and duration_max >= int(video_info_dict['duration']) >= duration_min
                and int(video_info_dict['video_width']) >= video_width_min
                and int(video_info_dict['video_height']) >= video_height_min)

    # Filter-word list from the Feishu sheet
    @classmethod
    def filter_words(cls, log_type, crawler):
        try:
            while True:
                filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
                if filter_words_sheet is None:
                    Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                filter_words_list = []
                for x in filter_words_sheet:
                    for y in x:
                        if y is not None:
                            filter_words_list.append(y)
                return filter_words_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')

    @classmethod
    def random_signature(cls):
        src_digits = string.digits          # digits
        src_uppercase = string.ascii_uppercase  # uppercase letters
        src_lowercase = string.ascii_lowercase  # lowercase letters
        digits_num = random.randint(1, 6)
        uppercase_num = random.randint(1, 26 - digits_num - 1)
        lowercase_num = 26 - (digits_num + uppercase_num)
        password = random.sample(src_digits, digits_num) \
            + random.sample(src_uppercase, uppercase_num) \
            + random.sample(src_lowercase, lowercase_num)
        random.shuffle(password)
        new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
        new_password_start = new_password[0:18]
        new_password_end = new_password[-7:]
        if new_password[18] == '8':
            new_password = new_password_start + 'w' + new_password_end
        elif new_password[18] == '9':
            new_password = new_password_start + 'x' + new_password_end
        elif new_password[18] == '-':
            new_password = new_password_start + 'y' + new_password_end
        elif new_password[18] == '.':
            new_password = new_password_start + 'z' + new_password_end
        else:
            new_password = new_password_start + 'y' + new_password_end
        return new_password
    # Fetch download urls and resolution for one video (gid)
    @classmethod
    def get_video_url(cls, log_type, crawler, gid):
        try:
            url = 'https://www.ixigua.com/api/mixVideo/information?'
            headers = {
                "accept-encoding": "gzip, deflate",
                "accept-language": "zh-CN,zh-Hans;q=0.9",
                "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
                              "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
                "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
            }
            params = {
                'mixId': gid,
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
                           'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
                '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
                              'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
            }
            cookies = {
                'ixigua-a-s': '1',
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
                           'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
                         '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
                'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
                'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
                '__ac_nonce': '06304878000964fdad287',
                '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
                                  'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
                'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
                '_tea_utm_cache_1300': 'undefined',
                'support_avif': 'false',
                'support_webp': 'false',
                'xiguavideopcwebid': '7134967546256016900',
                'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
            }
            urllib3.disable_warnings()
            s = requests.session()
            # max_retries=3: retry each request up to 3 times
            s.mount('http://', HTTPAdapter(max_retries=3))
            s.mount('https://', HTTPAdapter(max_retries=3))
            response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False,
                             proxies=Common.tunnel_proxies(), timeout=5)
            response.close()

            video_url_dict = {
                "video_url": '',
                "audio_url": '',
                "video_width": 0,
                "video_height": 0,
            }
            if 'data' not in response.json() or response.json()['data'] == '':
                Common.logger(log_type, crawler).warning(f'get_video_info: response: {response}')
                return video_url_dict

            video_info = response.json()['data']['gidInformation']['packerData']['video']
            video_resource = video_info.get('videoResource', {})
            # Resource types in priority order; only the first one present is used
            resource_key = next((k for k in ('dash_120fps', 'dash', 'normal') if k in video_resource), None)
            if resource_key is None:
                return video_url_dict
            resource = video_resource[resource_key]

            def pad_base64(s_url):
                # backup_url_1 comes back base64-encoded without padding; pad to a multiple of 4 before decoding
                return s_url + '=' * (-len(s_url) % 4)

            video_list = resource.get('video_list', {})
            # Prefer the highest available quality in video_list
            quality_key = next((k for k in ('video_4', 'video_3', 'video_2', 'video_1') if k in video_list), None)
            if quality_key is not None:
                item = video_list[quality_key]
                video_url_dict["video_url"] = base64.b64decode(pad_base64(item['backup_url_1'])).decode('utf8')
                video_url_dict["audio_url"] = video_url_dict["video_url"]
                video_url_dict["video_width"] = item['vwidth']
                video_url_dict["video_height"] = item['vheight']
            elif ('dynamic_video' in resource
                  and len(resource['dynamic_video'].get('dynamic_video_list', [])) != 0
                  and len(resource['dynamic_video'].get('dynamic_audio_list', [])) != 0):
                # Fall back to the separate video / audio streams
                video_item = resource['dynamic_video']['dynamic_video_list'][-1]
                audio_item = resource['dynamic_video']['dynamic_audio_list'][-1]
                video_url_dict["video_url"] = base64.b64decode(pad_base64(video_item['backup_url_1'])).decode('utf8')
                video_url_dict["audio_url"] = base64.b64decode(pad_base64(audio_item['backup_url_1'])).decode('utf8')
                video_url_dict["video_width"] = video_item['vwidth']
                video_url_dict["video_height"] = video_item['vheight']
            return video_url_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
    @classmethod
    def get_videolist(cls, log_type, crawler, task, our_uid, out_uid, oss_endpoint, env, machine):
        try:
            signature = cls.random_signature()
            while True:
                url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
                params = {
                    'to_user_id': str(out_uid),
                    'offset': str(cls.offset),
                    'limit': '30',
                    'maxBehotTime': '0',
                    'order': 'new',
                    'isHome': '0',
                    '_signature': signature,
                }
                headers = {
                    'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                }
                urllib3.disable_warnings()
                s = requests.session()
                # max_retries=3: retry each request up to 3 times
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                response = s.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(),
                                 verify=False, timeout=5)
                response.close()
                cls.offset += 30
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'data' not in response.text:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'videoList' not in response.json()["data"]:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                    cls.offset = 0
                    return
                else:
                    videoList = response.json()['data']['videoList']
                    for i in range(len(videoList)):
                        # video_title
                        video_title = videoList[i].get('title', '').strip().replace('手游', '') \
                            .replace('/', '').replace('\\/', '').replace('\n', '')
                        # video_id
                        video_id = videoList[i].get('video_id', 0)
                        # gid
                        gid = videoList[i].get('gid', 0)
                        # play_cnt
                        play_cnt = videoList[i].get('video_detail_info', {}).get('video_watch_count', 0)
                        # comment_cnt
                        comment_cnt = videoList[i].get('comment_count', 0)
                        # like_cnt
                        like_cnt = videoList[i].get('digg_count', 0)
                        # share_cnt (not returned by this API)
                        share_cnt = 0
                        # video_duration
                        video_duration = int(videoList[i].get('video_duration', 0))
                        # publish_time
                        publish_time = videoList[i].get('publish_time', 0)
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                        # is_top
                        is_top = videoList[i].get('is_top', 0)
                        # user_name / user_id / avatar_url
                        user_info = videoList[i].get('user_info', {})
                        user_name = user_info.get('name', 0)
                        user_id = user_info.get('user_id', 0)
                        avatar_url = user_info.get('avatar_url', 0)
                        # cover_url
                        detail_image = videoList[i].get('video_detail_info', {}).get('detail_video_large_image', 0)
                        if detail_image == 0:
                            cover_url = 0
                        elif 'url' in detail_image:
                            cover_url = detail_image['url']
                        else:
                            cover_url = detail_image['url_list'][0]['url']

                        # Publish-time rule: an absolute timestamp takes precedence over "within the last N days"
                        min_publish_time = int(task["min_publish_time"])
                        min_publish_day = int(task["min_publish_day"])
                        min_publish_day = (date.today() + timedelta(days=-min_publish_day)).strftime("%Y-%m-%d")
                        min_publish_day = int(time.mktime(time.strptime(min_publish_day, "%Y-%m-%d")))
                        publish_time_rule = min_publish_time if min_publish_time > 0 else min_publish_day

                        if gid == 0 or video_id == 0 or cover_url == 0:
                            Common.logger(log_type, crawler).info('无效视频\n')
                        elif is_top is True and int(publish_time) < publish_time_rule:
                            Common.logger(log_type, crawler).info('置顶视频,且发布时间超过抓取时间\n')
                        elif int(publish_time) < publish_time_rule:
                            Common.logger(log_type, crawler).info('发布时间超过抓取时间\n')
                            cls.offset = 0
                            return
                        else:
                            video_url_dict = cls.get_video_url(log_type, crawler, gid)
                            video_url = video_url_dict["video_url"]
                            audio_url = video_url_dict["audio_url"]
                            video_width = video_url_dict["video_width"]
                            video_height = video_url_dict["video_height"]
                            video_dict = {'video_title': video_title,
                                          'video_id': video_id,
                                          'gid': gid,
                                          'play_cnt': play_cnt,
                                          'comment_cnt': comment_cnt,
                                          'like_cnt': like_cnt,
                                          'share_cnt': share_cnt,
                                          'video_width': video_width,
                                          'video_height': video_height,
                                          'duration': video_duration,
                                          'publish_time_stamp': publish_time,
                                          'publish_time_str': publish_time_str,
                                          'is_top': is_top,
                                          'user_name': user_name,
                                          'user_id': user_id,
                                          'avatar_url': avatar_url,
                                          'cover_url': cover_url,
                                          'audio_url': audio_url,
                                          'video_url': video_url,
                                          'session': signature}
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            cls.download_publish(log_type=log_type,
                                                 crawler=crawler,
                                                 video_dict=video_dict,
                                                 task=task,
                                                 strategy=task["task_name"],
                                                 our_uid=our_uid,
                                                 oss_endpoint=oss_endpoint,
                                                 env=env,
                                                 machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")

    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env, machine):
        sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
        return len(repeat_video)
    # Download / upload
    @classmethod
    def download_publish(cls, log_type, crawler, strategy, video_dict, task, our_uid, oss_endpoint, env, machine):
        try:
            if cls.download_rule_scheduling(video_dict, task) is False:
                Common.logger(log_type, crawler).info('不满足抓取规则\n')
            elif any(word in video_dict['video_title'] for word in cls.filter_words(log_type, crawler)):
                Common.logger(log_type, crawler).info(f"标题已中过滤词:{video_dict['video_title']}\n")
            elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                Common.logger(log_type, crawler).info('视频已下载\n')
            else:
                # Download the video
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video',
                                       title=video_dict['video_title'], url=video_dict['video_url'])
                # Download the audio
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio',
                                       title=video_dict['video_title'], url=video_dict['audio_url'])
                # Merge audio and video
                Common.video_compose(log_type=log_type, crawler=crawler,
                                     video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
                ffmpeg_dict = Common.ffmpeg(log_type, crawler,
                                            f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
                if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
                    Common.logger(log_type, crawler).warning('下载的视频无效,已删除\n')
                    # Remove the video folder
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Download the cover
                Common.download_method(log_type=log_type, crawler=crawler, text='cover',
                                       title=video_dict['video_title'], url=video_dict['cover_url'])
                # Save video info to txt
                Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)

                # Upload the video
                Common.logger(log_type, crawler).info("开始上传视频...")
                our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                          crawler=crawler,
                                                          strategy=strategy,
                                                          our_uid=our_uid,
                                                          env=env,
                                                          oss_endpoint=oss_endpoint)
                if env == 'dev':
                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                else:
                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                Common.logger(log_type, crawler).info("视频上传完成")

                if our_video_id is None:
                    # Remove the video folder
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return

                # Write the video row to the Feishu sheet
                Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
                upload_time = int(time.time())
                values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                           "定向榜",
                           video_dict['video_title'],
                           str(video_dict['video_id']),
                           our_video_link,
                           video_dict['gid'],
                           video_dict['play_cnt'],
                           video_dict['comment_cnt'],
                           video_dict['like_cnt'],
                           video_dict['share_cnt'],
                           video_dict['duration'],
                           str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                           video_dict['publish_time_str'],
                           video_dict['user_name'],
                           video_dict['user_id'],
                           video_dict['avatar_url'],
                           video_dict['cover_url'],
                           video_dict['video_url'],
                           video_dict['audio_url']]]
                time.sleep(1)
                Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
                Common.logger(log_type, crawler).info('视频已保存至云文档\n')

                rule_dict = {
                    "play_cnt": task["play_cnt"],
                    "video_width": task["video_width"],
                    "video_height": task["video_height"],
                    "video_like": task["video_like"],
                    "share_cnt": task["share_cnt"],
                    "duration": {"min": task["duration_min"], "max": task["duration_max"]}
                }

                # Save video info to the database
                insert_sql = f""" insert into crawler_video(video_id,
                                user_id,
                                out_user_id,
                                platform,
                                strategy,
                                out_video_id,
                                video_title,
                                cover_url,
                                video_url,
                                duration,
                                publish_time,
                                play_cnt,
                                crawler_rule,
                                width,
                                height)
                                values({our_video_id},
                                {our_uid},
                                "{video_dict['user_id']}",
                                "{cls.platform}",
                                "定向爬虫策略",
                                "{video_dict['video_id']}",
                                "{video_dict['video_title']}",
                                "{video_dict['cover_url']}",
                                "{video_dict['video_url']}",
                                {int(video_dict['duration'])},
                                "{video_dict['publish_time_str']}",
                                {int(video_dict['play_cnt'])},
                                '{json.dumps(rule_dict)}',
                                {int(video_dict['video_width'])},
                                {int(video_dict['video_height'])}) """
                Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
                MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
        except Exception as e:
            Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')

    @classmethod
    def get_follow_videos(cls, log_type, crawler, task, oss_endpoint, env, machine):
        try:
            user_list = cls.get_users(log_type=log_type, crawler=crawler, task=task, env=env, machine=machine)
            for user in user_list:
                out_uid = user["out_uid"]
                our_uid = int(user["our_uid"])
                if our_uid == 0:
                    # Skip authors that have no in-house virtual account
                    continue
                Common.logger(log_type, crawler).info(f"开始抓取 {out_uid} 用户主页视频\n")
                cls.get_videolist(log_type=log_type,
                                  crawler=crawler,
                                  task=task,
                                  our_uid=our_uid,
                                  out_uid=out_uid,
                                  oss_endpoint=oss_endpoint,
                                  env=env,
                                  machine=machine)
                cls.offset = 0
                time.sleep(1)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")


if __name__ == '__main__':
    # SchedulingFollow.get_users(log_type="follow",
    #                            crawler="xigua",
    #                            spider_rule="['https://www.ixigua.com/home/95420624045', 'https://www.ixigua.com/home/6431477489']",
    #                            env="dev",
    #                            machine="local")
    print(SchedulingFollow.repeat_video("follow", "xigua", "v0201ag10000ce3jcjbc77u8jsplpgrg", "dev", "local"))
    pass
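
# A minimal sketch of the scheduler `task` dict this spider reads, assembled from the keys
# accessed above (get_users, download_rule_scheduling, get_videolist, download_publish).
# The name EXAMPLE_TASK and every value below are illustrative assumptions for reference
# only, not the scheduler's actual configuration.
EXAMPLE_TASK = {
    "task_name": "西瓜视频定向抓取",                               # used as the publish strategy name
    "spider_link": ["https://www.ixigua.com/home/95420624045"],   # author homepage links
    "play_cnt": {"min": 0},                                        # minimum play count
    "video_like": {"min": 0},                                      # minimum like count
    "share_cnt": {"min": 0},                                       # minimum share count
    "video_width": {"min": 0},                                     # minimum width, px
    "video_height": {"min": 0},                                    # minimum height, px
    "duration_min": 0,                                             # minimum duration, seconds
    "duration_max": 1000000000,                                    # maximum duration, seconds
    "min_publish_time": 0,                                         # absolute earliest publish timestamp; 0 = unused
    "min_publish_day": 30,                                         # otherwise: only videos from the last N days
}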