xigua_follow.py 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. from hashlib import md5
  13. import requests
  14. import urllib3
  15. from requests.adapters import HTTPAdapter
  16. from selenium.webdriver import DesiredCapabilities
  17. from selenium.webdriver.chrome.service import Service
  18. from selenium.webdriver.common.by import By
  19. from selenium import webdriver
  20. from lxml import etree
  21. sys.path.append(os.getcwd())
  22. from common.db import MysqlHelper
  23. from common.getuser import getUser
  24. from common.common import Common
  25. from common.feishu import Feishu
  26. from common.publish import Publish
class Follow:
    """Targeted (followed-user) crawler for 西瓜视频 (Xigua Video)."""
    # Paging offset while walking a user's personal home-page video list.
    offset = 0
    # Platform display name; written into out_user_dict by get_user_list.
    platform = "西瓜视频"
    # Crawler tag string; written into out_user_dict by get_user_list.
    tag = "西瓜视频爬虫,定向爬虫策略"
  32. @classmethod
  33. def get_rule(cls, log_type, crawler):
  34. try:
  35. while True:
  36. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  37. if rule_sheet is None:
  38. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  39. time.sleep(10)
  40. continue
  41. rule_dict = {
  42. "play_cnt": int(rule_sheet[1][2]),
  43. "comment_cnt": int(rule_sheet[2][2]),
  44. "like_cnt": int(rule_sheet[3][2]),
  45. "duration": int(rule_sheet[4][2]),
  46. "publish_time": int(rule_sheet[5][2]),
  47. "video_width": int(rule_sheet[6][2]),
  48. "video_height": int(rule_sheet[7][2]),
  49. }
  50. return rule_dict
  51. except Exception as e:
  52. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  53. # 下载规则
  54. @classmethod
  55. def download_rule(cls, video_info_dict, rule_dict):
  56. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  57. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  58. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  59. if video_info_dict['duration'] >= rule_dict['duration']:
  60. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  61. or video_info_dict['video_height'] >= rule_dict['video_height']:
  62. return True
  63. else:
  64. return False
  65. else:
  66. return False
  67. else:
  68. return False
  69. else:
  70. return False
  71. else:
  72. return False
  73. # 过滤词库
  74. @classmethod
  75. def filter_words(cls, log_type, crawler):
  76. try:
  77. while True:
  78. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  79. if filter_words_sheet is None:
  80. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  81. continue
  82. filter_words_list = []
  83. for x in filter_words_sheet:
  84. for y in x:
  85. if y is None:
  86. pass
  87. else:
  88. filter_words_list.append(y)
  89. return filter_words_list
  90. except Exception as e:
  91. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  92. @classmethod
  93. def get_out_user_info(cls, log_type, crawler, out_uid):
  94. try:
  95. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  96. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  97. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  98. url = f"https://www.ixigua.com/home/{out_uid}"
  99. urllib3.disable_warnings()
  100. s = requests.session()
  101. # max_retries=3 重试3次
  102. s.mount('http://', HTTPAdapter(max_retries=3))
  103. s.mount('https://', HTTPAdapter(max_retries=3))
  104. response = s.get(url=url, headers=headers, proxies=Common.tunnel_proxies(), verify=False, timeout=5).text
  105. html = etree.HTML(response)
  106. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  107. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  108. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  109. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  110. if "万" in out_follow_str:
  111. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  112. else:
  113. out_follow = int(out_follow_str.replace(",", ""))
  114. if "万" in out_fans_str:
  115. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  116. else:
  117. out_fans = int(out_fans_str.replace(",", ""))
  118. if "万" in out_like_str:
  119. out_like = int(float(out_like_str.split("万")[0])*10000)
  120. else:
  121. out_like = int(out_like_str.replace(",", ""))
  122. out_user_dict = {
  123. "out_follow": out_follow,
  124. "out_fans": out_fans,
  125. "out_like": out_like,
  126. "out_avatar_url": out_avatar_url,
  127. }
  128. # for k, v in out_user_dict.items():
  129. # print(f"{k}:{v}")
  130. return out_user_dict
  131. except Exception as e:
  132. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  133. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  134. @classmethod
  135. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  136. try:
  137. while True:
  138. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  139. if user_sheet is None:
  140. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  141. continue
  142. our_user_list = []
  143. for i in range(1, len(user_sheet)):
  144. # for i in range(428, len(user_sheet)):
  145. out_uid = user_sheet[i][2]
  146. user_name = user_sheet[i][3]
  147. our_uid = user_sheet[i][6]
  148. our_user_link = user_sheet[i][7]
  149. if out_uid is None or user_name is None:
  150. Common.logger(log_type, crawler).info("空行\n")
  151. else:
  152. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  153. if our_uid is None:
  154. try:
  155. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  156. except Exception as e:
  157. continue
  158. out_user_dict = {
  159. "out_uid": out_uid,
  160. "user_name": user_name,
  161. "out_avatar_url": out_user_info["out_avatar_url"],
  162. "out_create_time": '',
  163. "out_tag": '',
  164. "out_play_cnt": 0,
  165. "out_fans": out_user_info["out_fans"],
  166. "out_follow": out_user_info["out_follow"],
  167. "out_friend": 0,
  168. "out_like": out_user_info["out_like"],
  169. "platform": cls.platform,
  170. "tag": cls.tag,
  171. }
  172. our_user_dict = getUser.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  173. our_uid = our_user_dict['our_uid']
  174. our_user_link = our_user_dict['our_user_link']
  175. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  176. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  177. our_user_list.append(our_user_dict)
  178. else:
  179. our_user_dict = {
  180. 'out_uid': out_uid,
  181. 'user_name': user_name,
  182. 'our_uid': our_uid,
  183. 'our_user_link': our_user_link,
  184. }
  185. our_user_list.append(our_user_dict)
  186. return our_user_list
  187. except Exception as e:
  188. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  189. @classmethod
  190. def random_signature(cls):
  191. src_digits = string.digits # string_数字
  192. src_uppercase = string.ascii_uppercase # string_大写字母
  193. src_lowercase = string.ascii_lowercase # string_小写字母
  194. digits_num = random.randint(1, 6)
  195. uppercase_num = random.randint(1, 26 - digits_num - 1)
  196. lowercase_num = 26 - (digits_num + uppercase_num)
  197. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  198. src_lowercase, lowercase_num)
  199. random.shuffle(password)
  200. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  201. new_password_start = new_password[0:18]
  202. new_password_end = new_password[-7:]
  203. if new_password[18] == '8':
  204. new_password = new_password_start + 'w' + new_password_end
  205. elif new_password[18] == '9':
  206. new_password = new_password_start + 'x' + new_password_end
  207. elif new_password[18] == '-':
  208. new_password = new_password_start + 'y' + new_password_end
  209. elif new_password[18] == '.':
  210. new_password = new_password_start + 'z' + new_password_end
  211. else:
  212. new_password = new_password_start + 'y' + new_password_end
  213. return new_password
  214. @classmethod
  215. def get_signature(cls, log_type, crawler, out_uid, machine):
  216. try:
  217. # 打印请求配置
  218. ca = DesiredCapabilities.CHROME
  219. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  220. # 不打开浏览器运行
  221. chrome_options = webdriver.ChromeOptions()
  222. chrome_options.add_argument("--headless")
  223. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  224. chrome_options.add_argument("--no-sandbox")
  225. # driver初始化
  226. if machine == 'aliyun' or machine == 'aliyun_hk':
  227. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  228. elif machine == 'macpro':
  229. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  230. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  231. elif machine == 'macair':
  232. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  233. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  234. else:
  235. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  236. driver.implicitly_wait(10)
  237. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  238. time.sleep(3)
  239. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  240. signature = data_src.split("x-signature=")[-1]
  241. return signature
  242. except Exception as e:
  243. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  244. # 获取视频详情
  245. @classmethod
  246. def get_video_url(cls, log_type, crawler, gid):
  247. try:
  248. url = 'https://www.ixigua.com/api/mixVideo/information?'
  249. headers = {
  250. "accept-encoding": "gzip, deflate",
  251. "accept-language": "zh-CN,zh-Hans;q=0.9",
  252. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  253. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  254. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  255. }
  256. params = {
  257. 'mixId': gid,
  258. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  259. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  260. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  261. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  262. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  263. }
  264. cookies = {
  265. 'ixigua-a-s': '1',
  266. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  267. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  268. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  269. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  270. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  271. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  272. '__ac_nonce': '06304878000964fdad287',
  273. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  274. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  275. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  276. '_tea_utm_cache_1300': 'undefined',
  277. 'support_avif': 'false',
  278. 'support_webp': 'false',
  279. 'xiguavideopcwebid': '7134967546256016900',
  280. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  281. }
  282. urllib3.disable_warnings()
  283. s = requests.session()
  284. # max_retries=3 重试3次
  285. s.mount('http://', HTTPAdapter(max_retries=3))
  286. s.mount('https://', HTTPAdapter(max_retries=3))
  287. response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies(), timeout=5)
  288. response.close()
  289. if 'data' not in response.json() or response.json()['data'] == '':
  290. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  291. else:
  292. video_info = response.json()['data']['gidInformation']['packerData']['video']
  293. video_url_dict = {}
  294. # video_url
  295. if 'videoResource' not in video_info:
  296. video_url_dict["video_url"] = ''
  297. video_url_dict["audio_url"] = ''
  298. video_url_dict["video_width"] = 0
  299. video_url_dict["video_height"] = 0
  300. elif 'dash_120fps' in video_info['videoResource']:
  301. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  302. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  303. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  304. if len(video_url) % 3 == 1:
  305. video_url += '=='
  306. elif len(video_url) % 3 == 2:
  307. video_url += '='
  308. elif len(audio_url) % 3 == 1:
  309. audio_url += '=='
  310. elif len(audio_url) % 3 == 2:
  311. audio_url += '='
  312. video_url = base64.b64decode(video_url).decode('utf8')
  313. audio_url = base64.b64decode(audio_url).decode('utf8')
  314. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  315. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  316. video_url_dict["video_url"] = video_url
  317. video_url_dict["audio_url"] = audio_url
  318. video_url_dict["video_width"] = video_width
  319. video_url_dict["video_height"] = video_height
  320. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  321. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  322. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  323. if len(video_url) % 3 == 1:
  324. video_url += '=='
  325. elif len(video_url) % 3 == 2:
  326. video_url += '='
  327. elif len(audio_url) % 3 == 1:
  328. audio_url += '=='
  329. elif len(audio_url) % 3 == 2:
  330. audio_url += '='
  331. video_url = base64.b64decode(video_url).decode('utf8')
  332. audio_url = base64.b64decode(audio_url).decode('utf8')
  333. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  334. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  335. video_url_dict["video_url"] = video_url
  336. video_url_dict["audio_url"] = audio_url
  337. video_url_dict["video_width"] = video_width
  338. video_url_dict["video_height"] = video_height
  339. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  340. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  341. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  342. if len(video_url) % 3 == 1:
  343. video_url += '=='
  344. elif len(video_url) % 3 == 2:
  345. video_url += '='
  346. elif len(audio_url) % 3 == 1:
  347. audio_url += '=='
  348. elif len(audio_url) % 3 == 2:
  349. audio_url += '='
  350. video_url = base64.b64decode(video_url).decode('utf8')
  351. audio_url = base64.b64decode(audio_url).decode('utf8')
  352. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  353. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  354. video_url_dict["video_url"] = video_url
  355. video_url_dict["audio_url"] = audio_url
  356. video_url_dict["video_width"] = video_width
  357. video_url_dict["video_height"] = video_height
  358. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  359. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  360. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  361. if len(video_url) % 3 == 1:
  362. video_url += '=='
  363. elif len(video_url) % 3 == 2:
  364. video_url += '='
  365. elif len(audio_url) % 3 == 1:
  366. audio_url += '=='
  367. elif len(audio_url) % 3 == 2:
  368. audio_url += '='
  369. video_url = base64.b64decode(video_url).decode('utf8')
  370. audio_url = base64.b64decode(audio_url).decode('utf8')
  371. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  372. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  373. video_url_dict["video_url"] = video_url
  374. video_url_dict["audio_url"] = audio_url
  375. video_url_dict["video_width"] = video_width
  376. video_url_dict["video_height"] = video_height
  377. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  378. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  379. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  380. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  381. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  382. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  383. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  384. if len(video_url) % 3 == 1:
  385. video_url += '=='
  386. elif len(video_url) % 3 == 2:
  387. video_url += '='
  388. elif len(audio_url) % 3 == 1:
  389. audio_url += '=='
  390. elif len(audio_url) % 3 == 2:
  391. audio_url += '='
  392. video_url = base64.b64decode(video_url).decode('utf8')
  393. audio_url = base64.b64decode(audio_url).decode('utf8')
  394. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  395. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  396. video_url_dict["video_url"] = video_url
  397. video_url_dict["audio_url"] = audio_url
  398. video_url_dict["video_width"] = video_width
  399. video_url_dict["video_height"] = video_height
  400. else:
  401. video_url_dict["video_url"] = ''
  402. video_url_dict["audio_url"] = ''
  403. video_url_dict["video_width"] = 0
  404. video_url_dict["video_height"] = 0
  405. elif 'dash' in video_info['videoResource']:
  406. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  407. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  408. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  409. if len(video_url) % 3 == 1:
  410. video_url += '=='
  411. elif len(video_url) % 3 == 2:
  412. video_url += '='
  413. elif len(audio_url) % 3 == 1:
  414. audio_url += '=='
  415. elif len(audio_url) % 3 == 2:
  416. audio_url += '='
  417. video_url = base64.b64decode(video_url).decode('utf8')
  418. audio_url = base64.b64decode(audio_url).decode('utf8')
  419. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  420. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  421. video_url_dict["video_url"] = video_url
  422. video_url_dict["audio_url"] = audio_url
  423. video_url_dict["video_width"] = video_width
  424. video_url_dict["video_height"] = video_height
  425. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  426. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  427. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  428. if len(video_url) % 3 == 1:
  429. video_url += '=='
  430. elif len(video_url) % 3 == 2:
  431. video_url += '='
  432. elif len(audio_url) % 3 == 1:
  433. audio_url += '=='
  434. elif len(audio_url) % 3 == 2:
  435. audio_url += '='
  436. video_url = base64.b64decode(video_url).decode('utf8')
  437. audio_url = base64.b64decode(audio_url).decode('utf8')
  438. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  439. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  440. video_url_dict["video_url"] = video_url
  441. video_url_dict["audio_url"] = audio_url
  442. video_url_dict["video_width"] = video_width
  443. video_url_dict["video_height"] = video_height
  444. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  445. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  446. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  447. if len(video_url) % 3 == 1:
  448. video_url += '=='
  449. elif len(video_url) % 3 == 2:
  450. video_url += '='
  451. elif len(audio_url) % 3 == 1:
  452. audio_url += '=='
  453. elif len(audio_url) % 3 == 2:
  454. audio_url += '='
  455. video_url = base64.b64decode(video_url).decode('utf8')
  456. audio_url = base64.b64decode(audio_url).decode('utf8')
  457. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  458. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  459. video_url_dict["video_url"] = video_url
  460. video_url_dict["audio_url"] = audio_url
  461. video_url_dict["video_width"] = video_width
  462. video_url_dict["video_height"] = video_height
  463. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  464. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  465. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  466. if len(video_url) % 3 == 1:
  467. video_url += '=='
  468. elif len(video_url) % 3 == 2:
  469. video_url += '='
  470. elif len(audio_url) % 3 == 1:
  471. audio_url += '=='
  472. elif len(audio_url) % 3 == 2:
  473. audio_url += '='
  474. video_url = base64.b64decode(video_url).decode('utf8')
  475. audio_url = base64.b64decode(audio_url).decode('utf8')
  476. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  477. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  478. video_url_dict["video_url"] = video_url
  479. video_url_dict["audio_url"] = audio_url
  480. video_url_dict["video_width"] = video_width
  481. video_url_dict["video_height"] = video_height
  482. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  483. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  484. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  485. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  486. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  487. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  488. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  489. if len(video_url) % 3 == 1:
  490. video_url += '=='
  491. elif len(video_url) % 3 == 2:
  492. video_url += '='
  493. elif len(audio_url) % 3 == 1:
  494. audio_url += '=='
  495. elif len(audio_url) % 3 == 2:
  496. audio_url += '='
  497. video_url = base64.b64decode(video_url).decode('utf8')
  498. audio_url = base64.b64decode(audio_url).decode('utf8')
  499. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  500. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  501. video_url_dict["video_url"] = video_url
  502. video_url_dict["audio_url"] = audio_url
  503. video_url_dict["video_width"] = video_width
  504. video_url_dict["video_height"] = video_height
  505. else:
  506. video_url_dict["video_url"] = ''
  507. video_url_dict["audio_url"] = ''
  508. video_url_dict["video_width"] = 0
  509. video_url_dict["video_height"] = 0
  510. elif 'normal' in video_info['videoResource']:
  511. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  512. video_info['videoResource']['normal']['video_list']:
  513. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  514. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  515. if len(video_url) % 3 == 1:
  516. video_url += '=='
  517. elif len(video_url) % 3 == 2:
  518. video_url += '='
  519. elif len(audio_url) % 3 == 1:
  520. audio_url += '=='
  521. elif len(audio_url) % 3 == 2:
  522. audio_url += '='
  523. video_url = base64.b64decode(video_url).decode('utf8')
  524. audio_url = base64.b64decode(audio_url).decode('utf8')
  525. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  526. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  527. video_url_dict["video_url"] = video_url
  528. video_url_dict["audio_url"] = audio_url
  529. video_url_dict["video_width"] = video_width
  530. video_url_dict["video_height"] = video_height
  531. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  532. video_info['videoResource']['normal']['video_list']:
  533. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  534. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  535. if len(video_url) % 3 == 1:
  536. video_url += '=='
  537. elif len(video_url) % 3 == 2:
  538. video_url += '='
  539. elif len(audio_url) % 3 == 1:
  540. audio_url += '=='
  541. elif len(audio_url) % 3 == 2:
  542. audio_url += '='
  543. video_url = base64.b64decode(video_url).decode('utf8')
  544. audio_url = base64.b64decode(audio_url).decode('utf8')
  545. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  546. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  547. video_url_dict["video_url"] = video_url
  548. video_url_dict["audio_url"] = audio_url
  549. video_url_dict["video_width"] = video_width
  550. video_url_dict["video_height"] = video_height
  551. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  552. video_info['videoResource']['normal']['video_list']:
  553. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  554. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  555. if len(video_url) % 3 == 1:
  556. video_url += '=='
  557. elif len(video_url) % 3 == 2:
  558. video_url += '='
  559. elif len(audio_url) % 3 == 1:
  560. audio_url += '=='
  561. elif len(audio_url) % 3 == 2:
  562. audio_url += '='
  563. video_url = base64.b64decode(video_url).decode('utf8')
  564. audio_url = base64.b64decode(audio_url).decode('utf8')
  565. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  566. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  567. video_url_dict["video_url"] = video_url
  568. video_url_dict["audio_url"] = audio_url
  569. video_url_dict["video_width"] = video_width
  570. video_url_dict["video_height"] = video_height
  571. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  572. video_info['videoResource']['normal']['video_list']:
  573. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  574. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  575. if len(video_url) % 3 == 1:
  576. video_url += '=='
  577. elif len(video_url) % 3 == 2:
  578. video_url += '='
  579. elif len(audio_url) % 3 == 1:
  580. audio_url += '=='
  581. elif len(audio_url) % 3 == 2:
  582. audio_url += '='
  583. video_url = base64.b64decode(video_url).decode('utf8')
  584. audio_url = base64.b64decode(audio_url).decode('utf8')
  585. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  586. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  587. video_url_dict["video_url"] = video_url
  588. video_url_dict["audio_url"] = audio_url
  589. video_url_dict["video_width"] = video_width
  590. video_url_dict["video_height"] = video_height
  591. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  592. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  593. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  594. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  595. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  596. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  597. 'backup_url_1']
  598. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  599. 'backup_url_1']
  600. if len(video_url) % 3 == 1:
  601. video_url += '=='
  602. elif len(video_url) % 3 == 2:
  603. video_url += '='
  604. elif len(audio_url) % 3 == 1:
  605. audio_url += '=='
  606. elif len(audio_url) % 3 == 2:
  607. audio_url += '='
  608. video_url = base64.b64decode(video_url).decode('utf8')
  609. audio_url = base64.b64decode(audio_url).decode('utf8')
  610. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  611. 'vwidth']
  612. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  613. 'vheight']
  614. video_url_dict["video_url"] = video_url
  615. video_url_dict["audio_url"] = audio_url
  616. video_url_dict["video_width"] = video_width
  617. video_url_dict["video_height"] = video_height
  618. else:
  619. video_url_dict["video_url"] = ''
  620. video_url_dict["audio_url"] = ''
  621. video_url_dict["video_width"] = 0
  622. video_url_dict["video_height"] = 0
  623. else:
  624. video_url_dict["video_url"] = ''
  625. video_url_dict["audio_url"] = ''
  626. video_url_dict["video_width"] = 0
  627. video_url_dict["video_height"] = 0
  628. return video_url_dict
  629. except Exception as e:
  630. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
        """Page through an ixigua.com author's video list and hand each
        qualifying video to download_publish.

        Pagination is driven by the class-level cursor ``cls.offset``
        (30 items per request); the cursor is reset to 0 on every terminal
        condition (non-200 response, missing data, or a non-pinned video
        older than the rule's publish-time window — presumably the list is
        newest-first, so anything after it would be older too; TODO confirm).

        :param log_type: logger channel name
        :param crawler: crawler name, also used as the log namespace
        :param strategy: crawl-strategy label forwarded to download_publish
        :param our_uid: destination (our platform) user id for publishing
        :param out_uid: the ixigua author id whose videos are fetched
        :param oss_endpoint: OSS endpoint forwarded to the publisher
        :param env: runtime environment ('dev' / 'prod')
        :param machine: machine tag forwarded to DB helpers
        :return: None; all failures are logged, never raised
        """
        try:
            signature = cls.random_signature()
            while True:
                url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
                params = {
                    'to_user_id': str(out_uid),
                    'offset': str(cls.offset),
                    'limit': '30',
                    'maxBehotTime': '0',
                    'order': 'new',
                    'isHome': '0',
                    # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                    # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                    '_signature': signature,
                }
                headers = {
                    # 'authority': 'www.ixigua.com',
                    # 'accept': 'application/json, text/plain, */*',
                    # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                    # 'cache-control': 'no-cache',
                    # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                    # 'pragma': 'no-cache',
                    'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                    # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                    # 'sec-ch-ua-mobile': '?0',
                    # 'sec-ch-ua-platform': '"macOS"',
                    # 'sec-fetch-dest': 'empty',
                    # 'sec-fetch-mode': 'cors',
                    # 'sec-fetch-site': 'same-origin',
                    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                    # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
                }
                urllib3.disable_warnings()
                s = requests.session()
                # Retry each mounted scheme up to 3 times (max_retries=3).
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                response = s.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False, timeout=5)
                response.close()
                # Advance the shared pagination cursor before validating the
                # response, so a retry after a failure starts from page 0.
                cls.offset += 30
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'data' not in response.text:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'videoList' not in response.json()["data"]:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                    cls.offset = 0
                    return
                else:
                    videoList = response.json()['data']['videoList']
                    for i in range(len(videoList)):
                        # video_title — sanitized; missing fields throughout
                        # this loop fall back to 0 (not None / '') and are
                        # treated as "invalid" further down.
                        if 'title' not in videoList[i]:
                            video_title = 0
                        else:
                            video_title = videoList[i]['title'].strip().replace('手游', '') \
                                .replace('/', '').replace('\/', '').replace('\n', '')
                        # video_id
                        if 'video_id' not in videoList[i]:
                            video_id = 0
                        else:
                            video_id = videoList[i]['video_id']
                        # gid — group id, later used to resolve media URLs
                        if 'gid' not in videoList[i]:
                            gid = 0
                        else:
                            gid = videoList[i]['gid']
                        # play_cnt
                        if 'video_detail_info' not in videoList[i]:
                            play_cnt = 0
                        elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                            play_cnt = 0
                        else:
                            play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                        # comment_cnt
                        if 'comment_count' not in videoList[i]:
                            comment_cnt = 0
                        else:
                            comment_cnt = videoList[i]['comment_count']
                        # like_cnt
                        if 'digg_count' not in videoList[i]:
                            like_cnt = 0
                        else:
                            like_cnt = videoList[i]['digg_count']
                        # share_cnt — the API response carries no share count
                        share_cnt = 0
                        # video_duration (seconds — presumably; verify upstream)
                        if 'video_duration' not in videoList[i]:
                            video_duration = 0
                        else:
                            video_duration = int(videoList[i]['video_duration'])
                        # publish time (epoch seconds) and its display string
                        if 'publish_time' not in videoList[i]:
                            publish_time = 0
                        else:
                            publish_time = videoList[i]['publish_time']
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                        # is_top — pinned flag; NOTE(review): compared with
                        # `is True` below, so an integer 1 from the API would
                        # not match — confirm the API's type.
                        if 'is_top' not in videoList[i]:
                            is_top = 0
                        else:
                            is_top = videoList[i]['is_top']
                        # user_name
                        if 'user_info' not in videoList[i]:
                            user_name = 0
                        elif 'name' not in videoList[i]['user_info']:
                            user_name = 0
                        else:
                            user_name = videoList[i]['user_info']['name']
                        # user_id
                        if 'user_info' not in videoList[i]:
                            user_id = 0
                        elif 'user_id' not in videoList[i]['user_info']:
                            user_id = 0
                        else:
                            user_id = videoList[i]['user_info']['user_id']
                        # avatar_url
                        if 'user_info' not in videoList[i]:
                            avatar_url = 0
                        elif 'avatar_url' not in videoList[i]['user_info']:
                            avatar_url = 0
                        else:
                            avatar_url = videoList[i]['user_info']['avatar_url']
                        # cover_url — prefer the direct 'url', fall back to url_list
                        if 'video_detail_info' not in videoList[i]:
                            cover_url = 0
                        elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                            cover_url = 0
                        elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                        else:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                        # Block until the crawl rule sheet is readable.
                        while True:
                            rule_dict = cls.get_rule(log_type, crawler)
                            if rule_dict is None:
                                Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                                time.sleep(10)
                            else:
                                break
                        if gid == 0 or video_id == 0 or cover_url == 0:
                            Common.logger(log_type, crawler).info('无效视频\n')
                        elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # A stale pinned video is skipped but does NOT end
                            # the crawl — pinned items sit ahead of newer ones.
                            Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                        elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # A stale regular video ends the crawl for this
                            # author (list assumed newest-first — TODO confirm).
                            Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                            cls.offset = 0
                            return
                        else:
                            video_url_dict = cls.get_video_url(log_type, crawler, gid)
                            video_url = video_url_dict["video_url"]
                            audio_url = video_url_dict["audio_url"]
                            video_width = video_url_dict["video_width"]
                            video_height = video_url_dict["video_height"]
                            video_dict = {'video_title': video_title,
                                          'video_id': video_id,
                                          'gid': gid,
                                          'play_cnt': play_cnt,
                                          'comment_cnt': comment_cnt,
                                          'like_cnt': like_cnt,
                                          'share_cnt': share_cnt,
                                          'video_width': video_width,
                                          'video_height': video_height,
                                          'duration': video_duration,
                                          'publish_time_stamp': publish_time,
                                          'publish_time_str': publish_time_str,
                                          'is_top': is_top,
                                          'user_name': user_name,
                                          'user_id': user_id,
                                          'avatar_url': avatar_url,
                                          'cover_url': cover_url,
                                          'audio_url': audio_url,
                                          'video_url': video_url,
                                          'session': signature}
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            cls.download_publish(log_type=log_type,
                                                 crawler=crawler,
                                                 video_dict=video_dict,
                                                 rule_dict=rule_dict,
                                                 strategy=strategy,
                                                 our_uid=our_uid,
                                                 oss_endpoint=oss_endpoint,
                                                 env=env,
                                                 machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  823. @classmethod
  824. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  825. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  826. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  827. return len(repeat_video)
  828. # 下载 / 上传
  829. @classmethod
  830. def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
  831. try:
  832. if cls.download_rule(video_dict, rule_dict) is False:
  833. Common.logger(log_type, crawler).info('不满足抓取规则\n')
  834. elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
  835. Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
  836. elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
  837. Common.logger(log_type, crawler).info('视频已下载\n')
  838. # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
  839. # Common.logger(log_type, crawler).info('视频已下载\n')
  840. # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
  841. # Common.logger(log_type, crawler).info('视频已下载\n')
  842. # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
  843. # Common.logger(log_type, crawler).info('视频已下载\n')
  844. # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
  845. # Common.logger(log_type, crawler).info('视频已存在\n')
  846. else:
  847. # 下载视频
  848. Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
  849. # 下载音频
  850. Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
  851. # 合成音视频
  852. Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
  853. md_title = md5(video_dict['video_title'].encode('utf8')).hexdigest()
  854. if os.path.getsize(f"./{crawler}/videos/{video_dict['video_title']}/video.mp4") == 0:
  855. # 删除视频文件夹
  856. shutil.rmtree(f"./{crawler}/videos/{md_title}")
  857. Common.logger(log_type, crawler).info("视频size=0,删除成功\n")
  858. return
  859. # ffmpeg_dict = Common.ffmpeg(log_type, crawler, f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
  860. # if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
  861. # Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
  862. # # 删除视频文件夹
  863. # shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
  864. # return
  865. # 下载封面
  866. Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
  867. # 保存视频信息至txt
  868. Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
  869. # 上传视频
  870. Common.logger(log_type, crawler).info("开始上传视频...")
  871. our_video_id = Publish.upload_and_publish(log_type=log_type,
  872. crawler=crawler,
  873. strategy=strategy,
  874. our_uid=our_uid,
  875. env=env,
  876. oss_endpoint=oss_endpoint)
  877. if env == 'dev':
  878. our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
  879. else:
  880. our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
  881. Common.logger(log_type, crawler).info("视频上传完成")
  882. if our_video_id is None:
  883. # 删除视频文件夹
  884. shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
  885. return
  886. # 视频写入飞书
  887. Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
  888. upload_time = int(time.time())
  889. values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
  890. "定向榜",
  891. video_dict['video_title'],
  892. str(video_dict['video_id']),
  893. our_video_link,
  894. video_dict['gid'],
  895. video_dict['play_cnt'],
  896. video_dict['comment_cnt'],
  897. video_dict['like_cnt'],
  898. video_dict['share_cnt'],
  899. video_dict['duration'],
  900. str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
  901. video_dict['publish_time_str'],
  902. video_dict['user_name'],
  903. video_dict['user_id'],
  904. video_dict['avatar_url'],
  905. video_dict['cover_url'],
  906. video_dict['video_url'],
  907. video_dict['audio_url']]]
  908. time.sleep(1)
  909. Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
  910. Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
  911. # 视频信息保存数据库
  912. insert_sql = f""" insert into crawler_video(video_id,
  913. user_id,
  914. out_user_id,
  915. platform,
  916. strategy,
  917. out_video_id,
  918. video_title,
  919. cover_url,
  920. video_url,
  921. duration,
  922. publish_time,
  923. play_cnt,
  924. crawler_rule,
  925. width,
  926. height)
  927. values({our_video_id},
  928. {our_uid},
  929. "{video_dict['user_id']}",
  930. "{cls.platform}",
  931. "定向爬虫策略",
  932. "{video_dict['video_id']}",
  933. "{video_dict['video_title']}",
  934. "{video_dict['cover_url']}",
  935. "{video_dict['video_url']}",
  936. {int(video_dict['duration'])},
  937. "{video_dict['publish_time_str']}",
  938. {int(video_dict['play_cnt'])},
  939. '{json.dumps(rule_dict)}',
  940. {int(video_dict['video_width'])},
  941. {int(video_dict['video_height'])}) """
  942. Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
  943. MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
  944. Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
  945. except Exception as e:
  946. Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  947. @classmethod
  948. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  949. try:
  950. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  951. for user in user_list:
  952. out_uid = user["out_uid"]
  953. user_name = user["user_name"]
  954. our_uid = user["our_uid"]
  955. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  956. cls.get_videolist(log_type=log_type,
  957. crawler=crawler,
  958. strategy=strategy,
  959. our_uid=our_uid,
  960. out_uid=out_uid,
  961. oss_endpoint=oss_endpoint,
  962. env=env,
  963. machine=machine)
  964. cls.offset = 0
  965. time.sleep(1)
  966. except Exception as e:
  967. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
  968. if __name__ == '__main__':
  969. # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
  970. # Follow.get_videolist(log_type="follow",
  971. # crawler="xigua",
  972. # strategy="定向爬虫策略",
  973. # our_uid="6267141",
  974. # out_uid="95420624045",
  975. # oss_endpoint="out",
  976. # env="dev",
  977. # machine="local")
  978. # print(Follow.random_signature())
  979. # rule = Follow.get_rule("follow", "xigua")
  980. # print(type(rule))
  981. # print(type(json.dumps(rule)))
  982. # print(json.dumps(rule))
  983. Follow.get_user_list("follow", "xigua", "5tlTYB", "prod", "local")
  984. pass