xigua_follow.py 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. import requests
  13. import urllib3
  14. from requests.adapters import HTTPAdapter
  15. from selenium.webdriver import DesiredCapabilities
  16. from selenium.webdriver.chrome.service import Service
  17. from selenium.webdriver.common.by import By
  18. from selenium import webdriver
  19. from lxml import etree
  20. sys.path.append(os.getcwd())
  21. from common.db import MysqlHelper
  22. from common.users import Users
  23. from common.common import Common
  24. from common.feishu import Feishu
  25. from common.publish import Publish
  26. class Follow:
  27. # 个人主页视频翻页参数
  28. offset = 0
  29. platform = "西瓜视频"
  30. tag = "西瓜视频爬虫,定向爬虫策略"
  31. @classmethod
  32. def get_rule(cls, log_type, crawler):
  33. try:
  34. while True:
  35. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  36. if rule_sheet is None:
  37. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  38. time.sleep(10)
  39. continue
  40. rule_dict = {
  41. "play_cnt": int(rule_sheet[1][2]),
  42. "comment_cnt": int(rule_sheet[2][2]),
  43. "like_cnt": int(rule_sheet[3][2]),
  44. "duration": int(rule_sheet[4][2]),
  45. "publish_time": int(rule_sheet[5][2]),
  46. "video_width": int(rule_sheet[6][2]),
  47. "video_height": int(rule_sheet[7][2]),
  48. }
  49. return rule_dict
  50. except Exception as e:
  51. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  52. # 下载规则
  53. @classmethod
  54. def download_rule(cls, video_info_dict, rule_dict):
  55. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  56. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  57. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  58. if video_info_dict['duration'] >= rule_dict['duration']:
  59. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  60. or video_info_dict['video_height'] >= rule_dict['video_height']:
  61. return True
  62. else:
  63. return False
  64. else:
  65. return False
  66. else:
  67. return False
  68. else:
  69. return False
  70. else:
  71. return False
  72. # 过滤词库
  73. @classmethod
  74. def filter_words(cls, log_type, crawler):
  75. try:
  76. while True:
  77. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  78. if filter_words_sheet is None:
  79. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  80. continue
  81. filter_words_list = []
  82. for x in filter_words_sheet:
  83. for y in x:
  84. if y is None:
  85. pass
  86. else:
  87. filter_words_list.append(y)
  88. return filter_words_list
  89. except Exception as e:
  90. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  91. @classmethod
  92. def get_out_user_info(cls, log_type, crawler, out_uid):
  93. try:
  94. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  95. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  96. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  97. url = f"https://www.ixigua.com/home/{out_uid}"
  98. urllib3.disable_warnings()
  99. s = requests.session()
  100. # max_retries=3 重试3次
  101. s.mount('http://', HTTPAdapter(max_retries=3))
  102. s.mount('https://', HTTPAdapter(max_retries=3))
  103. response = s.get(url=url, headers=headers, proxies=Common.tunnel_proxies(), verify=False, timeout=5).text
  104. html = etree.HTML(response)
  105. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  106. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  107. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  108. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  109. if "万" in out_follow_str:
  110. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  111. else:
  112. out_follow = int(out_follow_str.replace(",", ""))
  113. if "万" in out_fans_str:
  114. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  115. else:
  116. out_fans = int(out_fans_str.replace(",", ""))
  117. if "万" in out_like_str:
  118. out_like = int(float(out_like_str.split("万")[0])*10000)
  119. else:
  120. out_like = int(out_like_str.replace(",", ""))
  121. out_user_dict = {
  122. "out_follow": out_follow,
  123. "out_fans": out_fans,
  124. "out_like": out_like,
  125. "out_avatar_url": out_avatar_url,
  126. }
  127. # for k, v in out_user_dict.items():
  128. # print(f"{k}:{v}")
  129. return out_user_dict
  130. except Exception as e:
  131. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  132. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  133. @classmethod
  134. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  135. try:
  136. while True:
  137. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  138. if user_sheet is None:
  139. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  140. continue
  141. our_user_list = []
  142. # for i in range(1, len(user_sheet)):
  143. for i in range(428, len(user_sheet)):
  144. out_uid = user_sheet[i][2]
  145. user_name = user_sheet[i][3]
  146. our_uid = user_sheet[i][6]
  147. our_user_link = user_sheet[i][7]
  148. if out_uid is None or user_name is None:
  149. Common.logger(log_type, crawler).info("空行\n")
  150. else:
  151. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  152. if our_uid is None:
  153. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  154. out_user_dict = {
  155. "out_uid": out_uid,
  156. "user_name": user_name,
  157. "out_avatar_url": out_user_info["out_avatar_url"],
  158. "out_create_time": '',
  159. "out_tag": '',
  160. "out_play_cnt": 0,
  161. "out_fans": out_user_info["out_fans"],
  162. "out_follow": out_user_info["out_follow"],
  163. "out_friend": 0,
  164. "out_like": out_user_info["out_like"],
  165. "platform": cls.platform,
  166. "tag": cls.tag,
  167. }
  168. our_user_dict = Users.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  169. our_uid = our_user_dict['our_uid']
  170. our_user_link = our_user_dict['our_user_link']
  171. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  172. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  173. our_user_list.append(our_user_dict)
  174. else:
  175. our_user_dict = {
  176. 'out_uid': out_uid,
  177. 'user_name': user_name,
  178. 'our_uid': our_uid,
  179. 'our_user_link': our_user_link,
  180. }
  181. our_user_list.append(our_user_dict)
  182. return our_user_list
  183. except Exception as e:
  184. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  185. @classmethod
  186. def random_signature(cls):
  187. src_digits = string.digits # string_数字
  188. src_uppercase = string.ascii_uppercase # string_大写字母
  189. src_lowercase = string.ascii_lowercase # string_小写字母
  190. digits_num = random.randint(1, 6)
  191. uppercase_num = random.randint(1, 26 - digits_num - 1)
  192. lowercase_num = 26 - (digits_num + uppercase_num)
  193. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  194. src_lowercase, lowercase_num)
  195. random.shuffle(password)
  196. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  197. new_password_start = new_password[0:18]
  198. new_password_end = new_password[-7:]
  199. if new_password[18] == '8':
  200. new_password = new_password_start + 'w' + new_password_end
  201. elif new_password[18] == '9':
  202. new_password = new_password_start + 'x' + new_password_end
  203. elif new_password[18] == '-':
  204. new_password = new_password_start + 'y' + new_password_end
  205. elif new_password[18] == '.':
  206. new_password = new_password_start + 'z' + new_password_end
  207. else:
  208. new_password = new_password_start + 'y' + new_password_end
  209. return new_password
  210. @classmethod
  211. def get_signature(cls, log_type, crawler, out_uid, machine):
  212. try:
  213. # 打印请求配置
  214. ca = DesiredCapabilities.CHROME
  215. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  216. # 不打开浏览器运行
  217. chrome_options = webdriver.ChromeOptions()
  218. chrome_options.add_argument("--headless")
  219. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  220. chrome_options.add_argument("--no-sandbox")
  221. # driver初始化
  222. if machine == 'aliyun' or machine == 'aliyun_hk':
  223. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  224. elif machine == 'macpro':
  225. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  226. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  227. elif machine == 'macair':
  228. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  229. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  230. else:
  231. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  232. driver.implicitly_wait(10)
  233. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  234. time.sleep(3)
  235. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  236. signature = data_src.split("x-signature=")[-1]
  237. return signature
  238. except Exception as e:
  239. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  240. # 获取视频详情
  241. @classmethod
  242. def get_video_url(cls, log_type, crawler, gid):
  243. try:
  244. url = 'https://www.ixigua.com/api/mixVideo/information?'
  245. headers = {
  246. "accept-encoding": "gzip, deflate",
  247. "accept-language": "zh-CN,zh-Hans;q=0.9",
  248. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  249. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  250. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  251. }
  252. params = {
  253. 'mixId': gid,
  254. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  255. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  256. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  257. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  258. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  259. }
  260. cookies = {
  261. 'ixigua-a-s': '1',
  262. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  263. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  264. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  265. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  266. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  267. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  268. '__ac_nonce': '06304878000964fdad287',
  269. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  270. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  271. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  272. '_tea_utm_cache_1300': 'undefined',
  273. 'support_avif': 'false',
  274. 'support_webp': 'false',
  275. 'xiguavideopcwebid': '7134967546256016900',
  276. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  277. }
  278. urllib3.disable_warnings()
  279. s = requests.session()
  280. # max_retries=3 重试3次
  281. s.mount('http://', HTTPAdapter(max_retries=3))
  282. s.mount('https://', HTTPAdapter(max_retries=3))
  283. response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies(), timeout=5)
  284. response.close()
  285. if 'data' not in response.json() or response.json()['data'] == '':
  286. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  287. else:
  288. video_info = response.json()['data']['gidInformation']['packerData']['video']
  289. video_url_dict = {}
  290. # video_url
  291. if 'videoResource' not in video_info:
  292. video_url_dict["video_url"] = ''
  293. video_url_dict["audio_url"] = ''
  294. video_url_dict["video_width"] = 0
  295. video_url_dict["video_height"] = 0
  296. elif 'dash_120fps' in video_info['videoResource']:
  297. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  298. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  299. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  300. if len(video_url) % 3 == 1:
  301. video_url += '=='
  302. elif len(video_url) % 3 == 2:
  303. video_url += '='
  304. elif len(audio_url) % 3 == 1:
  305. audio_url += '=='
  306. elif len(audio_url) % 3 == 2:
  307. audio_url += '='
  308. video_url = base64.b64decode(video_url).decode('utf8')
  309. audio_url = base64.b64decode(audio_url).decode('utf8')
  310. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  311. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  312. video_url_dict["video_url"] = video_url
  313. video_url_dict["audio_url"] = audio_url
  314. video_url_dict["video_width"] = video_width
  315. video_url_dict["video_height"] = video_height
  316. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  317. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  318. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  319. if len(video_url) % 3 == 1:
  320. video_url += '=='
  321. elif len(video_url) % 3 == 2:
  322. video_url += '='
  323. elif len(audio_url) % 3 == 1:
  324. audio_url += '=='
  325. elif len(audio_url) % 3 == 2:
  326. audio_url += '='
  327. video_url = base64.b64decode(video_url).decode('utf8')
  328. audio_url = base64.b64decode(audio_url).decode('utf8')
  329. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  330. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  331. video_url_dict["video_url"] = video_url
  332. video_url_dict["audio_url"] = audio_url
  333. video_url_dict["video_width"] = video_width
  334. video_url_dict["video_height"] = video_height
  335. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  336. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  337. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  338. if len(video_url) % 3 == 1:
  339. video_url += '=='
  340. elif len(video_url) % 3 == 2:
  341. video_url += '='
  342. elif len(audio_url) % 3 == 1:
  343. audio_url += '=='
  344. elif len(audio_url) % 3 == 2:
  345. audio_url += '='
  346. video_url = base64.b64decode(video_url).decode('utf8')
  347. audio_url = base64.b64decode(audio_url).decode('utf8')
  348. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  349. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  350. video_url_dict["video_url"] = video_url
  351. video_url_dict["audio_url"] = audio_url
  352. video_url_dict["video_width"] = video_width
  353. video_url_dict["video_height"] = video_height
  354. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  355. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  356. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  357. if len(video_url) % 3 == 1:
  358. video_url += '=='
  359. elif len(video_url) % 3 == 2:
  360. video_url += '='
  361. elif len(audio_url) % 3 == 1:
  362. audio_url += '=='
  363. elif len(audio_url) % 3 == 2:
  364. audio_url += '='
  365. video_url = base64.b64decode(video_url).decode('utf8')
  366. audio_url = base64.b64decode(audio_url).decode('utf8')
  367. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  368. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  369. video_url_dict["video_url"] = video_url
  370. video_url_dict["audio_url"] = audio_url
  371. video_url_dict["video_width"] = video_width
  372. video_url_dict["video_height"] = video_height
  373. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  374. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  375. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  376. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  377. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  378. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  379. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  380. if len(video_url) % 3 == 1:
  381. video_url += '=='
  382. elif len(video_url) % 3 == 2:
  383. video_url += '='
  384. elif len(audio_url) % 3 == 1:
  385. audio_url += '=='
  386. elif len(audio_url) % 3 == 2:
  387. audio_url += '='
  388. video_url = base64.b64decode(video_url).decode('utf8')
  389. audio_url = base64.b64decode(audio_url).decode('utf8')
  390. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  391. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  392. video_url_dict["video_url"] = video_url
  393. video_url_dict["audio_url"] = audio_url
  394. video_url_dict["video_width"] = video_width
  395. video_url_dict["video_height"] = video_height
  396. else:
  397. video_url_dict["video_url"] = ''
  398. video_url_dict["audio_url"] = ''
  399. video_url_dict["video_width"] = 0
  400. video_url_dict["video_height"] = 0
  401. elif 'dash' in video_info['videoResource']:
  402. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  403. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  404. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  405. if len(video_url) % 3 == 1:
  406. video_url += '=='
  407. elif len(video_url) % 3 == 2:
  408. video_url += '='
  409. elif len(audio_url) % 3 == 1:
  410. audio_url += '=='
  411. elif len(audio_url) % 3 == 2:
  412. audio_url += '='
  413. video_url = base64.b64decode(video_url).decode('utf8')
  414. audio_url = base64.b64decode(audio_url).decode('utf8')
  415. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  416. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  417. video_url_dict["video_url"] = video_url
  418. video_url_dict["audio_url"] = audio_url
  419. video_url_dict["video_width"] = video_width
  420. video_url_dict["video_height"] = video_height
  421. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  422. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  423. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  424. if len(video_url) % 3 == 1:
  425. video_url += '=='
  426. elif len(video_url) % 3 == 2:
  427. video_url += '='
  428. elif len(audio_url) % 3 == 1:
  429. audio_url += '=='
  430. elif len(audio_url) % 3 == 2:
  431. audio_url += '='
  432. video_url = base64.b64decode(video_url).decode('utf8')
  433. audio_url = base64.b64decode(audio_url).decode('utf8')
  434. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  435. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  436. video_url_dict["video_url"] = video_url
  437. video_url_dict["audio_url"] = audio_url
  438. video_url_dict["video_width"] = video_width
  439. video_url_dict["video_height"] = video_height
  440. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  441. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  442. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  443. if len(video_url) % 3 == 1:
  444. video_url += '=='
  445. elif len(video_url) % 3 == 2:
  446. video_url += '='
  447. elif len(audio_url) % 3 == 1:
  448. audio_url += '=='
  449. elif len(audio_url) % 3 == 2:
  450. audio_url += '='
  451. video_url = base64.b64decode(video_url).decode('utf8')
  452. audio_url = base64.b64decode(audio_url).decode('utf8')
  453. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  454. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  455. video_url_dict["video_url"] = video_url
  456. video_url_dict["audio_url"] = audio_url
  457. video_url_dict["video_width"] = video_width
  458. video_url_dict["video_height"] = video_height
  459. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  460. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  461. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  462. if len(video_url) % 3 == 1:
  463. video_url += '=='
  464. elif len(video_url) % 3 == 2:
  465. video_url += '='
  466. elif len(audio_url) % 3 == 1:
  467. audio_url += '=='
  468. elif len(audio_url) % 3 == 2:
  469. audio_url += '='
  470. video_url = base64.b64decode(video_url).decode('utf8')
  471. audio_url = base64.b64decode(audio_url).decode('utf8')
  472. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  473. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  474. video_url_dict["video_url"] = video_url
  475. video_url_dict["audio_url"] = audio_url
  476. video_url_dict["video_width"] = video_width
  477. video_url_dict["video_height"] = video_height
  478. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  479. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  480. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  481. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  482. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  483. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  484. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  485. if len(video_url) % 3 == 1:
  486. video_url += '=='
  487. elif len(video_url) % 3 == 2:
  488. video_url += '='
  489. elif len(audio_url) % 3 == 1:
  490. audio_url += '=='
  491. elif len(audio_url) % 3 == 2:
  492. audio_url += '='
  493. video_url = base64.b64decode(video_url).decode('utf8')
  494. audio_url = base64.b64decode(audio_url).decode('utf8')
  495. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  496. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  497. video_url_dict["video_url"] = video_url
  498. video_url_dict["audio_url"] = audio_url
  499. video_url_dict["video_width"] = video_width
  500. video_url_dict["video_height"] = video_height
  501. else:
  502. video_url_dict["video_url"] = ''
  503. video_url_dict["audio_url"] = ''
  504. video_url_dict["video_width"] = 0
  505. video_url_dict["video_height"] = 0
  506. elif 'normal' in video_info['videoResource']:
  507. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  508. video_info['videoResource']['normal']['video_list']:
  509. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  510. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  511. if len(video_url) % 3 == 1:
  512. video_url += '=='
  513. elif len(video_url) % 3 == 2:
  514. video_url += '='
  515. elif len(audio_url) % 3 == 1:
  516. audio_url += '=='
  517. elif len(audio_url) % 3 == 2:
  518. audio_url += '='
  519. video_url = base64.b64decode(video_url).decode('utf8')
  520. audio_url = base64.b64decode(audio_url).decode('utf8')
  521. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  522. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  523. video_url_dict["video_url"] = video_url
  524. video_url_dict["audio_url"] = audio_url
  525. video_url_dict["video_width"] = video_width
  526. video_url_dict["video_height"] = video_height
  527. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  528. video_info['videoResource']['normal']['video_list']:
  529. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  530. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  531. if len(video_url) % 3 == 1:
  532. video_url += '=='
  533. elif len(video_url) % 3 == 2:
  534. video_url += '='
  535. elif len(audio_url) % 3 == 1:
  536. audio_url += '=='
  537. elif len(audio_url) % 3 == 2:
  538. audio_url += '='
  539. video_url = base64.b64decode(video_url).decode('utf8')
  540. audio_url = base64.b64decode(audio_url).decode('utf8')
  541. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  542. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  543. video_url_dict["video_url"] = video_url
  544. video_url_dict["audio_url"] = audio_url
  545. video_url_dict["video_width"] = video_width
  546. video_url_dict["video_height"] = video_height
  547. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  548. video_info['videoResource']['normal']['video_list']:
  549. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  550. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  551. if len(video_url) % 3 == 1:
  552. video_url += '=='
  553. elif len(video_url) % 3 == 2:
  554. video_url += '='
  555. elif len(audio_url) % 3 == 1:
  556. audio_url += '=='
  557. elif len(audio_url) % 3 == 2:
  558. audio_url += '='
  559. video_url = base64.b64decode(video_url).decode('utf8')
  560. audio_url = base64.b64decode(audio_url).decode('utf8')
  561. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  562. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  563. video_url_dict["video_url"] = video_url
  564. video_url_dict["audio_url"] = audio_url
  565. video_url_dict["video_width"] = video_width
  566. video_url_dict["video_height"] = video_height
  567. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  568. video_info['videoResource']['normal']['video_list']:
  569. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  570. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  571. if len(video_url) % 3 == 1:
  572. video_url += '=='
  573. elif len(video_url) % 3 == 2:
  574. video_url += '='
  575. elif len(audio_url) % 3 == 1:
  576. audio_url += '=='
  577. elif len(audio_url) % 3 == 2:
  578. audio_url += '='
  579. video_url = base64.b64decode(video_url).decode('utf8')
  580. audio_url = base64.b64decode(audio_url).decode('utf8')
  581. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  582. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  583. video_url_dict["video_url"] = video_url
  584. video_url_dict["audio_url"] = audio_url
  585. video_url_dict["video_width"] = video_width
  586. video_url_dict["video_height"] = video_height
  587. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  588. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  589. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  590. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  591. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  592. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  593. 'backup_url_1']
  594. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  595. 'backup_url_1']
  596. if len(video_url) % 3 == 1:
  597. video_url += '=='
  598. elif len(video_url) % 3 == 2:
  599. video_url += '='
  600. elif len(audio_url) % 3 == 1:
  601. audio_url += '=='
  602. elif len(audio_url) % 3 == 2:
  603. audio_url += '='
  604. video_url = base64.b64decode(video_url).decode('utf8')
  605. audio_url = base64.b64decode(audio_url).decode('utf8')
  606. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  607. 'vwidth']
  608. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  609. 'vheight']
  610. video_url_dict["video_url"] = video_url
  611. video_url_dict["audio_url"] = audio_url
  612. video_url_dict["video_width"] = video_width
  613. video_url_dict["video_height"] = video_height
  614. else:
  615. video_url_dict["video_url"] = ''
  616. video_url_dict["audio_url"] = ''
  617. video_url_dict["video_width"] = 0
  618. video_url_dict["video_height"] = 0
  619. else:
  620. video_url_dict["video_url"] = ''
  621. video_url_dict["audio_url"] = ''
  622. video_url_dict["video_width"] = 0
  623. video_url_dict["video_height"] = 0
  624. return video_url_dict
  625. except Exception as e:
  626. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
        """Page through a Xigua author's video list and feed each item to download_publish.

        Fetches 30 videos per page via ``cls.offset`` (a class-level cursor), extracts the
        fields needed downstream, and stops — resetting ``cls.offset`` to 0 — on any bad
        response or once a (non-pinned) video older than the rule's publish window is seen.

        :param log_type: logger channel name
        :param crawler: crawler name (also the working directory segment)
        :param strategy: crawl-strategy label passed through to publishing
        :param our_uid: Piaoquan user id to publish under
        :param out_uid: Xigua author id whose feed is being crawled
        :param oss_endpoint: OSS endpoint selector for the upload
        :param env: environment flag ('dev' / 'prod')
        :param machine: machine flag passed to DB helpers
        """
        try:
            signature = cls.random_signature()
            # Keep requesting pages until one of the stop conditions below returns.
            while True:
                url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
                params = {
                    'to_user_id': str(out_uid),
                    'offset': str(cls.offset),
                    'limit': '30',
                    'maxBehotTime': '0',
                    'order': 'new',
                    'isHome': '0',
                    # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                    # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                    '_signature': signature,
                }
                headers = {
                    # 'authority': 'www.ixigua.com',
                    # 'accept': 'application/json, text/plain, */*',
                    # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                    # 'cache-control': 'no-cache',
                    # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                    # 'pragma': 'no-cache',
                    'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                    # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                    # 'sec-ch-ua-mobile': '?0',
                    # 'sec-ch-ua-platform': '"macOS"',
                    # 'sec-fetch-dest': 'empty',
                    # 'sec-fetch-mode': 'cors',
                    # 'sec-fetch-site': 'same-origin',
                    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                    # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
                }
                urllib3.disable_warnings()
                s = requests.session()
                # max_retries=3: retry each request up to 3 times at the transport level
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                response = s.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False, timeout=5)
                response.close()
                # Advance the page cursor immediately; stop branches below reset it to 0.
                cls.offset += 30
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'data' not in response.text:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'videoList' not in response.json()["data"]:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                    cls.offset = 0
                    return
                else:
                    videoList = response.json()['data']['videoList']
                    # NOTE(review): missing fields fall back to the integer 0 (not '' / None);
                    # the validity check below relies on that sentinel.
                    for i in range(len(videoList)):
                        # video_title
                        if 'title' not in videoList[i]:
                            video_title = 0
                        else:
                            video_title = videoList[i]['title'].strip().replace('手游', '') \
                                .replace('/', '').replace('\/', '').replace('\n', '')
                        # video_id
                        if 'video_id' not in videoList[i]:
                            video_id = 0
                        else:
                            video_id = videoList[i]['video_id']
                        # gid
                        if 'gid' not in videoList[i]:
                            gid = 0
                        else:
                            gid = videoList[i]['gid']
                        # play_cnt
                        if 'video_detail_info' not in videoList[i]:
                            play_cnt = 0
                        elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                            play_cnt = 0
                        else:
                            play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                        # comment_cnt
                        if 'comment_count' not in videoList[i]:
                            comment_cnt = 0
                        else:
                            comment_cnt = videoList[i]['comment_count']
                        # like_cnt
                        if 'digg_count' not in videoList[i]:
                            like_cnt = 0
                        else:
                            like_cnt = videoList[i]['digg_count']
                        # share_cnt — not exposed by this endpoint, always 0
                        share_cnt = 0
                        # video_duration
                        if 'video_duration' not in videoList[i]:
                            video_duration = 0
                        else:
                            video_duration = int(videoList[i]['video_duration'])
                        # send_time
                        if 'publish_time' not in videoList[i]:
                            publish_time = 0
                        else:
                            publish_time = videoList[i]['publish_time']
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                        # is_top
                        if 'is_top' not in videoList[i]:
                            is_top = 0
                        else:
                            is_top = videoList[i]['is_top']
                        # user_name
                        if 'user_info' not in videoList[i]:
                            user_name = 0
                        elif 'name' not in videoList[i]['user_info']:
                            user_name = 0
                        else:
                            user_name = videoList[i]['user_info']['name']
                        # user_id
                        if 'user_info' not in videoList[i]:
                            user_id = 0
                        elif 'user_id' not in videoList[i]['user_info']:
                            user_id = 0
                        else:
                            user_id = videoList[i]['user_info']['user_id']
                        # avatar_url
                        if 'user_info' not in videoList[i]:
                            avatar_url = 0
                        elif 'avatar_url' not in videoList[i]['user_info']:
                            avatar_url = 0
                        else:
                            avatar_url = videoList[i]['user_info']['avatar_url']
                        # cover_url — prefer the direct 'url', else first entry of 'url_list'
                        if 'video_detail_info' not in videoList[i]:
                            cover_url = 0
                        elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                            cover_url = 0
                        elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                        else:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                        # Re-fetch the crawl rule until it is available (retry every 10s).
                        while True:
                            rule_dict = cls.get_rule(log_type, crawler)
                            if rule_dict is None:
                                Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                                time.sleep(10)
                            else:
                                break
                        if gid == 0 or video_id == 0 or cover_url == 0:
                            Common.logger(log_type, crawler).info('无效视频\n')
                        elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # Pinned videos can be out of chronological order: skip but keep paging.
                            Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                        elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # Feed is newest-first, so the first too-old regular video ends the crawl.
                            Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                            cls.offset = 0
                            return
                        else:
                            video_url_dict = cls.get_video_url(log_type, crawler, gid)
                            video_url = video_url_dict["video_url"]
                            audio_url = video_url_dict["audio_url"]
                            video_width = video_url_dict["video_width"]
                            video_height = video_url_dict["video_height"]
                            video_dict = {'video_title': video_title,
                                          'video_id': video_id,
                                          'gid': gid,
                                          'play_cnt': play_cnt,
                                          'comment_cnt': comment_cnt,
                                          'like_cnt': like_cnt,
                                          'share_cnt': share_cnt,
                                          'video_width': video_width,
                                          'video_height': video_height,
                                          'duration': video_duration,
                                          'publish_time_stamp': publish_time,
                                          'publish_time_str': publish_time_str,
                                          'is_top': is_top,
                                          'user_name': user_name,
                                          'user_id': user_id,
                                          'avatar_url': avatar_url,
                                          'cover_url': cover_url,
                                          'audio_url': audio_url,
                                          'video_url': video_url,
                                          'session': signature}
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            cls.download_publish(log_type=log_type,
                                                 crawler=crawler,
                                                 video_dict=video_dict,
                                                 rule_dict=rule_dict,
                                                 strategy=strategy,
                                                 our_uid=our_uid,
                                                 oss_endpoint=oss_endpoint,
                                                 env=env,
                                                 machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  819. @classmethod
  820. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  821. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  822. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  823. return len(repeat_video)
    # 下载 / 上传 (download / upload)
    @classmethod
    def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
        """Filter one scraped video, then download, merge, upload and record it.

        Pipeline: rule filter -> title word filter -> DB dedupe -> download video +
        audio -> ffmpeg merge/validate -> download cover -> save info txt -> publish
        to Piaoquan -> log row to Feishu sheet -> insert row into MySQL.

        :param video_dict: per-video fields built by get_videolist
        :param rule_dict: crawl rule thresholds used by download_rule
        """
        try:
            if cls.download_rule(video_dict, rule_dict) is False:
                Common.logger(log_type, crawler).info('不满足抓取规则\n')
            elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
                Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
            elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                Common.logger(log_type, crawler).info('视频已下载\n')
            # Older Feishu-sheet based dedupe checks, superseded by the DB lookup above:
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已存在\n')
            else:
                # Download the video stream
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
                # Download the audio stream (Xigua serves them separately)
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
                # Merge audio and video into one file
                Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
                ffmpeg_dict = Common.ffmpeg(log_type, crawler, f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
                if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
                    Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
                    # Invalid download: remove the whole video folder and bail out
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Download the cover image
                Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
                # Save video metadata to a txt file alongside the media
                Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
                # Upload to Piaoquan
                Common.logger(log_type, crawler).info("开始上传视频...")
                our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                          crawler=crawler,
                                                          strategy=strategy,
                                                          our_uid=our_uid,
                                                          env=env,
                                                          oss_endpoint=oss_endpoint)
                if env == 'dev':
                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                else:
                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                Common.logger(log_type, crawler).info("视频上传完成")
                if our_video_id is None:
                    # Upload failed: clean up the local video folder
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Record the published video in the Feishu sheet
                Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
                upload_time = int(time.time())
                values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                           "定向榜",
                           video_dict['video_title'],
                           str(video_dict['video_id']),
                           our_video_link,
                           video_dict['gid'],
                           video_dict['play_cnt'],
                           video_dict['comment_cnt'],
                           video_dict['like_cnt'],
                           video_dict['share_cnt'],
                           video_dict['duration'],
                           str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                           video_dict['publish_time_str'],
                           video_dict['user_name'],
                           video_dict['user_id'],
                           video_dict['avatar_url'],
                           video_dict['cover_url'],
                           video_dict['video_url'],
                           video_dict['audio_url']]]
                time.sleep(1)
                Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
                Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
                # Persist the video row to MySQL
                # NOTE(review): values are interpolated into the SQL string directly;
                # parameterized queries would be safer if MysqlHelper supports them.
                insert_sql = f""" insert into crawler_video(video_id,
                                user_id,
                                out_user_id,
                                platform,
                                strategy,
                                out_video_id,
                                video_title,
                                cover_url,
                                video_url,
                                duration,
                                publish_time,
                                play_cnt,
                                crawler_rule,
                                width,
                                height)
                                values({our_video_id},
                                {our_uid},
                                "{video_dict['user_id']}",
                                "{cls.platform}",
                                "定向爬虫策略",
                                "{video_dict['video_id']}",
                                "{video_dict['video_title']}",
                                "{video_dict['cover_url']}",
                                "{video_dict['video_url']}",
                                {int(video_dict['duration'])},
                                "{video_dict['publish_time_str']}",
                                {int(video_dict['play_cnt'])},
                                '{json.dumps(rule_dict)}',
                                {int(video_dict['video_width'])},
                                {int(video_dict['video_height'])}) """
                Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
                MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
        except Exception as e:
            Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  937. @classmethod
  938. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  939. try:
  940. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  941. for user in user_list:
  942. out_uid = user["out_uid"]
  943. user_name = user["user_name"]
  944. our_uid = user["our_uid"]
  945. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  946. cls.get_videolist(log_type=log_type,
  947. crawler=crawler,
  948. strategy=strategy,
  949. our_uid=our_uid,
  950. out_uid=out_uid,
  951. oss_endpoint=oss_endpoint,
  952. env=env,
  953. machine=machine)
  954. cls.offset = 0
  955. time.sleep(1)
  956. except Exception as e:
  957. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
if __name__ == '__main__':
    # Ad-hoc manual test entry point; the commented calls are kept for debugging.
    # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
    # Follow.get_videolist(log_type="follow",
    #                      crawler="xigua",
    #                      strategy="定向爬虫策略",
    #                      our_uid="6267141",
    #                      out_uid="95420624045",
    #                      oss_endpoint="out",
    #                      env="dev",
    #                      machine="local")
    # print(Follow.random_signature())
    # rule = Follow.get_rule("follow", "xigua")
    # print(type(rule))
    # print(type(json.dumps(rule)))
    # print(json.dumps(rule))
    Follow.get_user_list("follow", "xigua", "5tlTYB", "prod", "local")
    pass