xigua_follow.py 61 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. import requests
  13. import urllib3
  14. from requests.adapters import HTTPAdapter
  15. from selenium.webdriver import DesiredCapabilities
  16. from selenium.webdriver.chrome.service import Service
  17. from selenium.webdriver.common.by import By
  18. from selenium import webdriver
  19. from lxml import etree
  20. sys.path.append(os.getcwd())
  21. from common.db import MysqlHelper
  22. from common.getuser import getUser
  23. from common.common import Common
  24. from common.feishu import Feishu
  25. from common.publish import Publish
class Follow:
    """Crawler for Xigua Video (西瓜视频) creators followed by the project.

    Class attributes are shared crawl settings; `platform` and `tag` are
    written into the user dicts built by `get_user_list`.
    """
    # Pagination offset when paging through a creator's home-page video list
    offset = 0
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,定向爬虫策略"
  31. @classmethod
  32. def get_rule(cls, log_type, crawler):
  33. try:
  34. while True:
  35. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  36. if rule_sheet is None:
  37. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  38. time.sleep(10)
  39. continue
  40. rule_dict = {
  41. "play_cnt": int(rule_sheet[1][2]),
  42. "comment_cnt": int(rule_sheet[2][2]),
  43. "like_cnt": int(rule_sheet[3][2]),
  44. "duration": int(rule_sheet[4][2]),
  45. "publish_time": int(rule_sheet[5][2]),
  46. "video_width": int(rule_sheet[6][2]),
  47. "video_height": int(rule_sheet[7][2]),
  48. }
  49. return rule_dict
  50. except Exception as e:
  51. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  52. # 下载规则
  53. @classmethod
  54. def download_rule(cls, video_info_dict, rule_dict):
  55. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  56. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  57. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  58. if video_info_dict['duration'] >= rule_dict['duration']:
  59. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  60. or video_info_dict['video_height'] >= rule_dict['video_height']:
  61. return True
  62. else:
  63. return False
  64. else:
  65. return False
  66. else:
  67. return False
  68. else:
  69. return False
  70. else:
  71. return False
  72. # 过滤词库
  73. @classmethod
  74. def filter_words(cls, log_type, crawler):
  75. try:
  76. while True:
  77. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  78. if filter_words_sheet is None:
  79. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  80. continue
  81. filter_words_list = []
  82. for x in filter_words_sheet:
  83. for y in x:
  84. if y is None:
  85. pass
  86. else:
  87. filter_words_list.append(y)
  88. return filter_words_list
  89. except Exception as e:
  90. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  91. @classmethod
  92. def get_out_user_info(cls, log_type, crawler, out_uid):
  93. try:
  94. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  95. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  96. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  97. url = f"https://www.ixigua.com/home/{out_uid}"
  98. urllib3.disable_warnings()
  99. s = requests.session()
  100. # max_retries=3 重试3次
  101. s.mount('http://', HTTPAdapter(max_retries=3))
  102. s.mount('https://', HTTPAdapter(max_retries=3))
  103. response = s.get(url=url, headers=headers, proxies=Common.tunnel_proxies(), verify=False, timeout=5).text
  104. html = etree.HTML(response)
  105. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  106. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  107. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  108. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  109. if "万" in out_follow_str:
  110. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  111. else:
  112. out_follow = int(out_follow_str.replace(",", ""))
  113. if "万" in out_fans_str:
  114. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  115. else:
  116. out_fans = int(out_fans_str.replace(",", ""))
  117. if "万" in out_like_str:
  118. out_like = int(float(out_like_str.split("万")[0])*10000)
  119. else:
  120. out_like = int(out_like_str.replace(",", ""))
  121. out_user_dict = {
  122. "out_follow": out_follow,
  123. "out_fans": out_fans,
  124. "out_like": out_like,
  125. "out_avatar_url": out_avatar_url,
  126. }
  127. # for k, v in out_user_dict.items():
  128. # print(f"{k}:{v}")
  129. return out_user_dict
  130. except Exception as e:
  131. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  132. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  133. @classmethod
  134. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  135. try:
  136. while True:
  137. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  138. if user_sheet is None:
  139. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  140. continue
  141. our_user_list = []
  142. for i in range(1, len(user_sheet)):
  143. # for i in range(428, len(user_sheet)):
  144. out_uid = user_sheet[i][2]
  145. user_name = user_sheet[i][3]
  146. our_uid = user_sheet[i][6]
  147. our_user_link = user_sheet[i][7]
  148. if out_uid is None or user_name is None:
  149. Common.logger(log_type, crawler).info("空行\n")
  150. else:
  151. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  152. if our_uid is None:
  153. try:
  154. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  155. except Exception as e:
  156. continue
  157. out_user_dict = {
  158. "out_uid": out_uid,
  159. "user_name": user_name,
  160. "out_avatar_url": out_user_info["out_avatar_url"],
  161. "out_create_time": '',
  162. "out_tag": '',
  163. "out_play_cnt": 0,
  164. "out_fans": out_user_info["out_fans"],
  165. "out_follow": out_user_info["out_follow"],
  166. "out_friend": 0,
  167. "out_like": out_user_info["out_like"],
  168. "platform": cls.platform,
  169. "tag": cls.tag,
  170. }
  171. our_user_dict = getUser.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  172. our_uid = our_user_dict['our_uid']
  173. our_user_link = our_user_dict['our_user_link']
  174. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  175. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  176. our_user_list.append(our_user_dict)
  177. else:
  178. our_user_dict = {
  179. 'out_uid': out_uid,
  180. 'user_name': user_name,
  181. 'our_uid': our_uid,
  182. 'our_user_link': our_user_link,
  183. }
  184. our_user_list.append(our_user_dict)
  185. return our_user_list
  186. except Exception as e:
  187. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  188. @classmethod
  189. def random_signature(cls):
  190. src_digits = string.digits # string_数字
  191. src_uppercase = string.ascii_uppercase # string_大写字母
  192. src_lowercase = string.ascii_lowercase # string_小写字母
  193. digits_num = random.randint(1, 6)
  194. uppercase_num = random.randint(1, 26 - digits_num - 1)
  195. lowercase_num = 26 - (digits_num + uppercase_num)
  196. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  197. src_lowercase, lowercase_num)
  198. random.shuffle(password)
  199. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  200. new_password_start = new_password[0:18]
  201. new_password_end = new_password[-7:]
  202. if new_password[18] == '8':
  203. new_password = new_password_start + 'w' + new_password_end
  204. elif new_password[18] == '9':
  205. new_password = new_password_start + 'x' + new_password_end
  206. elif new_password[18] == '-':
  207. new_password = new_password_start + 'y' + new_password_end
  208. elif new_password[18] == '.':
  209. new_password = new_password_start + 'z' + new_password_end
  210. else:
  211. new_password = new_password_start + 'y' + new_password_end
  212. return new_password
  213. @classmethod
  214. def get_signature(cls, log_type, crawler, out_uid, machine):
  215. try:
  216. # 打印请求配置
  217. ca = DesiredCapabilities.CHROME
  218. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  219. # 不打开浏览器运行
  220. chrome_options = webdriver.ChromeOptions()
  221. chrome_options.add_argument("--headless")
  222. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  223. chrome_options.add_argument("--no-sandbox")
  224. # driver初始化
  225. if machine == 'aliyun' or machine == 'aliyun_hk':
  226. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  227. elif machine == 'macpro':
  228. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  229. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  230. elif machine == 'macair':
  231. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  232. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  233. else:
  234. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  235. driver.implicitly_wait(10)
  236. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  237. time.sleep(3)
  238. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  239. signature = data_src.split("x-signature=")[-1]
  240. return signature
  241. except Exception as e:
  242. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  243. # 获取视频详情
  244. @classmethod
  245. def get_video_url(cls, log_type, crawler, gid):
  246. try:
  247. url = 'https://www.ixigua.com/api/mixVideo/information?'
  248. headers = {
  249. "accept-encoding": "gzip, deflate",
  250. "accept-language": "zh-CN,zh-Hans;q=0.9",
  251. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  252. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  253. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  254. }
  255. params = {
  256. 'mixId': gid,
  257. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  258. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  259. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  260. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  261. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  262. }
  263. cookies = {
  264. 'ixigua-a-s': '1',
  265. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  266. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  267. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  268. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  269. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  270. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  271. '__ac_nonce': '06304878000964fdad287',
  272. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  273. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  274. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  275. '_tea_utm_cache_1300': 'undefined',
  276. 'support_avif': 'false',
  277. 'support_webp': 'false',
  278. 'xiguavideopcwebid': '7134967546256016900',
  279. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  280. }
  281. urllib3.disable_warnings()
  282. s = requests.session()
  283. # max_retries=3 重试3次
  284. s.mount('http://', HTTPAdapter(max_retries=3))
  285. s.mount('https://', HTTPAdapter(max_retries=3))
  286. response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies(), timeout=5)
  287. response.close()
  288. if 'data' not in response.json() or response.json()['data'] == '':
  289. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  290. else:
  291. video_info = response.json()['data']['gidInformation']['packerData']['video']
  292. video_url_dict = {}
  293. # video_url
  294. if 'videoResource' not in video_info:
  295. video_url_dict["video_url"] = ''
  296. video_url_dict["audio_url"] = ''
  297. video_url_dict["video_width"] = 0
  298. video_url_dict["video_height"] = 0
  299. elif 'dash_120fps' in video_info['videoResource']:
  300. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  301. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  302. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  303. if len(video_url) % 3 == 1:
  304. video_url += '=='
  305. elif len(video_url) % 3 == 2:
  306. video_url += '='
  307. elif len(audio_url) % 3 == 1:
  308. audio_url += '=='
  309. elif len(audio_url) % 3 == 2:
  310. audio_url += '='
  311. video_url = base64.b64decode(video_url).decode('utf8')
  312. audio_url = base64.b64decode(audio_url).decode('utf8')
  313. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  314. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  315. video_url_dict["video_url"] = video_url
  316. video_url_dict["audio_url"] = audio_url
  317. video_url_dict["video_width"] = video_width
  318. video_url_dict["video_height"] = video_height
  319. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  320. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  321. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  322. if len(video_url) % 3 == 1:
  323. video_url += '=='
  324. elif len(video_url) % 3 == 2:
  325. video_url += '='
  326. elif len(audio_url) % 3 == 1:
  327. audio_url += '=='
  328. elif len(audio_url) % 3 == 2:
  329. audio_url += '='
  330. video_url = base64.b64decode(video_url).decode('utf8')
  331. audio_url = base64.b64decode(audio_url).decode('utf8')
  332. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  333. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  334. video_url_dict["video_url"] = video_url
  335. video_url_dict["audio_url"] = audio_url
  336. video_url_dict["video_width"] = video_width
  337. video_url_dict["video_height"] = video_height
  338. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  339. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  340. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  341. if len(video_url) % 3 == 1:
  342. video_url += '=='
  343. elif len(video_url) % 3 == 2:
  344. video_url += '='
  345. elif len(audio_url) % 3 == 1:
  346. audio_url += '=='
  347. elif len(audio_url) % 3 == 2:
  348. audio_url += '='
  349. video_url = base64.b64decode(video_url).decode('utf8')
  350. audio_url = base64.b64decode(audio_url).decode('utf8')
  351. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  352. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  353. video_url_dict["video_url"] = video_url
  354. video_url_dict["audio_url"] = audio_url
  355. video_url_dict["video_width"] = video_width
  356. video_url_dict["video_height"] = video_height
  357. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  358. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  359. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  360. if len(video_url) % 3 == 1:
  361. video_url += '=='
  362. elif len(video_url) % 3 == 2:
  363. video_url += '='
  364. elif len(audio_url) % 3 == 1:
  365. audio_url += '=='
  366. elif len(audio_url) % 3 == 2:
  367. audio_url += '='
  368. video_url = base64.b64decode(video_url).decode('utf8')
  369. audio_url = base64.b64decode(audio_url).decode('utf8')
  370. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  371. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  372. video_url_dict["video_url"] = video_url
  373. video_url_dict["audio_url"] = audio_url
  374. video_url_dict["video_width"] = video_width
  375. video_url_dict["video_height"] = video_height
  376. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  377. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  378. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  379. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  380. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  381. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  382. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  383. if len(video_url) % 3 == 1:
  384. video_url += '=='
  385. elif len(video_url) % 3 == 2:
  386. video_url += '='
  387. elif len(audio_url) % 3 == 1:
  388. audio_url += '=='
  389. elif len(audio_url) % 3 == 2:
  390. audio_url += '='
  391. video_url = base64.b64decode(video_url).decode('utf8')
  392. audio_url = base64.b64decode(audio_url).decode('utf8')
  393. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  394. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  395. video_url_dict["video_url"] = video_url
  396. video_url_dict["audio_url"] = audio_url
  397. video_url_dict["video_width"] = video_width
  398. video_url_dict["video_height"] = video_height
  399. else:
  400. video_url_dict["video_url"] = ''
  401. video_url_dict["audio_url"] = ''
  402. video_url_dict["video_width"] = 0
  403. video_url_dict["video_height"] = 0
  404. elif 'dash' in video_info['videoResource']:
  405. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  406. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  407. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  408. if len(video_url) % 3 == 1:
  409. video_url += '=='
  410. elif len(video_url) % 3 == 2:
  411. video_url += '='
  412. elif len(audio_url) % 3 == 1:
  413. audio_url += '=='
  414. elif len(audio_url) % 3 == 2:
  415. audio_url += '='
  416. video_url = base64.b64decode(video_url).decode('utf8')
  417. audio_url = base64.b64decode(audio_url).decode('utf8')
  418. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  419. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  420. video_url_dict["video_url"] = video_url
  421. video_url_dict["audio_url"] = audio_url
  422. video_url_dict["video_width"] = video_width
  423. video_url_dict["video_height"] = video_height
  424. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  425. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  426. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  427. if len(video_url) % 3 == 1:
  428. video_url += '=='
  429. elif len(video_url) % 3 == 2:
  430. video_url += '='
  431. elif len(audio_url) % 3 == 1:
  432. audio_url += '=='
  433. elif len(audio_url) % 3 == 2:
  434. audio_url += '='
  435. video_url = base64.b64decode(video_url).decode('utf8')
  436. audio_url = base64.b64decode(audio_url).decode('utf8')
  437. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  438. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  439. video_url_dict["video_url"] = video_url
  440. video_url_dict["audio_url"] = audio_url
  441. video_url_dict["video_width"] = video_width
  442. video_url_dict["video_height"] = video_height
  443. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  444. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  445. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  446. if len(video_url) % 3 == 1:
  447. video_url += '=='
  448. elif len(video_url) % 3 == 2:
  449. video_url += '='
  450. elif len(audio_url) % 3 == 1:
  451. audio_url += '=='
  452. elif len(audio_url) % 3 == 2:
  453. audio_url += '='
  454. video_url = base64.b64decode(video_url).decode('utf8')
  455. audio_url = base64.b64decode(audio_url).decode('utf8')
  456. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  457. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  458. video_url_dict["video_url"] = video_url
  459. video_url_dict["audio_url"] = audio_url
  460. video_url_dict["video_width"] = video_width
  461. video_url_dict["video_height"] = video_height
  462. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  463. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  464. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  465. if len(video_url) % 3 == 1:
  466. video_url += '=='
  467. elif len(video_url) % 3 == 2:
  468. video_url += '='
  469. elif len(audio_url) % 3 == 1:
  470. audio_url += '=='
  471. elif len(audio_url) % 3 == 2:
  472. audio_url += '='
  473. video_url = base64.b64decode(video_url).decode('utf8')
  474. audio_url = base64.b64decode(audio_url).decode('utf8')
  475. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  476. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  477. video_url_dict["video_url"] = video_url
  478. video_url_dict["audio_url"] = audio_url
  479. video_url_dict["video_width"] = video_width
  480. video_url_dict["video_height"] = video_height
  481. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  482. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  483. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  484. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  485. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  486. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  487. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  488. if len(video_url) % 3 == 1:
  489. video_url += '=='
  490. elif len(video_url) % 3 == 2:
  491. video_url += '='
  492. elif len(audio_url) % 3 == 1:
  493. audio_url += '=='
  494. elif len(audio_url) % 3 == 2:
  495. audio_url += '='
  496. video_url = base64.b64decode(video_url).decode('utf8')
  497. audio_url = base64.b64decode(audio_url).decode('utf8')
  498. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  499. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  500. video_url_dict["video_url"] = video_url
  501. video_url_dict["audio_url"] = audio_url
  502. video_url_dict["video_width"] = video_width
  503. video_url_dict["video_height"] = video_height
  504. else:
  505. video_url_dict["video_url"] = ''
  506. video_url_dict["audio_url"] = ''
  507. video_url_dict["video_width"] = 0
  508. video_url_dict["video_height"] = 0
  509. elif 'normal' in video_info['videoResource']:
  510. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  511. video_info['videoResource']['normal']['video_list']:
  512. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  513. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  514. if len(video_url) % 3 == 1:
  515. video_url += '=='
  516. elif len(video_url) % 3 == 2:
  517. video_url += '='
  518. elif len(audio_url) % 3 == 1:
  519. audio_url += '=='
  520. elif len(audio_url) % 3 == 2:
  521. audio_url += '='
  522. video_url = base64.b64decode(video_url).decode('utf8')
  523. audio_url = base64.b64decode(audio_url).decode('utf8')
  524. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  525. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  526. video_url_dict["video_url"] = video_url
  527. video_url_dict["audio_url"] = audio_url
  528. video_url_dict["video_width"] = video_width
  529. video_url_dict["video_height"] = video_height
  530. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  531. video_info['videoResource']['normal']['video_list']:
  532. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  533. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  534. if len(video_url) % 3 == 1:
  535. video_url += '=='
  536. elif len(video_url) % 3 == 2:
  537. video_url += '='
  538. elif len(audio_url) % 3 == 1:
  539. audio_url += '=='
  540. elif len(audio_url) % 3 == 2:
  541. audio_url += '='
  542. video_url = base64.b64decode(video_url).decode('utf8')
  543. audio_url = base64.b64decode(audio_url).decode('utf8')
  544. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  545. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  546. video_url_dict["video_url"] = video_url
  547. video_url_dict["audio_url"] = audio_url
  548. video_url_dict["video_width"] = video_width
  549. video_url_dict["video_height"] = video_height
  550. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  551. video_info['videoResource']['normal']['video_list']:
  552. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  553. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  554. if len(video_url) % 3 == 1:
  555. video_url += '=='
  556. elif len(video_url) % 3 == 2:
  557. video_url += '='
  558. elif len(audio_url) % 3 == 1:
  559. audio_url += '=='
  560. elif len(audio_url) % 3 == 2:
  561. audio_url += '='
  562. video_url = base64.b64decode(video_url).decode('utf8')
  563. audio_url = base64.b64decode(audio_url).decode('utf8')
  564. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  565. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  566. video_url_dict["video_url"] = video_url
  567. video_url_dict["audio_url"] = audio_url
  568. video_url_dict["video_width"] = video_width
  569. video_url_dict["video_height"] = video_height
  570. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  571. video_info['videoResource']['normal']['video_list']:
  572. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  573. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  574. if len(video_url) % 3 == 1:
  575. video_url += '=='
  576. elif len(video_url) % 3 == 2:
  577. video_url += '='
  578. elif len(audio_url) % 3 == 1:
  579. audio_url += '=='
  580. elif len(audio_url) % 3 == 2:
  581. audio_url += '='
  582. video_url = base64.b64decode(video_url).decode('utf8')
  583. audio_url = base64.b64decode(audio_url).decode('utf8')
  584. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  585. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  586. video_url_dict["video_url"] = video_url
  587. video_url_dict["audio_url"] = audio_url
  588. video_url_dict["video_width"] = video_width
  589. video_url_dict["video_height"] = video_height
  590. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  591. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  592. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  593. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  594. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  595. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  596. 'backup_url_1']
  597. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  598. 'backup_url_1']
  599. if len(video_url) % 3 == 1:
  600. video_url += '=='
  601. elif len(video_url) % 3 == 2:
  602. video_url += '='
  603. elif len(audio_url) % 3 == 1:
  604. audio_url += '=='
  605. elif len(audio_url) % 3 == 2:
  606. audio_url += '='
  607. video_url = base64.b64decode(video_url).decode('utf8')
  608. audio_url = base64.b64decode(audio_url).decode('utf8')
  609. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  610. 'vwidth']
  611. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  612. 'vheight']
  613. video_url_dict["video_url"] = video_url
  614. video_url_dict["audio_url"] = audio_url
  615. video_url_dict["video_width"] = video_width
  616. video_url_dict["video_height"] = video_height
  617. else:
  618. video_url_dict["video_url"] = ''
  619. video_url_dict["audio_url"] = ''
  620. video_url_dict["video_width"] = 0
  621. video_url_dict["video_height"] = 0
  622. else:
  623. video_url_dict["video_url"] = ''
  624. video_url_dict["audio_url"] = ''
  625. video_url_dict["video_width"] = 0
  626. video_url_dict["video_height"] = 0
  627. return video_url_dict
  628. except Exception as e:
  629. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
        """Page through one Xigua author's video list and feed each valid video to download_publish.

        Requests the web API in pages of 30, tracking the paging position in the
        class-level ``cls.offset`` (shared state — callers reset it between users).
        The loop ends (offset reset, plain ``return``) when the response is
        malformed or when a non-pinned video older than the rule's
        ``publish_time`` window is reached. All exceptions are logged, not raised.

        :param log_type: logger channel name
        :param crawler: crawler/platform name (logging + file paths)
        :param strategy: crawl-strategy label forwarded to download_publish
        :param our_uid: our-platform user id the video is published under
        :param out_uid: Xigua author id whose page is crawled
        :param env: environment tag, e.g. 'dev' / 'prod'
        :param machine: host tag forwarded to downstream helpers
        """
        try:
            signature = cls.random_signature()
            while True:
                url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
                params = {
                    'to_user_id': str(out_uid),
                    'offset': str(cls.offset),
                    'limit': '30',
                    'maxBehotTime': '0',
                    'order': 'new',
                    'isHome': '0',
                    # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                    # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                    '_signature': signature,
                }
                headers = {
                    # 'authority': 'www.ixigua.com',
                    # 'accept': 'application/json, text/plain, */*',
                    # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                    # 'cache-control': 'no-cache',
                    # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                    # 'pragma': 'no-cache',
                    'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                    # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                    # 'sec-ch-ua-mobile': '?0',
                    # 'sec-ch-ua-platform': '"macOS"',
                    # 'sec-fetch-dest': 'empty',
                    # 'sec-fetch-mode': 'cors',
                    # 'sec-fetch-site': 'same-origin',
                    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                    # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
                }
                urllib3.disable_warnings()
                s = requests.session()
                # max_retries=3: retry each HTTP request up to 3 times
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                response = s.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False, timeout=5)
                response.close()
                # Advance the shared paging cursor before validating the response.
                cls.offset += 30
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'data' not in response.text:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'videoList' not in response.json()["data"]:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                    cls.offset = 0
                    return
                else:
                    videoList = response.json()['data']['videoList']
                    # NOTE(review): missing-field fallbacks below use the int 0 even for
                    # string fields (title, user_name, urls); downstream code must
                    # tolerate that sentinel — confirm against download_publish.
                    for i in range(len(videoList)):
                        # video_title
                        if 'title' not in videoList[i]:
                            video_title = 0
                        else:
                            # NOTE(review): '\/' is an invalid escape kept by Python as
                            # backslash+slash; since '/' was already stripped, this
                            # replace can never match — looks like dead code.
                            video_title = videoList[i]['title'].strip().replace('手游', '') \
                                .replace('/', '').replace('\/', '').replace('\n', '')
                        # video_id
                        if 'video_id' not in videoList[i]:
                            video_id = 0
                        else:
                            video_id = videoList[i]['video_id']
                        # gid
                        if 'gid' not in videoList[i]:
                            gid = 0
                        else:
                            gid = videoList[i]['gid']
                        # play_cnt
                        if 'video_detail_info' not in videoList[i]:
                            play_cnt = 0
                        elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                            play_cnt = 0
                        else:
                            play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                        # comment_cnt
                        if 'comment_count' not in videoList[i]:
                            comment_cnt = 0
                        else:
                            comment_cnt = videoList[i]['comment_count']
                        # like_cnt
                        if 'digg_count' not in videoList[i]:
                            like_cnt = 0
                        else:
                            like_cnt = videoList[i]['digg_count']
                        # share_cnt — the API exposes no share count; always 0
                        share_cnt = 0
                        # video_duration
                        if 'video_duration' not in videoList[i]:
                            video_duration = 0
                        else:
                            video_duration = int(videoList[i]['video_duration'])
                        # send_time
                        if 'publish_time' not in videoList[i]:
                            publish_time = 0
                        else:
                            publish_time = videoList[i]['publish_time']
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                        # is_top
                        if 'is_top' not in videoList[i]:
                            is_top = 0
                        else:
                            is_top = videoList[i]['is_top']
                        # user_name
                        if 'user_info' not in videoList[i]:
                            user_name = 0
                        elif 'name' not in videoList[i]['user_info']:
                            user_name = 0
                        else:
                            user_name = videoList[i]['user_info']['name']
                        # user_id
                        if 'user_info' not in videoList[i]:
                            user_id = 0
                        elif 'user_id' not in videoList[i]['user_info']:
                            user_id = 0
                        else:
                            user_id = videoList[i]['user_info']['user_id']
                        # avatar_url
                        if 'user_info' not in videoList[i]:
                            avatar_url = 0
                        elif 'avatar_url' not in videoList[i]['user_info']:
                            avatar_url = 0
                        else:
                            avatar_url = videoList[i]['user_info']['avatar_url']
                        # cover_url
                        if 'video_detail_info' not in videoList[i]:
                            cover_url = 0
                        elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                            cover_url = 0
                        elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                        else:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                        # Block until the crawl rule sheet is readable; retry every 10s.
                        while True:
                            rule_dict = cls.get_rule(log_type, crawler)
                            if rule_dict is None:
                                Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                                time.sleep(10)
                            else:
                                break
                        if gid == 0 or video_id == 0 or cover_url == 0:
                            Common.logger(log_type, crawler).info('无效视频\n')
                        # NOTE(review): 'is_top is True' only matches the bool True; if the
                        # API returns 0/1 ints, pinned videos fall through to the next
                        # branch and terminate pagination early — confirm payload type.
                        elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                        elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # List is newest-first, so the first too-old non-pinned video
                            # ends the whole crawl for this author.
                            Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                            cls.offset = 0
                            return
                        else:
                            video_url_dict = cls.get_video_url(log_type, crawler, gid)
                            video_url = video_url_dict["video_url"]
                            audio_url = video_url_dict["audio_url"]
                            video_width = video_url_dict["video_width"]
                            video_height = video_url_dict["video_height"]
                            video_dict = {'video_title': video_title,
                                          'video_id': video_id,
                                          'gid': gid,
                                          'play_cnt': play_cnt,
                                          'comment_cnt': comment_cnt,
                                          'like_cnt': like_cnt,
                                          'share_cnt': share_cnt,
                                          'video_width': video_width,
                                          'video_height': video_height,
                                          'duration': video_duration,
                                          'publish_time_stamp': publish_time,
                                          'publish_time_str': publish_time_str,
                                          'is_top': is_top,
                                          'user_name': user_name,
                                          'user_id': user_id,
                                          'avatar_url': avatar_url,
                                          'cover_url': cover_url,
                                          'audio_url': audio_url,
                                          'video_url': video_url,
                                          'session': signature}
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            cls.download_publish(log_type=log_type,
                                                 crawler=crawler,
                                                 video_dict=video_dict,
                                                 rule_dict=rule_dict,
                                                 strategy=strategy,
                                                 our_uid=our_uid,
                                                 oss_endpoint=oss_endpoint,
                                                 env=env,
                                                 machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  822. @classmethod
  823. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  824. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  825. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  826. return len(repeat_video)
    # Download / upload
    @classmethod
    def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
        """Filter one video, then run the full pipeline: download video/audio,
        merge with ffmpeg, upload to our platform, and record it in Feishu + MySQL.

        Videos are skipped (with an info log, no error) when they fail the crawl
        rule, the title hits a filter word, or the video was already downloaded
        (repeat_video). Any exception anywhere in the pipeline is logged and
        swallowed.

        :param video_dict: metadata dict built by get_videolist
        :param rule_dict: crawl rule from get_rule; stored as JSON in the DB row
        """
        try:
            if cls.download_rule(video_dict, rule_dict) is False:
                Common.logger(log_type, crawler).info('不满足抓取规则\n')
            elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
                # NOTE(review): brace-style placeholder passed as an extra arg —
                # this formats only with a loguru-style logger; stdlib logging
                # would ignore the argument. Confirm what Common.logger returns.
                Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
            elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已存在\n')
            else:
                # Download the video stream
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
                # Download the audio stream
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
                # Merge audio + video into one file
                Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
                ffmpeg_dict = Common.ffmpeg(log_type, crawler, f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
                if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
                    Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
                    # Remove the video working directory
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Download the cover image
                Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
                # Persist video metadata to a local txt file
                Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
                # Upload the video
                Common.logger(log_type, crawler).info("开始上传视频...")
                our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                          crawler=crawler,
                                                          strategy=strategy,
                                                          our_uid=our_uid,
                                                          env=env,
                                                          oss_endpoint=oss_endpoint)
                if env == 'dev':
                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                else:
                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                Common.logger(log_type, crawler).info("视频上传完成")
                if our_video_id is None:
                    # Upload failed: remove the video working directory
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Record the video in the Feishu sheet
                Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
                upload_time = int(time.time())
                values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                           "定向榜",
                           video_dict['video_title'],
                           str(video_dict['video_id']),
                           our_video_link,
                           video_dict['gid'],
                           video_dict['play_cnt'],
                           video_dict['comment_cnt'],
                           video_dict['like_cnt'],
                           video_dict['share_cnt'],
                           video_dict['duration'],
                           str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                           video_dict['publish_time_str'],
                           video_dict['user_name'],
                           video_dict['user_id'],
                           video_dict['avatar_url'],
                           video_dict['cover_url'],
                           video_dict['video_url'],
                           video_dict['audio_url']]]
                time.sleep(1)
                Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
                Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
                # Save video info to the database
                # NOTE(review): values are f-string-interpolated into the SQL —
                # a title containing a double quote would break/inject the
                # statement; prefer a parameterized query if MysqlHelper allows.
                insert_sql = f""" insert into crawler_video(video_id,
                                user_id,
                                out_user_id,
                                platform,
                                strategy,
                                out_video_id,
                                video_title,
                                cover_url,
                                video_url,
                                duration,
                                publish_time,
                                play_cnt,
                                crawler_rule,
                                width,
                                height)
                                values({our_video_id},
                                {our_uid},
                                "{video_dict['user_id']}",
                                "{cls.platform}",
                                "定向爬虫策略",
                                "{video_dict['video_id']}",
                                "{video_dict['video_title']}",
                                "{video_dict['cover_url']}",
                                "{video_dict['video_url']}",
                                {int(video_dict['duration'])},
                                "{video_dict['publish_time_str']}",
                                {int(video_dict['play_cnt'])},
                                '{json.dumps(rule_dict)}',
                                {int(video_dict['video_width'])},
                                {int(video_dict['video_height'])}) """
                Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
                MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
        except Exception as e:
            Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  940. @classmethod
  941. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  942. try:
  943. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  944. for user in user_list:
  945. out_uid = user["out_uid"]
  946. user_name = user["user_name"]
  947. our_uid = user["our_uid"]
  948. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  949. cls.get_videolist(log_type=log_type,
  950. crawler=crawler,
  951. strategy=strategy,
  952. our_uid=our_uid,
  953. out_uid=out_uid,
  954. oss_endpoint=oss_endpoint,
  955. env=env,
  956. machine=machine)
  957. cls.offset = 0
  958. time.sleep(1)
  959. except Exception as e:
  960. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
  961. if __name__ == '__main__':
  962. # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
  963. # Follow.get_videolist(log_type="follow",
  964. # crawler="xigua",
  965. # strategy="定向爬虫策略",
  966. # our_uid="6267141",
  967. # out_uid="95420624045",
  968. # oss_endpoint="out",
  969. # env="dev",
  970. # machine="local")
  971. # print(Follow.random_signature())
  972. # rule = Follow.get_rule("follow", "xigua")
  973. # print(type(rule))
  974. # print(type(json.dumps(rule)))
  975. # print(json.dumps(rule))
  976. Follow.get_user_list("follow", "xigua", "5tlTYB", "prod", "local")
  977. pass