# xigua_follow.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. import requests
  13. import urllib3
  14. from requests.adapters import HTTPAdapter
  15. from selenium.webdriver import DesiredCapabilities
  16. from selenium.webdriver.chrome.service import Service
  17. from selenium.webdriver.common.by import By
  18. from selenium import webdriver
  19. from lxml import etree
  20. sys.path.append(os.getcwd())
  21. from common.db import MysqlHelper
  22. from common.users import Users
  23. from common.common import Common
  24. from common.feishu import Feishu
  25. from common.publish import Publish
class Follow:
    """Crawler for followed Xigua Video (西瓜视频) creators."""
    # Paging offset for a creator's homepage video list
    offset = 0
    # Platform name and crawler-strategy tag (attached to scraped records)
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,定向爬虫策略"
  31. @classmethod
  32. def get_rule(cls, log_type, crawler):
  33. try:
  34. while True:
  35. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  36. if rule_sheet is None:
  37. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  38. time.sleep(10)
  39. continue
  40. rule_dict = {
  41. "play_cnt": int(rule_sheet[1][2]),
  42. "comment_cnt": int(rule_sheet[2][2]),
  43. "like_cnt": int(rule_sheet[3][2]),
  44. "duration": int(rule_sheet[4][2]),
  45. "publish_time": int(rule_sheet[5][2]),
  46. "video_width": int(rule_sheet[6][2]),
  47. "video_height": int(rule_sheet[7][2]),
  48. }
  49. return rule_dict
  50. except Exception as e:
  51. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  52. # 下载规则
  53. @classmethod
  54. def download_rule(cls, video_info_dict, rule_dict):
  55. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  56. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  57. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  58. if video_info_dict['duration'] >= rule_dict['duration']:
  59. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  60. or video_info_dict['video_height'] >= rule_dict['video_height']:
  61. return True
  62. else:
  63. return False
  64. else:
  65. return False
  66. else:
  67. return False
  68. else:
  69. return False
  70. else:
  71. return False
  72. # 过滤词库
  73. @classmethod
  74. def filter_words(cls, log_type, crawler):
  75. try:
  76. while True:
  77. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  78. if filter_words_sheet is None:
  79. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  80. continue
  81. filter_words_list = []
  82. for x in filter_words_sheet:
  83. for y in x:
  84. if y is None:
  85. pass
  86. else:
  87. filter_words_list.append(y)
  88. return filter_words_list
  89. except Exception as e:
  90. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  91. @classmethod
  92. def get_out_user_info(cls, log_type, crawler, out_uid):
  93. try:
  94. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  95. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  96. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  97. url = f"https://www.ixigua.com/home/{out_uid}"
  98. urllib3.disable_warnings()
  99. s = requests.session()
  100. # max_retries=3 重试3次
  101. s.mount('http://', HTTPAdapter(max_retries=3))
  102. s.mount('https://', HTTPAdapter(max_retries=3))
  103. response = s.get(url=url, headers=headers, proxies=Common.tunnel_proxies(), verify=False, timeout=5).text
  104. html = etree.HTML(response)
  105. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  106. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  107. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  108. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  109. if "万" in out_follow_str:
  110. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  111. else:
  112. out_follow = int(out_follow_str.replace(",", ""))
  113. if "万" in out_fans_str:
  114. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  115. else:
  116. out_fans = int(out_fans_str.replace(",", ""))
  117. if "万" in out_like_str:
  118. out_like = int(float(out_like_str.split("万")[0])*10000)
  119. else:
  120. out_like = int(out_like_str.replace(",", ""))
  121. out_user_dict = {
  122. "out_follow": out_follow,
  123. "out_fans": out_fans,
  124. "out_like": out_like,
  125. "out_avatar_url": out_avatar_url,
  126. }
  127. # for k, v in out_user_dict.items():
  128. # print(f"{k}:{v}")
  129. return out_user_dict
  130. except Exception as e:
  131. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  132. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  133. @classmethod
  134. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  135. try:
  136. while True:
  137. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  138. if user_sheet is None:
  139. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  140. continue
  141. our_user_list = []
  142. for i in range(1, len(user_sheet)):
  143. out_uid = user_sheet[i][2]
  144. user_name = user_sheet[i][3]
  145. our_uid = user_sheet[i][6]
  146. our_user_link = user_sheet[i][7]
  147. if out_uid is None or user_name is None:
  148. Common.logger(log_type, crawler).info("空行\n")
  149. else:
  150. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  151. if our_uid is None:
  152. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  153. out_user_dict = {
  154. "out_uid": out_uid,
  155. "user_name": user_name,
  156. "out_avatar_url": out_user_info["out_avatar_url"],
  157. "out_create_time": '',
  158. "out_tag": '',
  159. "out_play_cnt": 0,
  160. "out_fans": out_user_info["out_fans"],
  161. "out_follow": out_user_info["out_follow"],
  162. "out_friend": 0,
  163. "out_like": out_user_info["out_like"],
  164. "platform": cls.platform,
  165. "tag": cls.tag,
  166. }
  167. our_user_dict = Users.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  168. our_uid = our_user_dict['our_uid']
  169. our_user_link = our_user_dict['our_user_link']
  170. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  171. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  172. our_user_list.append(our_user_dict)
  173. else:
  174. our_user_dict = {
  175. 'out_uid': out_uid,
  176. 'user_name': user_name,
  177. 'our_uid': our_uid,
  178. 'our_user_link': our_user_link,
  179. }
  180. our_user_list.append(our_user_dict)
  181. return our_user_list
  182. except Exception as e:
  183. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  184. @classmethod
  185. def random_signature(cls):
  186. src_digits = string.digits # string_数字
  187. src_uppercase = string.ascii_uppercase # string_大写字母
  188. src_lowercase = string.ascii_lowercase # string_小写字母
  189. digits_num = random.randint(1, 6)
  190. uppercase_num = random.randint(1, 26 - digits_num - 1)
  191. lowercase_num = 26 - (digits_num + uppercase_num)
  192. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  193. src_lowercase, lowercase_num)
  194. random.shuffle(password)
  195. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  196. new_password_start = new_password[0:18]
  197. new_password_end = new_password[-7:]
  198. if new_password[18] == '8':
  199. new_password = new_password_start + 'w' + new_password_end
  200. elif new_password[18] == '9':
  201. new_password = new_password_start + 'x' + new_password_end
  202. elif new_password[18] == '-':
  203. new_password = new_password_start + 'y' + new_password_end
  204. elif new_password[18] == '.':
  205. new_password = new_password_start + 'z' + new_password_end
  206. else:
  207. new_password = new_password_start + 'y' + new_password_end
  208. return new_password
  209. @classmethod
  210. def get_signature(cls, log_type, crawler, out_uid, machine):
  211. try:
  212. # 打印请求配置
  213. ca = DesiredCapabilities.CHROME
  214. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  215. # 不打开浏览器运行
  216. chrome_options = webdriver.ChromeOptions()
  217. chrome_options.add_argument("--headless")
  218. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  219. chrome_options.add_argument("--no-sandbox")
  220. # driver初始化
  221. if machine == 'aliyun' or machine == 'aliyun_hk':
  222. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  223. elif machine == 'macpro':
  224. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  225. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  226. elif machine == 'macair':
  227. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  228. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  229. else:
  230. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  231. driver.implicitly_wait(10)
  232. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  233. time.sleep(3)
  234. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  235. signature = data_src.split("x-signature=")[-1]
  236. return signature
  237. except Exception as e:
  238. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  239. # 获取视频详情
  240. @classmethod
  241. def get_video_url(cls, log_type, crawler, gid):
  242. try:
  243. url = 'https://www.ixigua.com/api/mixVideo/information?'
  244. headers = {
  245. "accept-encoding": "gzip, deflate",
  246. "accept-language": "zh-CN,zh-Hans;q=0.9",
  247. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  248. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  249. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  250. }
  251. params = {
  252. 'mixId': gid,
  253. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  254. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  255. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  256. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  257. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  258. }
  259. cookies = {
  260. 'ixigua-a-s': '1',
  261. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  262. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  263. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  264. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  265. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  266. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  267. '__ac_nonce': '06304878000964fdad287',
  268. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  269. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  270. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  271. '_tea_utm_cache_1300': 'undefined',
  272. 'support_avif': 'false',
  273. 'support_webp': 'false',
  274. 'xiguavideopcwebid': '7134967546256016900',
  275. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  276. }
  277. urllib3.disable_warnings()
  278. s = requests.session()
  279. # max_retries=3 重试3次
  280. s.mount('http://', HTTPAdapter(max_retries=3))
  281. s.mount('https://', HTTPAdapter(max_retries=3))
  282. response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies(), timeout=5)
  283. response.close()
  284. if 'data' not in response.json() or response.json()['data'] == '':
  285. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  286. else:
  287. video_info = response.json()['data']['gidInformation']['packerData']['video']
  288. video_url_dict = {}
  289. # video_url
  290. if 'videoResource' not in video_info:
  291. video_url_dict["video_url"] = ''
  292. video_url_dict["audio_url"] = ''
  293. video_url_dict["video_width"] = 0
  294. video_url_dict["video_height"] = 0
  295. elif 'dash_120fps' in video_info['videoResource']:
  296. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  297. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  298. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  299. if len(video_url) % 3 == 1:
  300. video_url += '=='
  301. elif len(video_url) % 3 == 2:
  302. video_url += '='
  303. elif len(audio_url) % 3 == 1:
  304. audio_url += '=='
  305. elif len(audio_url) % 3 == 2:
  306. audio_url += '='
  307. video_url = base64.b64decode(video_url).decode('utf8')
  308. audio_url = base64.b64decode(audio_url).decode('utf8')
  309. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  310. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  311. video_url_dict["video_url"] = video_url
  312. video_url_dict["audio_url"] = audio_url
  313. video_url_dict["video_width"] = video_width
  314. video_url_dict["video_height"] = video_height
  315. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  316. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  317. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  318. if len(video_url) % 3 == 1:
  319. video_url += '=='
  320. elif len(video_url) % 3 == 2:
  321. video_url += '='
  322. elif len(audio_url) % 3 == 1:
  323. audio_url += '=='
  324. elif len(audio_url) % 3 == 2:
  325. audio_url += '='
  326. video_url = base64.b64decode(video_url).decode('utf8')
  327. audio_url = base64.b64decode(audio_url).decode('utf8')
  328. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  329. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  330. video_url_dict["video_url"] = video_url
  331. video_url_dict["audio_url"] = audio_url
  332. video_url_dict["video_width"] = video_width
  333. video_url_dict["video_height"] = video_height
  334. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  335. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  336. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  337. if len(video_url) % 3 == 1:
  338. video_url += '=='
  339. elif len(video_url) % 3 == 2:
  340. video_url += '='
  341. elif len(audio_url) % 3 == 1:
  342. audio_url += '=='
  343. elif len(audio_url) % 3 == 2:
  344. audio_url += '='
  345. video_url = base64.b64decode(video_url).decode('utf8')
  346. audio_url = base64.b64decode(audio_url).decode('utf8')
  347. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  348. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  349. video_url_dict["video_url"] = video_url
  350. video_url_dict["audio_url"] = audio_url
  351. video_url_dict["video_width"] = video_width
  352. video_url_dict["video_height"] = video_height
  353. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  354. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  355. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  356. if len(video_url) % 3 == 1:
  357. video_url += '=='
  358. elif len(video_url) % 3 == 2:
  359. video_url += '='
  360. elif len(audio_url) % 3 == 1:
  361. audio_url += '=='
  362. elif len(audio_url) % 3 == 2:
  363. audio_url += '='
  364. video_url = base64.b64decode(video_url).decode('utf8')
  365. audio_url = base64.b64decode(audio_url).decode('utf8')
  366. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  367. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  368. video_url_dict["video_url"] = video_url
  369. video_url_dict["audio_url"] = audio_url
  370. video_url_dict["video_width"] = video_width
  371. video_url_dict["video_height"] = video_height
  372. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  373. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  374. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  375. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  376. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  377. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  378. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  379. if len(video_url) % 3 == 1:
  380. video_url += '=='
  381. elif len(video_url) % 3 == 2:
  382. video_url += '='
  383. elif len(audio_url) % 3 == 1:
  384. audio_url += '=='
  385. elif len(audio_url) % 3 == 2:
  386. audio_url += '='
  387. video_url = base64.b64decode(video_url).decode('utf8')
  388. audio_url = base64.b64decode(audio_url).decode('utf8')
  389. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  390. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  391. video_url_dict["video_url"] = video_url
  392. video_url_dict["audio_url"] = audio_url
  393. video_url_dict["video_width"] = video_width
  394. video_url_dict["video_height"] = video_height
  395. else:
  396. video_url_dict["video_url"] = ''
  397. video_url_dict["audio_url"] = ''
  398. video_url_dict["video_width"] = 0
  399. video_url_dict["video_height"] = 0
  400. elif 'dash' in video_info['videoResource']:
  401. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  402. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  403. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  404. if len(video_url) % 3 == 1:
  405. video_url += '=='
  406. elif len(video_url) % 3 == 2:
  407. video_url += '='
  408. elif len(audio_url) % 3 == 1:
  409. audio_url += '=='
  410. elif len(audio_url) % 3 == 2:
  411. audio_url += '='
  412. video_url = base64.b64decode(video_url).decode('utf8')
  413. audio_url = base64.b64decode(audio_url).decode('utf8')
  414. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  415. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  416. video_url_dict["video_url"] = video_url
  417. video_url_dict["audio_url"] = audio_url
  418. video_url_dict["video_width"] = video_width
  419. video_url_dict["video_height"] = video_height
  420. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  421. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  422. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  423. if len(video_url) % 3 == 1:
  424. video_url += '=='
  425. elif len(video_url) % 3 == 2:
  426. video_url += '='
  427. elif len(audio_url) % 3 == 1:
  428. audio_url += '=='
  429. elif len(audio_url) % 3 == 2:
  430. audio_url += '='
  431. video_url = base64.b64decode(video_url).decode('utf8')
  432. audio_url = base64.b64decode(audio_url).decode('utf8')
  433. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  434. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  435. video_url_dict["video_url"] = video_url
  436. video_url_dict["audio_url"] = audio_url
  437. video_url_dict["video_width"] = video_width
  438. video_url_dict["video_height"] = video_height
  439. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  440. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  441. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  442. if len(video_url) % 3 == 1:
  443. video_url += '=='
  444. elif len(video_url) % 3 == 2:
  445. video_url += '='
  446. elif len(audio_url) % 3 == 1:
  447. audio_url += '=='
  448. elif len(audio_url) % 3 == 2:
  449. audio_url += '='
  450. video_url = base64.b64decode(video_url).decode('utf8')
  451. audio_url = base64.b64decode(audio_url).decode('utf8')
  452. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  453. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  454. video_url_dict["video_url"] = video_url
  455. video_url_dict["audio_url"] = audio_url
  456. video_url_dict["video_width"] = video_width
  457. video_url_dict["video_height"] = video_height
  458. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  459. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  460. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  461. if len(video_url) % 3 == 1:
  462. video_url += '=='
  463. elif len(video_url) % 3 == 2:
  464. video_url += '='
  465. elif len(audio_url) % 3 == 1:
  466. audio_url += '=='
  467. elif len(audio_url) % 3 == 2:
  468. audio_url += '='
  469. video_url = base64.b64decode(video_url).decode('utf8')
  470. audio_url = base64.b64decode(audio_url).decode('utf8')
  471. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  472. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  473. video_url_dict["video_url"] = video_url
  474. video_url_dict["audio_url"] = audio_url
  475. video_url_dict["video_width"] = video_width
  476. video_url_dict["video_height"] = video_height
  477. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  478. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  479. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  480. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  481. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  482. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  483. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  484. if len(video_url) % 3 == 1:
  485. video_url += '=='
  486. elif len(video_url) % 3 == 2:
  487. video_url += '='
  488. elif len(audio_url) % 3 == 1:
  489. audio_url += '=='
  490. elif len(audio_url) % 3 == 2:
  491. audio_url += '='
  492. video_url = base64.b64decode(video_url).decode('utf8')
  493. audio_url = base64.b64decode(audio_url).decode('utf8')
  494. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  495. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  496. video_url_dict["video_url"] = video_url
  497. video_url_dict["audio_url"] = audio_url
  498. video_url_dict["video_width"] = video_width
  499. video_url_dict["video_height"] = video_height
  500. else:
  501. video_url_dict["video_url"] = ''
  502. video_url_dict["audio_url"] = ''
  503. video_url_dict["video_width"] = 0
  504. video_url_dict["video_height"] = 0
  505. elif 'normal' in video_info['videoResource']:
  506. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  507. video_info['videoResource']['normal']['video_list']:
  508. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  509. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  510. if len(video_url) % 3 == 1:
  511. video_url += '=='
  512. elif len(video_url) % 3 == 2:
  513. video_url += '='
  514. elif len(audio_url) % 3 == 1:
  515. audio_url += '=='
  516. elif len(audio_url) % 3 == 2:
  517. audio_url += '='
  518. video_url = base64.b64decode(video_url).decode('utf8')
  519. audio_url = base64.b64decode(audio_url).decode('utf8')
  520. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  521. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  522. video_url_dict["video_url"] = video_url
  523. video_url_dict["audio_url"] = audio_url
  524. video_url_dict["video_width"] = video_width
  525. video_url_dict["video_height"] = video_height
  526. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  527. video_info['videoResource']['normal']['video_list']:
  528. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  529. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  530. if len(video_url) % 3 == 1:
  531. video_url += '=='
  532. elif len(video_url) % 3 == 2:
  533. video_url += '='
  534. elif len(audio_url) % 3 == 1:
  535. audio_url += '=='
  536. elif len(audio_url) % 3 == 2:
  537. audio_url += '='
  538. video_url = base64.b64decode(video_url).decode('utf8')
  539. audio_url = base64.b64decode(audio_url).decode('utf8')
  540. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  541. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  542. video_url_dict["video_url"] = video_url
  543. video_url_dict["audio_url"] = audio_url
  544. video_url_dict["video_width"] = video_width
  545. video_url_dict["video_height"] = video_height
  546. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  547. video_info['videoResource']['normal']['video_list']:
  548. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  549. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  550. if len(video_url) % 3 == 1:
  551. video_url += '=='
  552. elif len(video_url) % 3 == 2:
  553. video_url += '='
  554. elif len(audio_url) % 3 == 1:
  555. audio_url += '=='
  556. elif len(audio_url) % 3 == 2:
  557. audio_url += '='
  558. video_url = base64.b64decode(video_url).decode('utf8')
  559. audio_url = base64.b64decode(audio_url).decode('utf8')
  560. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  561. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  562. video_url_dict["video_url"] = video_url
  563. video_url_dict["audio_url"] = audio_url
  564. video_url_dict["video_width"] = video_width
  565. video_url_dict["video_height"] = video_height
  566. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  567. video_info['videoResource']['normal']['video_list']:
  568. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  569. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  570. if len(video_url) % 3 == 1:
  571. video_url += '=='
  572. elif len(video_url) % 3 == 2:
  573. video_url += '='
  574. elif len(audio_url) % 3 == 1:
  575. audio_url += '=='
  576. elif len(audio_url) % 3 == 2:
  577. audio_url += '='
  578. video_url = base64.b64decode(video_url).decode('utf8')
  579. audio_url = base64.b64decode(audio_url).decode('utf8')
  580. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  581. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  582. video_url_dict["video_url"] = video_url
  583. video_url_dict["audio_url"] = audio_url
  584. video_url_dict["video_width"] = video_width
  585. video_url_dict["video_height"] = video_height
  586. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  587. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  588. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  589. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  590. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  591. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  592. 'backup_url_1']
  593. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  594. 'backup_url_1']
  595. if len(video_url) % 3 == 1:
  596. video_url += '=='
  597. elif len(video_url) % 3 == 2:
  598. video_url += '='
  599. elif len(audio_url) % 3 == 1:
  600. audio_url += '=='
  601. elif len(audio_url) % 3 == 2:
  602. audio_url += '='
  603. video_url = base64.b64decode(video_url).decode('utf8')
  604. audio_url = base64.b64decode(audio_url).decode('utf8')
  605. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  606. 'vwidth']
  607. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  608. 'vheight']
  609. video_url_dict["video_url"] = video_url
  610. video_url_dict["audio_url"] = audio_url
  611. video_url_dict["video_width"] = video_width
  612. video_url_dict["video_height"] = video_height
  613. else:
  614. video_url_dict["video_url"] = ''
  615. video_url_dict["audio_url"] = ''
  616. video_url_dict["video_width"] = 0
  617. video_url_dict["video_height"] = 0
  618. else:
  619. video_url_dict["video_url"] = ''
  620. video_url_dict["audio_url"] = ''
  621. video_url_dict["video_width"] = 0
  622. video_url_dict["video_height"] = 0
  623. return video_url_dict
  624. except Exception as e:
  625. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
        """Page through an ixigua author's video list and download/publish each match.

        Requests pages of 30 videos from the author's new_video_list API.
        ``cls.offset`` is the class-level paging cursor: it is bumped by 30 after
        every request and reset to 0 on any early exit. The loop stops (returns)
        when the API answers abnormally or when a non-top video older than the
        rule's publish-time window is reached.

        :param log_type: logger channel name
        :param crawler: crawler/platform name, used for logging
        :param strategy: crawl-strategy label forwarded to download_publish
        :param our_uid: internal account id that will publish the video
        :param out_uid: ixigua author id whose list is crawled
        :param oss_endpoint: OSS endpoint forwarded to the publisher
        :param env: runtime environment flag, forwarded downstream
        :param machine: host identifier forwarded to DB helpers
        """
        try:
            signature = cls.random_signature()
            while True:
                url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
                params = {
                    'to_user_id': str(out_uid),
                    'offset': str(cls.offset),
                    'limit': '30',
                    'maxBehotTime': '0',
                    'order': 'new',
                    'isHome': '0',
                    # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                    # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                    '_signature': signature,
                }
                headers = {
                    # 'authority': 'www.ixigua.com',
                    # 'accept': 'application/json, text/plain, */*',
                    # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                    # 'cache-control': 'no-cache',
                    # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                    # 'pragma': 'no-cache',
                    'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                    # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                    # 'sec-ch-ua-mobile': '?0',
                    # 'sec-ch-ua-platform': '"macOS"',
                    # 'sec-fetch-dest': 'empty',
                    # 'sec-fetch-mode': 'cors',
                    # 'sec-fetch-site': 'same-origin',
                    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                    # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
                }
                urllib3.disable_warnings()
                s = requests.session()
                # max_retries=3: retry each connection up to 3 times
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                response = s.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False, timeout=5)
                response.close()
                # Advance the paging cursor before validating the response.
                cls.offset += 30
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'data' not in response.text:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                    cls.offset = 0
                    return
                elif 'videoList' not in response.json()["data"]:
                    Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                    cls.offset = 0
                    return
                else:
                    videoList = response.json()['data']['videoList']
                    for i in range(len(videoList)):
                        # video_title: missing fields fall back to 0 (sentinel used
                        # throughout this loop for "absent").
                        if 'title' not in videoList[i]:
                            video_title = 0
                        else:
                            # NOTE(review): '\/' equals '/' in Python, so the second
                            # replace is redundant with the first — confirm intent.
                            video_title = videoList[i]['title'].strip().replace('手游', '') \
                                .replace('/', '').replace('\/', '').replace('\n', '')
                        # video_id
                        if 'video_id' not in videoList[i]:
                            video_id = 0
                        else:
                            video_id = videoList[i]['video_id']
                        # gid
                        if 'gid' not in videoList[i]:
                            gid = 0
                        else:
                            gid = videoList[i]['gid']
                        # play_cnt
                        if 'video_detail_info' not in videoList[i]:
                            play_cnt = 0
                        elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                            play_cnt = 0
                        else:
                            play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                        # comment_cnt
                        if 'comment_count' not in videoList[i]:
                            comment_cnt = 0
                        else:
                            comment_cnt = videoList[i]['comment_count']
                        # like_cnt
                        if 'digg_count' not in videoList[i]:
                            like_cnt = 0
                        else:
                            like_cnt = videoList[i]['digg_count']
                        # share_cnt: not exposed by this API, always 0
                        share_cnt = 0
                        # video_duration
                        if 'video_duration' not in videoList[i]:
                            video_duration = 0
                        else:
                            video_duration = int(videoList[i]['video_duration'])
                        # send_time
                        if 'publish_time' not in videoList[i]:
                            publish_time = 0
                        else:
                            publish_time = videoList[i]['publish_time']
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                        # is_top
                        if 'is_top' not in videoList[i]:
                            is_top = 0
                        else:
                            is_top = videoList[i]['is_top']
                        # user_name
                        if 'user_info' not in videoList[i]:
                            user_name = 0
                        elif 'name' not in videoList[i]['user_info']:
                            user_name = 0
                        else:
                            user_name = videoList[i]['user_info']['name']
                        # user_id
                        if 'user_info' not in videoList[i]:
                            user_id = 0
                        elif 'user_id' not in videoList[i]['user_info']:
                            user_id = 0
                        else:
                            user_id = videoList[i]['user_info']['user_id']
                        # avatar_url
                        if 'user_info' not in videoList[i]:
                            avatar_url = 0
                        elif 'avatar_url' not in videoList[i]['user_info']:
                            avatar_url = 0
                        else:
                            avatar_url = videoList[i]['user_info']['avatar_url']
                        # cover_url: prefer the direct 'url' field, else first of url_list
                        if 'video_detail_info' not in videoList[i]:
                            cover_url = 0
                        elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                            cover_url = 0
                        elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                        else:
                            cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                        # Block until the crawl rules can actually be fetched.
                        while True:
                            rule_dict = cls.get_rule(log_type, crawler)
                            if rule_dict is None:
                                Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                                time.sleep(10)
                            else:
                                break
                        if gid == 0 or video_id == 0 or cover_url == 0:
                            Common.logger(log_type, crawler).info('无效视频\n')
                        # NOTE(review): is_top defaults to the int 0 above, so if the
                        # API returns 0/1 instead of a bool, 'is_top is True' is never
                        # matched and stale pinned videos fall through to the early
                        # return below — confirm the API's is_top type.
                        elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                        elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                            # List is newest-first, so one stale video ends the crawl.
                            Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                            cls.offset = 0
                            return
                        else:
                            video_url_dict = cls.get_video_url(log_type, crawler, gid)
                            video_url = video_url_dict["video_url"]
                            audio_url = video_url_dict["audio_url"]
                            video_width = video_url_dict["video_width"]
                            video_height = video_url_dict["video_height"]
                            video_dict = {'video_title': video_title,
                                          'video_id': video_id,
                                          'gid': gid,
                                          'play_cnt': play_cnt,
                                          'comment_cnt': comment_cnt,
                                          'like_cnt': like_cnt,
                                          'share_cnt': share_cnt,
                                          'video_width': video_width,
                                          'video_height': video_height,
                                          'duration': video_duration,
                                          'publish_time_stamp': publish_time,
                                          'publish_time_str': publish_time_str,
                                          'is_top': is_top,
                                          'user_name': user_name,
                                          'user_id': user_id,
                                          'avatar_url': avatar_url,
                                          'cover_url': cover_url,
                                          'audio_url': audio_url,
                                          'video_url': video_url,
                                          'session': signature}
                            for k, v in video_dict.items():
                                Common.logger(log_type, crawler).info(f"{k}:{v}")
                            cls.download_publish(log_type=log_type,
                                                 crawler=crawler,
                                                 video_dict=video_dict,
                                                 rule_dict=rule_dict,
                                                 strategy=strategy,
                                                 our_uid=our_uid,
                                                 oss_endpoint=oss_endpoint,
                                                 env=env,
                                                 machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  818. @classmethod
  819. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  820. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  821. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  822. return len(repeat_video)
    # Download / upload
    @classmethod
    def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
        """Download one video (cover + video + audio tracks), merge A/V, upload
        it to our platform, then record it in Feishu and the crawler_video table.

        The video is skipped (logged, no side effects) when it fails the crawl
        rules, its title contains a filter word, or it is already in the DB.

        :param video_dict: metadata assembled by get_videolist
        :param rule_dict: crawl rules fetched by get_rule
        """
        try:
            if cls.download_rule(video_dict, rule_dict) is False:
                Common.logger(log_type, crawler).info('不满足抓取规则\n')
            # NOTE(review): the '{}' placeholder below is loguru-style; with a
            # stdlib logger the title would not be interpolated — confirm which
            # logger Common.logger returns.
            elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
                Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
            elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                Common.logger(log_type, crawler).info('视频已下载\n')
            # Older Feishu-sheet dedupe checks, superseded by the DB lookup above:
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已下载\n')
            # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
            #     Common.logger(log_type, crawler).info('视频已存在\n')
            else:
                # Download the cover image
                Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
                # Download the video track
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
                # Download the audio track
                Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
                # Save video metadata to a txt file alongside the media
                Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
                # Merge the separate audio and video tracks into one file
                Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
                # Upload the merged video
                Common.logger(log_type, crawler).info("开始上传视频...")
                our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                          crawler=crawler,
                                                          strategy=strategy,
                                                          our_uid=our_uid,
                                                          env=env,
                                                          oss_endpoint=oss_endpoint)
                if env == 'dev':
                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                else:
                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                Common.logger(log_type, crawler).info("视频上传完成")
                if our_video_id is None:
                    # Upload failed: delete the local video folder and bail out
                    # before any bookkeeping is written.
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return
                # Record the video in the Feishu sheet
                Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
                upload_time = int(time.time())
                values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                           "定向榜",
                           video_dict['video_title'],
                           str(video_dict['video_id']),
                           our_video_link,
                           video_dict['gid'],
                           video_dict['play_cnt'],
                           video_dict['comment_cnt'],
                           video_dict['like_cnt'],
                           video_dict['share_cnt'],
                           video_dict['duration'],
                           str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                           video_dict['publish_time_str'],
                           video_dict['user_name'],
                           video_dict['user_id'],
                           video_dict['avatar_url'],
                           video_dict['cover_url'],
                           video_dict['video_url'],
                           video_dict['audio_url']]]
                time.sleep(1)
                Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
                Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
                # Persist the video record to the database.
                # NOTE(review): values (including the free-text title) are
                # interpolated directly into the SQL string; a title containing
                # a double quote will break this statement — consider a
                # parameterized query.
                insert_sql = f""" insert into crawler_video(video_id,
                                                            user_id,
                                                            out_user_id,
                                                            platform,
                                                            strategy,
                                                            out_video_id,
                                                            video_title,
                                                            cover_url,
                                                            video_url,
                                                            duration,
                                                            publish_time,
                                                            play_cnt,
                                                            crawler_rule,
                                                            width,
                                                            height)
                                                            values({our_video_id},
                                                            {our_uid},
                                                            "{video_dict['user_id']}",
                                                            "{cls.platform}",
                                                            "定向爬虫策略",
                                                            "{video_dict['video_id']}",
                                                            "{video_dict['video_title']}",
                                                            "{video_dict['cover_url']}",
                                                            "{video_dict['video_url']}",
                                                            {int(video_dict['duration'])},
                                                            "{video_dict['publish_time_str']}",
                                                            {int(video_dict['play_cnt'])},
                                                            '{json.dumps(rule_dict)}',
                                                            {int(video_dict['video_width'])},
                                                            {int(video_dict['video_height'])}) """
                Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
                MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
        except Exception as e:
            Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  930. @classmethod
  931. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  932. try:
  933. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  934. for user in user_list:
  935. out_uid = user["out_uid"]
  936. user_name = user["user_name"]
  937. our_uid = user["our_uid"]
  938. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  939. cls.get_videolist(log_type=log_type,
  940. crawler=crawler,
  941. strategy=strategy,
  942. our_uid=our_uid,
  943. out_uid=out_uid,
  944. oss_endpoint=oss_endpoint,
  945. env=env,
  946. machine=machine)
  947. cls.offset = 0
  948. time.sleep(3)
  949. except Exception as e:
  950. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
  951. if __name__ == '__main__':
  952. # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
  953. # Follow.get_videolist(log_type="follow",
  954. # crawler="xigua",
  955. # strategy="定向爬虫策略",
  956. # our_uid="6267141",
  957. # out_uid="95420624045",
  958. # oss_endpoint="out",
  959. # env="dev",
  960. # machine="local")
  961. # print(Follow.random_signature())
  962. rule = Follow.get_rule("follow", "xigua")
  963. print(type(rule))
  964. print(type(json.dumps(rule)))
  965. print(json.dumps(rule))
  966. pass