xigua_follow.py 60 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. import requests
  13. import urllib3
  14. from selenium.webdriver import DesiredCapabilities
  15. from selenium.webdriver.chrome.service import Service
  16. from selenium.webdriver.common.by import By
  17. from selenium import webdriver
  18. from lxml import etree
  19. sys.path.append(os.getcwd())
  20. from common.db import MysqlHelper
  21. from common.users import Users
  22. from common.common import Common
  23. from common.feishu import Feishu
  24. from common.publish import Publish
class Follow:
    # Paging parameter for a user's profile-page video list (class-level cursor,
    # shared across calls; presumably advanced by the listing code below — confirm).
    offset = 0
    # Platform / strategy tags attached to created in-site users.
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,定向爬虫策略"
  30. @classmethod
  31. def get_rule(cls, log_type, crawler):
  32. try:
  33. while True:
  34. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  35. if rule_sheet is None:
  36. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  37. time.sleep(10)
  38. continue
  39. rule_dict = {
  40. "play_cnt": int(rule_sheet[1][2]),
  41. "comment_cnt": int(rule_sheet[2][2]),
  42. "like_cnt": int(rule_sheet[3][2]),
  43. "duration": int(rule_sheet[4][2]),
  44. "publish_time": int(rule_sheet[5][2]),
  45. "video_width": int(rule_sheet[6][2]),
  46. "video_height": int(rule_sheet[7][2]),
  47. }
  48. return rule_dict
  49. except Exception as e:
  50. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  51. # 下载规则
  52. @classmethod
  53. def download_rule(cls, video_info_dict, rule_dict):
  54. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  55. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  56. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  57. if video_info_dict['duration'] >= rule_dict['duration']:
  58. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  59. or video_info_dict['video_height'] >= rule_dict['video_height']:
  60. return True
  61. else:
  62. return False
  63. else:
  64. return False
  65. else:
  66. return False
  67. else:
  68. return False
  69. else:
  70. return False
  71. # 过滤词库
  72. @classmethod
  73. def filter_words(cls, log_type, crawler):
  74. try:
  75. while True:
  76. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  77. if filter_words_sheet is None:
  78. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  79. continue
  80. filter_words_list = []
  81. for x in filter_words_sheet:
  82. for y in x:
  83. if y is None:
  84. pass
  85. else:
  86. filter_words_list.append(y)
  87. return filter_words_list
  88. except Exception as e:
  89. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  90. @classmethod
  91. def get_out_user_info(cls, log_type, crawler, out_uid):
  92. try:
  93. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  94. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  95. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  96. url = f"https://www.ixigua.com/home/{out_uid}"
  97. urllib3.disable_warnings()
  98. response = requests.get(url=url, headers=headers, proxies=Common.tunnel_proxies(), verify=False).text
  99. html = etree.HTML(response)
  100. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  101. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  102. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  103. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  104. if "万" in out_follow_str:
  105. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  106. else:
  107. out_follow = int(out_follow_str.replace(",", ""))
  108. if "万" in out_fans_str:
  109. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  110. else:
  111. out_fans = int(out_fans_str.replace(",", ""))
  112. if "万" in out_like_str:
  113. out_like = int(float(out_like_str.split("万")[0])*10000)
  114. else:
  115. out_like = int(out_like_str.replace(",", ""))
  116. out_user_dict = {
  117. "out_follow": out_follow,
  118. "out_fans": out_fans,
  119. "out_like": out_like,
  120. "out_avatar_url": out_avatar_url,
  121. }
  122. # for k, v in out_user_dict.items():
  123. # print(f"{k}:{v}")
  124. return out_user_dict
  125. except Exception as e:
  126. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  127. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  128. @classmethod
  129. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  130. try:
  131. while True:
  132. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  133. if user_sheet is None:
  134. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  135. continue
  136. our_user_list = []
  137. for i in range(1, len(user_sheet)):
  138. out_uid = user_sheet[i][2]
  139. user_name = user_sheet[i][3]
  140. our_uid = user_sheet[i][6]
  141. our_user_link = user_sheet[i][7]
  142. if out_uid is None or user_name is None:
  143. Common.logger(log_type, crawler).info("空行\n")
  144. else:
  145. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  146. if our_uid is None:
  147. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  148. out_user_dict = {
  149. "out_uid": out_uid,
  150. "user_name": user_name,
  151. "out_avatar_url": out_user_info["out_avatar_url"],
  152. "out_create_time": '',
  153. "out_tag": '',
  154. "out_play_cnt": 0,
  155. "out_fans": out_user_info["out_fans"],
  156. "out_follow": out_user_info["out_follow"],
  157. "out_friend": 0,
  158. "out_like": out_user_info["out_like"],
  159. "platform": cls.platform,
  160. "tag": cls.tag,
  161. }
  162. our_user_dict = Users.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  163. our_uid = our_user_dict['our_uid']
  164. our_user_link = our_user_dict['our_user_link']
  165. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  166. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  167. our_user_list.append(our_user_dict)
  168. else:
  169. our_user_dict = {
  170. 'out_uid': out_uid,
  171. 'user_name': user_name,
  172. 'our_uid': our_uid,
  173. 'our_user_link': our_user_link,
  174. }
  175. our_user_list.append(our_user_dict)
  176. return our_user_list
  177. except Exception as e:
  178. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  179. @classmethod
  180. def random_signature(cls):
  181. src_digits = string.digits # string_数字
  182. src_uppercase = string.ascii_uppercase # string_大写字母
  183. src_lowercase = string.ascii_lowercase # string_小写字母
  184. digits_num = random.randint(1, 6)
  185. uppercase_num = random.randint(1, 26 - digits_num - 1)
  186. lowercase_num = 26 - (digits_num + uppercase_num)
  187. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  188. src_lowercase, lowercase_num)
  189. random.shuffle(password)
  190. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  191. new_password_start = new_password[0:18]
  192. new_password_end = new_password[-7:]
  193. if new_password[18] == '8':
  194. new_password = new_password_start + 'w' + new_password_end
  195. elif new_password[18] == '9':
  196. new_password = new_password_start + 'x' + new_password_end
  197. elif new_password[18] == '-':
  198. new_password = new_password_start + 'y' + new_password_end
  199. elif new_password[18] == '.':
  200. new_password = new_password_start + 'z' + new_password_end
  201. else:
  202. new_password = new_password_start + 'y' + new_password_end
  203. return new_password
  204. @classmethod
  205. def get_signature(cls, log_type, crawler, out_uid, machine):
  206. try:
  207. # 打印请求配置
  208. ca = DesiredCapabilities.CHROME
  209. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  210. # 不打开浏览器运行
  211. chrome_options = webdriver.ChromeOptions()
  212. chrome_options.add_argument("--headless")
  213. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  214. chrome_options.add_argument("--no-sandbox")
  215. # driver初始化
  216. if machine == 'aliyun' or machine == 'aliyun_hk':
  217. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  218. elif machine == 'macpro':
  219. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  220. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  221. elif machine == 'macair':
  222. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  223. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  224. else:
  225. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  226. driver.implicitly_wait(10)
  227. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  228. time.sleep(3)
  229. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  230. signature = data_src.split("x-signature=")[-1]
  231. return signature
  232. except Exception as e:
  233. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  234. # 获取视频详情
  235. @classmethod
  236. def get_video_url(cls, log_type, crawler, gid):
  237. try:
  238. url = 'https://www.ixigua.com/api/mixVideo/information?'
  239. headers = {
  240. "accept-encoding": "gzip, deflate",
  241. "accept-language": "zh-CN,zh-Hans;q=0.9",
  242. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  243. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  244. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  245. }
  246. params = {
  247. 'mixId': gid,
  248. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  249. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  250. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  251. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  252. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  253. }
  254. cookies = {
  255. 'ixigua-a-s': '1',
  256. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  257. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  258. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  259. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  260. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  261. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  262. '__ac_nonce': '06304878000964fdad287',
  263. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  264. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  265. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  266. '_tea_utm_cache_1300': 'undefined',
  267. 'support_avif': 'false',
  268. 'support_webp': 'false',
  269. 'xiguavideopcwebid': '7134967546256016900',
  270. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  271. }
  272. urllib3.disable_warnings()
  273. response = requests.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies())
  274. response.close()
  275. if 'data' not in response.json() or response.json()['data'] == '':
  276. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  277. else:
  278. video_info = response.json()['data']['gidInformation']['packerData']['video']
  279. video_url_dict = {}
  280. # video_url
  281. if 'videoResource' not in video_info:
  282. video_url_dict["video_url"] = ''
  283. video_url_dict["audio_url"] = ''
  284. video_url_dict["video_width"] = 0
  285. video_url_dict["video_height"] = 0
  286. elif 'dash_120fps' in video_info['videoResource']:
  287. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  288. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  289. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  290. if len(video_url) % 3 == 1:
  291. video_url += '=='
  292. elif len(video_url) % 3 == 2:
  293. video_url += '='
  294. elif len(audio_url) % 3 == 1:
  295. audio_url += '=='
  296. elif len(audio_url) % 3 == 2:
  297. audio_url += '='
  298. video_url = base64.b64decode(video_url).decode('utf8')
  299. audio_url = base64.b64decode(audio_url).decode('utf8')
  300. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  301. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  302. video_url_dict["video_url"] = video_url
  303. video_url_dict["audio_url"] = audio_url
  304. video_url_dict["video_width"] = video_width
  305. video_url_dict["video_height"] = video_height
  306. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  307. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  308. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  309. if len(video_url) % 3 == 1:
  310. video_url += '=='
  311. elif len(video_url) % 3 == 2:
  312. video_url += '='
  313. elif len(audio_url) % 3 == 1:
  314. audio_url += '=='
  315. elif len(audio_url) % 3 == 2:
  316. audio_url += '='
  317. video_url = base64.b64decode(video_url).decode('utf8')
  318. audio_url = base64.b64decode(audio_url).decode('utf8')
  319. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  320. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  321. video_url_dict["video_url"] = video_url
  322. video_url_dict["audio_url"] = audio_url
  323. video_url_dict["video_width"] = video_width
  324. video_url_dict["video_height"] = video_height
  325. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  326. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  327. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  328. if len(video_url) % 3 == 1:
  329. video_url += '=='
  330. elif len(video_url) % 3 == 2:
  331. video_url += '='
  332. elif len(audio_url) % 3 == 1:
  333. audio_url += '=='
  334. elif len(audio_url) % 3 == 2:
  335. audio_url += '='
  336. video_url = base64.b64decode(video_url).decode('utf8')
  337. audio_url = base64.b64decode(audio_url).decode('utf8')
  338. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  339. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  340. video_url_dict["video_url"] = video_url
  341. video_url_dict["audio_url"] = audio_url
  342. video_url_dict["video_width"] = video_width
  343. video_url_dict["video_height"] = video_height
  344. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  345. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  346. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  347. if len(video_url) % 3 == 1:
  348. video_url += '=='
  349. elif len(video_url) % 3 == 2:
  350. video_url += '='
  351. elif len(audio_url) % 3 == 1:
  352. audio_url += '=='
  353. elif len(audio_url) % 3 == 2:
  354. audio_url += '='
  355. video_url = base64.b64decode(video_url).decode('utf8')
  356. audio_url = base64.b64decode(audio_url).decode('utf8')
  357. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  358. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  359. video_url_dict["video_url"] = video_url
  360. video_url_dict["audio_url"] = audio_url
  361. video_url_dict["video_width"] = video_width
  362. video_url_dict["video_height"] = video_height
  363. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  364. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  365. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  366. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  367. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  368. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  369. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  370. if len(video_url) % 3 == 1:
  371. video_url += '=='
  372. elif len(video_url) % 3 == 2:
  373. video_url += '='
  374. elif len(audio_url) % 3 == 1:
  375. audio_url += '=='
  376. elif len(audio_url) % 3 == 2:
  377. audio_url += '='
  378. video_url = base64.b64decode(video_url).decode('utf8')
  379. audio_url = base64.b64decode(audio_url).decode('utf8')
  380. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  381. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  382. video_url_dict["video_url"] = video_url
  383. video_url_dict["audio_url"] = audio_url
  384. video_url_dict["video_width"] = video_width
  385. video_url_dict["video_height"] = video_height
  386. else:
  387. video_url_dict["video_url"] = ''
  388. video_url_dict["audio_url"] = ''
  389. video_url_dict["video_width"] = 0
  390. video_url_dict["video_height"] = 0
  391. elif 'dash' in video_info['videoResource']:
  392. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  393. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  394. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  395. if len(video_url) % 3 == 1:
  396. video_url += '=='
  397. elif len(video_url) % 3 == 2:
  398. video_url += '='
  399. elif len(audio_url) % 3 == 1:
  400. audio_url += '=='
  401. elif len(audio_url) % 3 == 2:
  402. audio_url += '='
  403. video_url = base64.b64decode(video_url).decode('utf8')
  404. audio_url = base64.b64decode(audio_url).decode('utf8')
  405. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  406. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  407. video_url_dict["video_url"] = video_url
  408. video_url_dict["audio_url"] = audio_url
  409. video_url_dict["video_width"] = video_width
  410. video_url_dict["video_height"] = video_height
  411. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  412. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  413. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  414. if len(video_url) % 3 == 1:
  415. video_url += '=='
  416. elif len(video_url) % 3 == 2:
  417. video_url += '='
  418. elif len(audio_url) % 3 == 1:
  419. audio_url += '=='
  420. elif len(audio_url) % 3 == 2:
  421. audio_url += '='
  422. video_url = base64.b64decode(video_url).decode('utf8')
  423. audio_url = base64.b64decode(audio_url).decode('utf8')
  424. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  425. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  426. video_url_dict["video_url"] = video_url
  427. video_url_dict["audio_url"] = audio_url
  428. video_url_dict["video_width"] = video_width
  429. video_url_dict["video_height"] = video_height
  430. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  431. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  432. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  433. if len(video_url) % 3 == 1:
  434. video_url += '=='
  435. elif len(video_url) % 3 == 2:
  436. video_url += '='
  437. elif len(audio_url) % 3 == 1:
  438. audio_url += '=='
  439. elif len(audio_url) % 3 == 2:
  440. audio_url += '='
  441. video_url = base64.b64decode(video_url).decode('utf8')
  442. audio_url = base64.b64decode(audio_url).decode('utf8')
  443. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  444. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  445. video_url_dict["video_url"] = video_url
  446. video_url_dict["audio_url"] = audio_url
  447. video_url_dict["video_width"] = video_width
  448. video_url_dict["video_height"] = video_height
  449. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  450. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  451. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  452. if len(video_url) % 3 == 1:
  453. video_url += '=='
  454. elif len(video_url) % 3 == 2:
  455. video_url += '='
  456. elif len(audio_url) % 3 == 1:
  457. audio_url += '=='
  458. elif len(audio_url) % 3 == 2:
  459. audio_url += '='
  460. video_url = base64.b64decode(video_url).decode('utf8')
  461. audio_url = base64.b64decode(audio_url).decode('utf8')
  462. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  463. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  464. video_url_dict["video_url"] = video_url
  465. video_url_dict["audio_url"] = audio_url
  466. video_url_dict["video_width"] = video_width
  467. video_url_dict["video_height"] = video_height
  468. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  469. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  470. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  471. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  472. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  473. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  474. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  475. if len(video_url) % 3 == 1:
  476. video_url += '=='
  477. elif len(video_url) % 3 == 2:
  478. video_url += '='
  479. elif len(audio_url) % 3 == 1:
  480. audio_url += '=='
  481. elif len(audio_url) % 3 == 2:
  482. audio_url += '='
  483. video_url = base64.b64decode(video_url).decode('utf8')
  484. audio_url = base64.b64decode(audio_url).decode('utf8')
  485. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  486. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  487. video_url_dict["video_url"] = video_url
  488. video_url_dict["audio_url"] = audio_url
  489. video_url_dict["video_width"] = video_width
  490. video_url_dict["video_height"] = video_height
  491. else:
  492. video_url_dict["video_url"] = ''
  493. video_url_dict["audio_url"] = ''
  494. video_url_dict["video_width"] = 0
  495. video_url_dict["video_height"] = 0
  496. elif 'normal' in video_info['videoResource']:
  497. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  498. video_info['videoResource']['normal']['video_list']:
  499. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  500. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  501. if len(video_url) % 3 == 1:
  502. video_url += '=='
  503. elif len(video_url) % 3 == 2:
  504. video_url += '='
  505. elif len(audio_url) % 3 == 1:
  506. audio_url += '=='
  507. elif len(audio_url) % 3 == 2:
  508. audio_url += '='
  509. video_url = base64.b64decode(video_url).decode('utf8')
  510. audio_url = base64.b64decode(audio_url).decode('utf8')
  511. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  512. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  513. video_url_dict["video_url"] = video_url
  514. video_url_dict["audio_url"] = audio_url
  515. video_url_dict["video_width"] = video_width
  516. video_url_dict["video_height"] = video_height
  517. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  518. video_info['videoResource']['normal']['video_list']:
  519. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  520. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  521. if len(video_url) % 3 == 1:
  522. video_url += '=='
  523. elif len(video_url) % 3 == 2:
  524. video_url += '='
  525. elif len(audio_url) % 3 == 1:
  526. audio_url += '=='
  527. elif len(audio_url) % 3 == 2:
  528. audio_url += '='
  529. video_url = base64.b64decode(video_url).decode('utf8')
  530. audio_url = base64.b64decode(audio_url).decode('utf8')
  531. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  532. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  533. video_url_dict["video_url"] = video_url
  534. video_url_dict["audio_url"] = audio_url
  535. video_url_dict["video_width"] = video_width
  536. video_url_dict["video_height"] = video_height
  537. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  538. video_info['videoResource']['normal']['video_list']:
  539. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  540. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  541. if len(video_url) % 3 == 1:
  542. video_url += '=='
  543. elif len(video_url) % 3 == 2:
  544. video_url += '='
  545. elif len(audio_url) % 3 == 1:
  546. audio_url += '=='
  547. elif len(audio_url) % 3 == 2:
  548. audio_url += '='
  549. video_url = base64.b64decode(video_url).decode('utf8')
  550. audio_url = base64.b64decode(audio_url).decode('utf8')
  551. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  552. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  553. video_url_dict["video_url"] = video_url
  554. video_url_dict["audio_url"] = audio_url
  555. video_url_dict["video_width"] = video_width
  556. video_url_dict["video_height"] = video_height
  557. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  558. video_info['videoResource']['normal']['video_list']:
  559. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  560. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  561. if len(video_url) % 3 == 1:
  562. video_url += '=='
  563. elif len(video_url) % 3 == 2:
  564. video_url += '='
  565. elif len(audio_url) % 3 == 1:
  566. audio_url += '=='
  567. elif len(audio_url) % 3 == 2:
  568. audio_url += '='
  569. video_url = base64.b64decode(video_url).decode('utf8')
  570. audio_url = base64.b64decode(audio_url).decode('utf8')
  571. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  572. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  573. video_url_dict["video_url"] = video_url
  574. video_url_dict["audio_url"] = audio_url
  575. video_url_dict["video_width"] = video_width
  576. video_url_dict["video_height"] = video_height
  577. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  578. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  579. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  580. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  581. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  582. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  583. 'backup_url_1']
  584. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  585. 'backup_url_1']
  586. if len(video_url) % 3 == 1:
  587. video_url += '=='
  588. elif len(video_url) % 3 == 2:
  589. video_url += '='
  590. elif len(audio_url) % 3 == 1:
  591. audio_url += '=='
  592. elif len(audio_url) % 3 == 2:
  593. audio_url += '='
  594. video_url = base64.b64decode(video_url).decode('utf8')
  595. audio_url = base64.b64decode(audio_url).decode('utf8')
  596. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  597. 'vwidth']
  598. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  599. 'vheight']
  600. video_url_dict["video_url"] = video_url
  601. video_url_dict["audio_url"] = audio_url
  602. video_url_dict["video_width"] = video_width
  603. video_url_dict["video_height"] = video_height
  604. else:
  605. video_url_dict["video_url"] = ''
  606. video_url_dict["audio_url"] = ''
  607. video_url_dict["video_width"] = 0
  608. video_url_dict["video_height"] = 0
  609. else:
  610. video_url_dict["video_url"] = ''
  611. video_url_dict["audio_url"] = ''
  612. video_url_dict["video_width"] = 0
  613. video_url_dict["video_height"] = 0
  614. return video_url_dict
  615. except Exception as e:
  616. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
@classmethod
def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
    """Page through an ixigua author's video feed and hand each qualifying video to download_publish.

    Requests 30-item pages in a loop (cls.offset advances by 30 per request) and
    stops — resetting cls.offset to 0 — when the response is malformed or when a
    non-pinned video older than the configured publish-time window is reached
    (the feed is ordered newest-first, so everything after it is older too).

    :param log_type: logger channel name
    :param crawler: crawler/platform name used for logging
    :param strategy: strategy label passed through to download_publish
    :param our_uid: our platform uid to publish under (passed through)
    :param out_uid: the ixigua author id whose feed is crawled
    :param oss_endpoint: OSS endpoint passed through to publish
    :param env: runtime environment ('dev' or prod) — passed through
    :param machine: host identifier passed through to helpers
    """
    try:
        # Anti-crawl signature generated once per crawl session.
        signature = cls.random_signature()
        while True:
            url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
            params = {
                'to_user_id': str(out_uid),
                'offset': str(cls.offset),
                'limit': '30',
                'maxBehotTime': '0',
                'order': 'new',
                'isHome': '0',
                # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                '_signature': signature,
            }
            headers = {
                # 'authority': 'www.ixigua.com',
                # 'accept': 'application/json, text/plain, */*',
                # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                # 'cache-control': 'no-cache',
                # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                # 'pragma': 'no-cache',
                'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                # 'sec-ch-ua-mobile': '?0',
                # 'sec-ch-ua-platform': '"macOS"',
                # 'sec-fetch-dest': 'empty',
                # 'sec-fetch-mode': 'cors',
                # 'sec-fetch-site': 'same-origin',
                'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
            }
            urllib3.disable_warnings()
            # Goes through a tunnel proxy; TLS verification is deliberately disabled.
            response = requests.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False)
            response.close()
            cls.offset += 30
            if response.status_code != 200:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                cls.offset = 0
                return
            elif 'data' not in response.text:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                cls.offset = 0
                return
            elif 'videoList' not in response.json()["data"]:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                cls.offset = 0
                return
            else:
                videoList = response.json()['data']['videoList']
                for i in range(len(videoList)):
                    # For every field below, the integer 0 is used as a "missing" sentinel.
                    # video_title
                    if 'title' not in videoList[i]:
                        video_title = 0
                    else:
                        # NOTE(review): '\/' is the same string literal as '/', so the second
                        # replace is redundant — kept as-is (documentation-only change).
                        video_title = videoList[i]['title'].strip().replace('手游', '') \
                            .replace('/', '').replace('\/', '').replace('\n', '')
                    # video_id
                    if 'video_id' not in videoList[i]:
                        video_id = 0
                    else:
                        video_id = videoList[i]['video_id']
                    # gid
                    if 'gid' not in videoList[i]:
                        gid = 0
                    else:
                        gid = videoList[i]['gid']
                    # play_cnt
                    if 'video_detail_info' not in videoList[i]:
                        play_cnt = 0
                    elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                        play_cnt = 0
                    else:
                        play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                    # comment_cnt
                    if 'comment_count' not in videoList[i]:
                        comment_cnt = 0
                    else:
                        comment_cnt = videoList[i]['comment_count']
                    # like_cnt
                    if 'digg_count' not in videoList[i]:
                        like_cnt = 0
                    else:
                        like_cnt = videoList[i]['digg_count']
                    # share_cnt — not exposed by this endpoint, always 0
                    share_cnt = 0
                    # video_duration
                    if 'video_duration' not in videoList[i]:
                        video_duration = 0
                    else:
                        video_duration = int(videoList[i]['video_duration'])
                    # send_time
                    if 'publish_time' not in videoList[i]:
                        publish_time = 0
                    else:
                        publish_time = videoList[i]['publish_time']
                    publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                    # is_top
                    if 'is_top' not in videoList[i]:
                        is_top = 0
                    else:
                        is_top = videoList[i]['is_top']
                    # user_name
                    if 'user_info' not in videoList[i]:
                        user_name = 0
                    elif 'name' not in videoList[i]['user_info']:
                        user_name = 0
                    else:
                        user_name = videoList[i]['user_info']['name']
                    # user_id
                    if 'user_info' not in videoList[i]:
                        user_id = 0
                    elif 'user_id' not in videoList[i]['user_info']:
                        user_id = 0
                    else:
                        user_id = videoList[i]['user_info']['user_id']
                    # avatar_url
                    if 'user_info' not in videoList[i]:
                        avatar_url = 0
                    elif 'avatar_url' not in videoList[i]['user_info']:
                        avatar_url = 0
                    else:
                        avatar_url = videoList[i]['user_info']['avatar_url']
                    # cover_url — prefer the flat 'url' field, fall back to url_list[0]
                    if 'video_detail_info' not in videoList[i]:
                        cover_url = 0
                    elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                        cover_url = 0
                    elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                        cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                    else:
                        cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                    # Retry until the rule sheet is readable.
                    while True:
                        rule_dict = cls.get_rule(log_type, crawler)
                        if rule_dict is None:
                            Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                            time.sleep(10)
                        else:
                            break
                    if gid == 0 or video_id == 0 or cover_url == 0:
                        Common.logger(log_type, crawler).info('无效视频\n')
                    elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                        # Pinned-but-stale videos are skipped without ending the crawl,
                        # because pinned entries break the newest-first ordering.
                        # NOTE(review): `is_top is True` only matches a bool True; if the API
                        # returns 1 for pinned videos this branch never fires — confirm payload type.
                        Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                    elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                        # First stale non-pinned video: everything after it is older, stop paging.
                        Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                        cls.offset = 0
                        return
                    else:
                        video_url_dict = cls.get_video_url(log_type, crawler, gid)
                        video_url = video_url_dict["video_url"]
                        audio_url = video_url_dict["audio_url"]
                        video_width = video_url_dict["video_width"]
                        video_height = video_url_dict["video_height"]
                        video_dict = {'video_title': video_title,
                                      'video_id': video_id,
                                      'gid': gid,
                                      'play_cnt': play_cnt,
                                      'comment_cnt': comment_cnt,
                                      'like_cnt': like_cnt,
                                      'share_cnt': share_cnt,
                                      'video_width': video_width,
                                      'video_height': video_height,
                                      'duration': video_duration,
                                      'publish_time_stamp': publish_time,
                                      'publish_time_str': publish_time_str,
                                      'is_top': is_top,
                                      'user_name': user_name,
                                      'user_id': user_id,
                                      'avatar_url': avatar_url,
                                      'cover_url': cover_url,
                                      'audio_url': audio_url,
                                      'video_url': video_url,
                                      'session': signature}
                        for k, v in video_dict.items():
                            Common.logger(log_type, crawler).info(f"{k}:{v}")
                        cls.download_publish(log_type=log_type,
                                             crawler=crawler,
                                             video_dict=video_dict,
                                             rule_dict=rule_dict,
                                             strategy=strategy,
                                             our_uid=our_uid,
                                             oss_endpoint=oss_endpoint,
                                             env=env,
                                             machine=machine)
    except Exception as e:
        Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  805. @classmethod
  806. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  807. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  808. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  809. return len(repeat_video)
# Download / upload
@classmethod
def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
    """Filter one crawled video, download its assets, publish it, and record it.

    Pipeline: crawl-rule filter -> title filter-word check -> MySQL duplicate
    check -> download cover / video stream / audio stream -> save metadata txt
    -> mux audio+video -> upload via Publish -> append a row to a Feishu sheet
    -> insert a record into the crawler_video table. Any exception is caught
    and only logged (best-effort semantics).
    """
    try:
        if cls.download_rule(video_dict, rule_dict) is False:
            Common.logger(log_type, crawler).info('不满足抓取规则\n')
        elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
            # NOTE(review): the message passes '{}' plus a positional arg — whether it is
            # interpolated depends on Common.logger's backend (loguru would format it,
            # stdlib logging would not); confirm the intended log output.
            Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
        elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
            Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已存在\n')
        else:
            # Download the cover image
            Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
            # Download the video-only stream
            Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
            # Download the audio-only stream
            Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
            # Save the video metadata to a txt file
            Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
            # Mux the separate audio and video tracks into one file
            Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
            # Upload the video
            Common.logger(log_type, crawler).info("开始上传视频...")
            our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                      crawler=crawler,
                                                      strategy=strategy,
                                                      our_uid=our_uid,
                                                      env=env,
                                                      oss_endpoint=oss_endpoint)
            if env == 'dev':
                our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
            else:
                our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
            Common.logger(log_type, crawler).info("视频上传完成")
            if our_video_id is None:
                # Upload failed: delete the local video folder and bail out
                shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                return
            # Record the video in the Feishu sheet
            Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
            upload_time = int(time.time())
            values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                       "定向榜",
                       video_dict['video_title'],
                       str(video_dict['video_id']),
                       our_video_link,
                       video_dict['gid'],
                       video_dict['play_cnt'],
                       video_dict['comment_cnt'],
                       video_dict['like_cnt'],
                       video_dict['share_cnt'],
                       video_dict['duration'],
                       str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                       video_dict['publish_time_str'],
                       video_dict['user_name'],
                       video_dict['user_id'],
                       video_dict['avatar_url'],
                       video_dict['cover_url'],
                       video_dict['video_url'],
                       video_dict['audio_url']]]
            time.sleep(1)
            Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
            Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
            # Persist the video record in the database.
            # NOTE(review): values are interpolated into the SQL string directly; a title
            # containing a double quote would break the statement — consider parameterized
            # queries if MysqlHelper supports them.
            insert_sql = f""" insert into crawler_video(video_id,
                            user_id,
                            out_user_id,
                            platform,
                            strategy,
                            out_video_id,
                            video_title,
                            cover_url,
                            video_url,
                            duration,
                            publish_time,
                            play_cnt,
                            crawler_rule,
                            width,
                            height)
                            values({our_video_id},
                            {our_uid},
                            "{video_dict['user_id']}",
                            "{cls.platform}",
                            "定向爬虫策略",
                            "{video_dict['video_id']}",
                            "{video_dict['video_title']}",
                            "{video_dict['cover_url']}",
                            "{video_dict['video_url']}",
                            {int(video_dict['duration'])},
                            "{video_dict['publish_time_str']}",
                            {int(video_dict['play_cnt'])},
                            '{json.dumps(rule_dict)}',
                            {int(video_dict['video_width'])},
                            {int(video_dict['video_height'])}) """
            Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
            MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
            Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
    except Exception as e:
        Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  917. @classmethod
  918. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  919. try:
  920. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  921. for user in user_list:
  922. out_uid = user["out_uid"]
  923. user_name = user["user_name"]
  924. our_uid = user["our_uid"]
  925. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  926. cls.get_videolist(log_type=log_type,
  927. crawler=crawler,
  928. strategy=strategy,
  929. our_uid=our_uid,
  930. out_uid=out_uid,
  931. oss_endpoint=oss_endpoint,
  932. env=env,
  933. machine=machine)
  934. cls.offset = 0
  935. time.sleep(3)
  936. except Exception as e:
  937. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
  938. if __name__ == '__main__':
  939. # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
  940. # Follow.get_videolist(log_type="follow",
  941. # crawler="xigua",
  942. # strategy="定向爬虫策略",
  943. # our_uid="6267141",
  944. # out_uid="95420624045",
  945. # oss_endpoint="out",
  946. # env="dev",
  947. # machine="local")
  948. # print(Follow.random_signature())
  949. rule = Follow.get_rule("follow", "xigua")
  950. print(type(rule))
  951. print(type(json.dumps(rule)))
  952. print(json.dumps(rule))
  953. pass