xigua_follow.py 59 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. import requests
  13. import urllib3
  14. from selenium.webdriver import DesiredCapabilities
  15. from selenium.webdriver.chrome.service import Service
  16. from selenium.webdriver.common.by import By
  17. from selenium import webdriver
  18. from lxml import etree
  19. sys.path.append(os.getcwd())
  20. from common.db import MysqlHelper
  21. from common.users import Users
  22. from common.common import Common
  23. from common.feishu import Feishu
  24. from common.publish import Publish
class Follow:
    """Targeted ("定向") crawler for Xigua Video (西瓜视频) followed users."""
    # Paging offset for a user's home-page video list
    offset = 0
    # Platform / strategy tags attached to downstream records
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,定向爬虫策略"
  30. @classmethod
  31. def get_rule(cls, log_type, crawler):
  32. try:
  33. while True:
  34. rule_sheet = Feishu.get_values_batch(log_type, crawler, "4kxd31")
  35. if rule_sheet is None:
  36. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  37. time.sleep(10)
  38. continue
  39. rule_dict = {
  40. "play_cnt": int(rule_sheet[1][2]),
  41. "comment_cnt": int(rule_sheet[2][2]),
  42. "like_cnt": int(rule_sheet[3][2]),
  43. "duration": int(rule_sheet[4][2]),
  44. "publish_time": int(rule_sheet[5][2]),
  45. "video_width": int(rule_sheet[6][2]),
  46. "video_height": int(rule_sheet[7][2]),
  47. }
  48. return rule_dict
  49. except Exception as e:
  50. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  51. # 下载规则
  52. @classmethod
  53. def download_rule(cls, video_info_dict, rule_dict):
  54. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  55. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  56. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  57. if video_info_dict['duration'] >= rule_dict['duration']:
  58. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  59. or video_info_dict['video_height'] >= rule_dict['video_height']:
  60. return True
  61. else:
  62. return False
  63. else:
  64. return False
  65. else:
  66. return False
  67. else:
  68. return False
  69. else:
  70. return False
  71. # 过滤词库
  72. @classmethod
  73. def filter_words(cls, log_type, crawler):
  74. try:
  75. while True:
  76. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  77. if filter_words_sheet is None:
  78. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  79. continue
  80. filter_words_list = []
  81. for x in filter_words_sheet:
  82. for y in x:
  83. if y is None:
  84. pass
  85. else:
  86. filter_words_list.append(y)
  87. return filter_words_list
  88. except Exception as e:
  89. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  90. @classmethod
  91. def get_out_user_info(cls, log_type, crawler, out_uid):
  92. try:
  93. headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
  94. 'referer': f'https://www.ixigua.com/home/{out_uid}',
  95. 'Cookie': f'ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; __ac_signature={cls.random_signature()}; MONITOR_WEB_ID=67cb5099-a022-4ec3-bb8e-c4de6ba51dd0; s_v_web_id=verify_lef4i99x_32SosrdH_Qrtk_4LJn_8S7q_fhu16xe3s8ZV; tt_scid=QLJjPuHf6wxVqu6IIq6gHiJXQpVrCwrdhjH2zpm7-E3ZniE1RXBcP6M8b41FJOdo41e1; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1677047013%7C5866a444e5ae10a9df8c11551db75010fb77b657f214ccf84e503fae8d313d09; msToken=PerXJcDdIsZ6zXkGITsftXX4mDaVaW21GuqtzSVdctH46oXXT2GcELIs9f0XW2hunRzP6KVHLZaYElRvNYflLKUXih7lC27XKxs3HjdZiXPK9NQaoKbLfA==; ixigua-a-s=1',}
  96. url = f"https://www.ixigua.com/home/{out_uid}"
  97. response = requests.get(url=url, headers=headers, proxies=Common.tunnel_proxies()).text
  98. html = etree.HTML(response)
  99. out_follow_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[1]/span')[0].text.encode('raw_unicode_escape').decode()
  100. out_fans_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[2]/span')[0].text.encode('raw_unicode_escape').decode()
  101. out_like_str = html.xpath('//div[@class="userDetailV3__header__detail2"]/*[3]/span')[0].text.encode('raw_unicode_escape').decode()
  102. out_avatar_url = f"""https:{html.xpath('//span[@class="component-avatar__inner"]//img/@src')[0]}"""
  103. if "万" in out_follow_str:
  104. out_follow = int(float(out_follow_str.split("万")[0])*10000)
  105. else:
  106. out_follow = int(out_follow_str.replace(",", ""))
  107. if "万" in out_fans_str:
  108. out_fans = int(float(out_fans_str.split("万")[0])*10000)
  109. else:
  110. out_fans = int(out_fans_str.replace(",", ""))
  111. if "万" in out_like_str:
  112. out_like = int(float(out_like_str.split("万")[0])*10000)
  113. else:
  114. out_like = int(out_like_str.replace(",", ""))
  115. out_user_dict = {
  116. "out_follow": out_follow,
  117. "out_fans": out_fans,
  118. "out_like": out_like,
  119. "out_avatar_url": out_avatar_url,
  120. }
  121. # for k, v in out_user_dict.items():
  122. # print(f"{k}:{v}")
  123. return out_user_dict
  124. except Exception as e:
  125. Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
  126. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  127. @classmethod
  128. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  129. try:
  130. while True:
  131. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  132. if user_sheet is None:
  133. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  134. continue
  135. our_user_list = []
  136. for i in range(1, len(user_sheet)):
  137. out_uid = user_sheet[i][2]
  138. user_name = user_sheet[i][3]
  139. our_uid = user_sheet[i][6]
  140. our_user_link = user_sheet[i][7]
  141. if out_uid is None or user_name is None:
  142. Common.logger(log_type, crawler).info("空行\n")
  143. else:
  144. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  145. if our_uid is None:
  146. out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
  147. out_user_dict = {
  148. "out_uid": out_uid,
  149. "user_name": user_name,
  150. "out_avatar_url": out_user_info["out_avatar_url"],
  151. "out_create_time": '',
  152. "out_tag": '',
  153. "out_play_cnt": 0,
  154. "out_fans": out_user_info["out_fans"],
  155. "out_follow": out_user_info["out_follow"],
  156. "out_friend": 0,
  157. "out_like": out_user_info["out_like"],
  158. "platform": cls.platform,
  159. "tag": cls.tag,
  160. }
  161. our_user_dict = Users.create_user(log_type=log_type, crawler=crawler, out_user_dict=out_user_dict, env=env, machine=machine)
  162. our_uid = our_user_dict['our_uid']
  163. our_user_link = our_user_dict['our_user_link']
  164. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
  165. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  166. our_user_list.append(our_user_dict)
  167. else:
  168. our_user_dict = {
  169. 'out_uid': out_uid,
  170. 'user_name': user_name,
  171. 'our_uid': our_uid,
  172. 'our_user_link': our_user_link,
  173. }
  174. our_user_list.append(our_user_dict)
  175. return our_user_list
  176. except Exception as e:
  177. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  178. @classmethod
  179. def random_signature(cls):
  180. src_digits = string.digits # string_数字
  181. src_uppercase = string.ascii_uppercase # string_大写字母
  182. src_lowercase = string.ascii_lowercase # string_小写字母
  183. digits_num = random.randint(1, 6)
  184. uppercase_num = random.randint(1, 26 - digits_num - 1)
  185. lowercase_num = 26 - (digits_num + uppercase_num)
  186. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  187. src_lowercase, lowercase_num)
  188. random.shuffle(password)
  189. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  190. new_password_start = new_password[0:18]
  191. new_password_end = new_password[-7:]
  192. if new_password[18] == '8':
  193. new_password = new_password_start + 'w' + new_password_end
  194. elif new_password[18] == '9':
  195. new_password = new_password_start + 'x' + new_password_end
  196. elif new_password[18] == '-':
  197. new_password = new_password_start + 'y' + new_password_end
  198. elif new_password[18] == '.':
  199. new_password = new_password_start + 'z' + new_password_end
  200. else:
  201. new_password = new_password_start + 'y' + new_password_end
  202. return new_password
  203. @classmethod
  204. def get_signature(cls, log_type, crawler, out_uid, machine):
  205. try:
  206. # 打印请求配置
  207. ca = DesiredCapabilities.CHROME
  208. ca["goog:loggingPrefs"] = {"performance": "ALL"}
  209. # 不打开浏览器运行
  210. chrome_options = webdriver.ChromeOptions()
  211. chrome_options.add_argument("--headless")
  212. chrome_options.add_argument('--user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.79 Safari/537.36')
  213. chrome_options.add_argument("--no-sandbox")
  214. # driver初始化
  215. if machine == 'aliyun' or machine == 'aliyun_hk':
  216. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options)
  217. elif machine == 'macpro':
  218. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  219. service=Service('/Users/lieyunye/Downloads/chromedriver_v86/chromedriver'))
  220. elif machine == 'macair':
  221. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options,
  222. service=Service('/Users/piaoquan/Downloads/chromedriver'))
  223. else:
  224. driver = webdriver.Chrome(desired_capabilities=ca, options=chrome_options, service=Service('/Users/wangkun/Downloads/chromedriver/chromedriver_v110/chromedriver'))
  225. driver.implicitly_wait(10)
  226. driver.get(f'https://www.ixigua.com/home/{out_uid}/')
  227. time.sleep(3)
  228. data_src = driver.find_elements(By.XPATH, '//img[@class="tt-img BU-MagicImage tt-img-loaded"]')[1].get_attribute("data-src")
  229. signature = data_src.split("x-signature=")[-1]
  230. return signature
  231. except Exception as e:
  232. Common.logger(log_type, crawler).error(f'get_signature异常:{e}\n')
  233. # 获取视频详情
  234. @classmethod
  235. def get_video_url(cls, log_type, crawler, gid):
  236. try:
  237. url = 'https://www.ixigua.com/api/mixVideo/information?'
  238. headers = {
  239. "accept-encoding": "gzip, deflate",
  240. "accept-language": "zh-CN,zh-Hans;q=0.9",
  241. "user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) "
  242. "AppleWebKit/605.1.15 (KHTML, like Gecko) Version/15.5 Safari/605.1.15",
  243. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  244. }
  245. params = {
  246. 'mixId': gid,
  247. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  248. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  249. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  250. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  251. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  252. }
  253. cookies = {
  254. 'ixigua-a-s': '1',
  255. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  256. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  257. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  258. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  259. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  260. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  261. '__ac_nonce': '06304878000964fdad287',
  262. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  263. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  264. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  265. '_tea_utm_cache_1300': 'undefined',
  266. 'support_avif': 'false',
  267. 'support_webp': 'false',
  268. 'xiguavideopcwebid': '7134967546256016900',
  269. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  270. }
  271. urllib3.disable_warnings()
  272. response = requests.get(url=url, headers=headers, params=params, cookies=cookies, verify=False, proxies=Common.tunnel_proxies())
  273. if 'data' not in response.json() or response.json()['data'] == '':
  274. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  275. else:
  276. video_info = response.json()['data']['gidInformation']['packerData']['video']
  277. video_url_dict = {}
  278. # video_url
  279. if 'videoResource' not in video_info:
  280. video_url_dict["video_url"] = ''
  281. video_url_dict["audio_url"] = ''
  282. video_url_dict["video_width"] = 0
  283. video_url_dict["video_height"] = 0
  284. elif 'dash_120fps' in video_info['videoResource']:
  285. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in video_info['videoResource']['dash_120fps']['video_list']:
  286. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  287. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  288. if len(video_url) % 3 == 1:
  289. video_url += '=='
  290. elif len(video_url) % 3 == 2:
  291. video_url += '='
  292. elif len(audio_url) % 3 == 1:
  293. audio_url += '=='
  294. elif len(audio_url) % 3 == 2:
  295. audio_url += '='
  296. video_url = base64.b64decode(video_url).decode('utf8')
  297. audio_url = base64.b64decode(audio_url).decode('utf8')
  298. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  299. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  300. video_url_dict["video_url"] = video_url
  301. video_url_dict["audio_url"] = audio_url
  302. video_url_dict["video_width"] = video_width
  303. video_url_dict["video_height"] = video_height
  304. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in video_info['videoResource']['dash_120fps']['video_list']:
  305. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  306. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  307. if len(video_url) % 3 == 1:
  308. video_url += '=='
  309. elif len(video_url) % 3 == 2:
  310. video_url += '='
  311. elif len(audio_url) % 3 == 1:
  312. audio_url += '=='
  313. elif len(audio_url) % 3 == 2:
  314. audio_url += '='
  315. video_url = base64.b64decode(video_url).decode('utf8')
  316. audio_url = base64.b64decode(audio_url).decode('utf8')
  317. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  318. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  319. video_url_dict["video_url"] = video_url
  320. video_url_dict["audio_url"] = audio_url
  321. video_url_dict["video_width"] = video_width
  322. video_url_dict["video_height"] = video_height
  323. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in video_info['videoResource']['dash_120fps']['video_list']:
  324. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  325. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  326. if len(video_url) % 3 == 1:
  327. video_url += '=='
  328. elif len(video_url) % 3 == 2:
  329. video_url += '='
  330. elif len(audio_url) % 3 == 1:
  331. audio_url += '=='
  332. elif len(audio_url) % 3 == 2:
  333. audio_url += '='
  334. video_url = base64.b64decode(video_url).decode('utf8')
  335. audio_url = base64.b64decode(audio_url).decode('utf8')
  336. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  337. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  338. video_url_dict["video_url"] = video_url
  339. video_url_dict["audio_url"] = audio_url
  340. video_url_dict["video_width"] = video_width
  341. video_url_dict["video_height"] = video_height
  342. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in video_info['videoResource']['dash_120fps']['video_list']:
  343. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  344. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  345. if len(video_url) % 3 == 1:
  346. video_url += '=='
  347. elif len(video_url) % 3 == 2:
  348. video_url += '='
  349. elif len(audio_url) % 3 == 1:
  350. audio_url += '=='
  351. elif len(audio_url) % 3 == 2:
  352. audio_url += '='
  353. video_url = base64.b64decode(video_url).decode('utf8')
  354. audio_url = base64.b64decode(audio_url).decode('utf8')
  355. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  356. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  357. video_url_dict["video_url"] = video_url
  358. video_url_dict["audio_url"] = audio_url
  359. video_url_dict["video_width"] = video_width
  360. video_url_dict["video_height"] = video_height
  361. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  362. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  363. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  364. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  365. and len(video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  366. video_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  367. audio_url = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  368. if len(video_url) % 3 == 1:
  369. video_url += '=='
  370. elif len(video_url) % 3 == 2:
  371. video_url += '='
  372. elif len(audio_url) % 3 == 1:
  373. audio_url += '=='
  374. elif len(audio_url) % 3 == 2:
  375. audio_url += '='
  376. video_url = base64.b64decode(video_url).decode('utf8')
  377. audio_url = base64.b64decode(audio_url).decode('utf8')
  378. video_width = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  379. video_height = video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  380. video_url_dict["video_url"] = video_url
  381. video_url_dict["audio_url"] = audio_url
  382. video_url_dict["video_width"] = video_width
  383. video_url_dict["video_height"] = video_height
  384. else:
  385. video_url_dict["video_url"] = ''
  386. video_url_dict["audio_url"] = ''
  387. video_url_dict["video_width"] = 0
  388. video_url_dict["video_height"] = 0
  389. elif 'dash' in video_info['videoResource']:
  390. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in video_info['videoResource']['dash']['video_list']:
  391. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  392. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  393. if len(video_url) % 3 == 1:
  394. video_url += '=='
  395. elif len(video_url) % 3 == 2:
  396. video_url += '='
  397. elif len(audio_url) % 3 == 1:
  398. audio_url += '=='
  399. elif len(audio_url) % 3 == 2:
  400. audio_url += '='
  401. video_url = base64.b64decode(video_url).decode('utf8')
  402. audio_url = base64.b64decode(audio_url).decode('utf8')
  403. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  404. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  405. video_url_dict["video_url"] = video_url
  406. video_url_dict["audio_url"] = audio_url
  407. video_url_dict["video_width"] = video_width
  408. video_url_dict["video_height"] = video_height
  409. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in video_info['videoResource']['dash']['video_list']:
  410. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  411. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  412. if len(video_url) % 3 == 1:
  413. video_url += '=='
  414. elif len(video_url) % 3 == 2:
  415. video_url += '='
  416. elif len(audio_url) % 3 == 1:
  417. audio_url += '=='
  418. elif len(audio_url) % 3 == 2:
  419. audio_url += '='
  420. video_url = base64.b64decode(video_url).decode('utf8')
  421. audio_url = base64.b64decode(audio_url).decode('utf8')
  422. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  423. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  424. video_url_dict["video_url"] = video_url
  425. video_url_dict["audio_url"] = audio_url
  426. video_url_dict["video_width"] = video_width
  427. video_url_dict["video_height"] = video_height
  428. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in video_info['videoResource']['dash']['video_list']:
  429. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  430. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  431. if len(video_url) % 3 == 1:
  432. video_url += '=='
  433. elif len(video_url) % 3 == 2:
  434. video_url += '='
  435. elif len(audio_url) % 3 == 1:
  436. audio_url += '=='
  437. elif len(audio_url) % 3 == 2:
  438. audio_url += '='
  439. video_url = base64.b64decode(video_url).decode('utf8')
  440. audio_url = base64.b64decode(audio_url).decode('utf8')
  441. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  442. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  443. video_url_dict["video_url"] = video_url
  444. video_url_dict["audio_url"] = audio_url
  445. video_url_dict["video_width"] = video_width
  446. video_url_dict["video_height"] = video_height
  447. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in video_info['videoResource']['dash']['video_list']:
  448. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  449. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  450. if len(video_url) % 3 == 1:
  451. video_url += '=='
  452. elif len(video_url) % 3 == 2:
  453. video_url += '='
  454. elif len(audio_url) % 3 == 1:
  455. audio_url += '=='
  456. elif len(audio_url) % 3 == 2:
  457. audio_url += '='
  458. video_url = base64.b64decode(video_url).decode('utf8')
  459. audio_url = base64.b64decode(audio_url).decode('utf8')
  460. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  461. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  462. video_url_dict["video_url"] = video_url
  463. video_url_dict["audio_url"] = audio_url
  464. video_url_dict["video_width"] = video_width
  465. video_url_dict["video_height"] = video_height
  466. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  467. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  468. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  469. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  470. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  471. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['backup_url_1']
  472. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1]['backup_url_1']
  473. if len(video_url) % 3 == 1:
  474. video_url += '=='
  475. elif len(video_url) % 3 == 2:
  476. video_url += '='
  477. elif len(audio_url) % 3 == 1:
  478. audio_url += '=='
  479. elif len(audio_url) % 3 == 2:
  480. audio_url += '='
  481. video_url = base64.b64decode(video_url).decode('utf8')
  482. audio_url = base64.b64decode(audio_url).decode('utf8')
  483. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vwidth']
  484. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1]['vheight']
  485. video_url_dict["video_url"] = video_url
  486. video_url_dict["audio_url"] = audio_url
  487. video_url_dict["video_width"] = video_width
  488. video_url_dict["video_height"] = video_height
  489. else:
  490. video_url_dict["video_url"] = ''
  491. video_url_dict["audio_url"] = ''
  492. video_url_dict["video_width"] = 0
  493. video_url_dict["video_height"] = 0
  494. elif 'normal' in video_info['videoResource']:
  495. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  496. video_info['videoResource']['normal']['video_list']:
  497. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  498. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  499. if len(video_url) % 3 == 1:
  500. video_url += '=='
  501. elif len(video_url) % 3 == 2:
  502. video_url += '='
  503. elif len(audio_url) % 3 == 1:
  504. audio_url += '=='
  505. elif len(audio_url) % 3 == 2:
  506. audio_url += '='
  507. video_url = base64.b64decode(video_url).decode('utf8')
  508. audio_url = base64.b64decode(audio_url).decode('utf8')
  509. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  510. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  511. video_url_dict["video_url"] = video_url
  512. video_url_dict["audio_url"] = audio_url
  513. video_url_dict["video_width"] = video_width
  514. video_url_dict["video_height"] = video_height
  515. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  516. video_info['videoResource']['normal']['video_list']:
  517. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  518. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  519. if len(video_url) % 3 == 1:
  520. video_url += '=='
  521. elif len(video_url) % 3 == 2:
  522. video_url += '='
  523. elif len(audio_url) % 3 == 1:
  524. audio_url += '=='
  525. elif len(audio_url) % 3 == 2:
  526. audio_url += '='
  527. video_url = base64.b64decode(video_url).decode('utf8')
  528. audio_url = base64.b64decode(audio_url).decode('utf8')
  529. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  530. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  531. video_url_dict["video_url"] = video_url
  532. video_url_dict["audio_url"] = audio_url
  533. video_url_dict["video_width"] = video_width
  534. video_url_dict["video_height"] = video_height
  535. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  536. video_info['videoResource']['normal']['video_list']:
  537. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  538. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  539. if len(video_url) % 3 == 1:
  540. video_url += '=='
  541. elif len(video_url) % 3 == 2:
  542. video_url += '='
  543. elif len(audio_url) % 3 == 1:
  544. audio_url += '=='
  545. elif len(audio_url) % 3 == 2:
  546. audio_url += '='
  547. video_url = base64.b64decode(video_url).decode('utf8')
  548. audio_url = base64.b64decode(audio_url).decode('utf8')
  549. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  550. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  551. video_url_dict["video_url"] = video_url
  552. video_url_dict["audio_url"] = audio_url
  553. video_url_dict["video_width"] = video_width
  554. video_url_dict["video_height"] = video_height
  555. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  556. video_info['videoResource']['normal']['video_list']:
  557. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  558. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  559. if len(video_url) % 3 == 1:
  560. video_url += '=='
  561. elif len(video_url) % 3 == 2:
  562. video_url += '='
  563. elif len(audio_url) % 3 == 1:
  564. audio_url += '=='
  565. elif len(audio_url) % 3 == 2:
  566. audio_url += '='
  567. video_url = base64.b64decode(video_url).decode('utf8')
  568. audio_url = base64.b64decode(audio_url).decode('utf8')
  569. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  570. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  571. video_url_dict["video_url"] = video_url
  572. video_url_dict["audio_url"] = audio_url
  573. video_url_dict["video_width"] = video_width
  574. video_url_dict["video_height"] = video_height
  575. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  576. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  577. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  578. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  579. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  580. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  581. 'backup_url_1']
  582. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  583. 'backup_url_1']
  584. if len(video_url) % 3 == 1:
  585. video_url += '=='
  586. elif len(video_url) % 3 == 2:
  587. video_url += '='
  588. elif len(audio_url) % 3 == 1:
  589. audio_url += '=='
  590. elif len(audio_url) % 3 == 2:
  591. audio_url += '='
  592. video_url = base64.b64decode(video_url).decode('utf8')
  593. audio_url = base64.b64decode(audio_url).decode('utf8')
  594. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  595. 'vwidth']
  596. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  597. 'vheight']
  598. video_url_dict["video_url"] = video_url
  599. video_url_dict["audio_url"] = audio_url
  600. video_url_dict["video_width"] = video_width
  601. video_url_dict["video_height"] = video_height
  602. else:
  603. video_url_dict["video_url"] = ''
  604. video_url_dict["audio_url"] = ''
  605. video_url_dict["video_width"] = 0
  606. video_url_dict["video_height"] = 0
  607. else:
  608. video_url_dict["video_url"] = ''
  609. video_url_dict["audio_url"] = ''
  610. video_url_dict["video_width"] = 0
  611. video_url_dict["video_height"] = 0
  612. return video_url_dict
  613. except Exception as e:
  614. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
@classmethod
def get_videolist(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
    """Page through out_uid's video list on ixigua.com and publish each eligible item.

    Requests 30 items per page via the ``new_video_list`` API, advancing the
    class-level ``cls.offset`` cursor after every request.  The loop ends when
    the response is unusable (non-200 / missing ``data`` / missing
    ``videoList``) or when a non-top video older than
    ``rule_dict['publish_time']`` days is reached — the list is ordered
    ``'new'``, so every later item is older still.  Eligible items are resolved
    to media URLs via ``get_video_url`` and handed to ``download_publish``.

    :param log_type: logger channel name
    :param crawler: crawler/project name (also the logger namespace)
    :param strategy: crawl-strategy label forwarded to download_publish
    :param our_uid: piaoquan uid that will own the uploaded videos
    :param out_uid: ixigua author id whose list is crawled
    :param oss_endpoint: OSS endpoint forwarded to the publisher
    :param env: runtime environment ('dev' or production)
    :param machine: machine tag forwarded to the DB helpers
    :return: None; any exception is logged and swallowed by the outer except.
    """
    try:
        signature = cls.random_signature()
        while True:
            url = "https://www.ixigua.com/api/videov2/author/new_video_list?"
            params = {
                'to_user_id': str(out_uid),
                'offset': str(cls.offset),
                'limit': '30',
                'maxBehotTime': '0',
                'order': 'new',
                'isHome': '0',
                # 'msToken': 'G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==',
                # 'X-Bogus': 'DFSzswVuEkUANjW9ShFTgR/F6qHt',
                '_signature': signature,
            }
            headers = {
                # 'authority': 'www.ixigua.com',
                # 'accept': 'application/json, text/plain, */*',
                # 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                # 'cache-control': 'no-cache',
                # 'cookie': f'MONITOR_WEB_ID=7168304743566296612; __ac_signature={signature}; ixigua-a-s=1; support_webp=true; support_avif=false; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; msToken=G0eRzNkw189a8TLaXjc6nTHVMQwh9XcxVAqTbGKi7iPJdQcLwS3-XRrJ3MZ7QBfqErpxp3EX1WtvWOIcZ3NIgr41hgcd-v64so_RRj3YCRw1UsKW8mIssNLlIMspsg==; tt_scid=o4agqz7u9SKPwfBoPt6S82Cw0q.9KDtqmNe0JHxMqmpxNHQWq1BmrQdgVU6jEoX7ed99; ttwid=1%7CHHtv2QqpSGuSu8r-zXF1QoWsvjmNi1SJrqOrZzg-UCY%7C1676618894%7Cee5ad95378275f282f230a7ffa9947ae7eff40d0829c5a2568672a6dc90a1c96; ixigua-a-s=1',
                # 'pragma': 'no-cache',
                'referer': f'https://www.ixigua.com/home/{out_uid}/video/?preActiveKey=hotsoon&list_entrance=userdetail',
                # 'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                # 'sec-ch-ua-mobile': '?0',
                # 'sec-ch-ua-platform': '"macOS"',
                # 'sec-fetch-dest': 'empty',
                # 'sec-fetch-mode': 'cors',
                # 'sec-fetch-site': 'same-origin',
                'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                # 'x-secsdk-csrf-token': '00010000000119e3f9454d1dcbb288704cda1960f241e2d19bd21f2fd283520c3615a990ac5a17448bfbb902a249'
            }
            urllib3.disable_warnings()
            response = requests.get(url=url, headers=headers, params=params, proxies=Common.tunnel_proxies(), verify=False)
            # Cursor is advanced before validating the response, so a bad page
            # is not retried at the same offset.
            cls.offset += 30
            if response.status_code != 200:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                cls.offset = 0
                return
            elif 'data' not in response.text:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.text}\n")
                cls.offset = 0
                return
            elif 'videoList' not in response.json()["data"]:
                Common.logger(log_type, crawler).warning(f"get_videolist_response:{response.json()}\n")
                cls.offset = 0
                return
            else:
                videoList = response.json()['data']['videoList']
                # The integer 0 doubles as the "missing field" sentinel for
                # every extraction below (including string-valued fields).
                for i in range(len(videoList)):
                    # video_title (strip banned substrings; note '\/' is the
                    # same string as '/' in Python, so it is a no-op here)
                    if 'title' not in videoList[i]:
                        video_title = 0
                    else:
                        video_title = videoList[i]['title'].strip().replace('手游', '') \
                            .replace('/', '').replace('\/', '').replace('\n', '')
                    # video_id
                    if 'video_id' not in videoList[i]:
                        video_id = 0
                    else:
                        video_id = videoList[i]['video_id']
                    # gid (group id, used later to resolve media URLs)
                    if 'gid' not in videoList[i]:
                        gid = 0
                    else:
                        gid = videoList[i]['gid']
                    # play_cnt
                    if 'video_detail_info' not in videoList[i]:
                        play_cnt = 0
                    elif 'video_watch_count' not in videoList[i]['video_detail_info']:
                        play_cnt = 0
                    else:
                        play_cnt = videoList[i]['video_detail_info']['video_watch_count']
                    # comment_cnt
                    if 'comment_count' not in videoList[i]:
                        comment_cnt = 0
                    else:
                        comment_cnt = videoList[i]['comment_count']
                    # like_cnt
                    if 'digg_count' not in videoList[i]:
                        like_cnt = 0
                    else:
                        like_cnt = videoList[i]['digg_count']
                    # share_cnt (the API exposes no share counter; always 0)
                    share_cnt = 0
                    # video_duration (seconds)
                    if 'video_duration' not in videoList[i]:
                        video_duration = 0
                    else:
                        video_duration = int(videoList[i]['video_duration'])
                    # publish_time (epoch seconds) and its formatted form
                    if 'publish_time' not in videoList[i]:
                        publish_time = 0
                    else:
                        publish_time = videoList[i]['publish_time']
                    publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time))
                    # is_top — pinned videos; NOTE(review): the check below uses
                    # `is True`, so an int 1 from the API would not match — confirm
                    # the API returns a bool here.
                    if 'is_top' not in videoList[i]:
                        is_top = 0
                    else:
                        is_top = videoList[i]['is_top']
                    # user_name
                    if 'user_info' not in videoList[i]:
                        user_name = 0
                    elif 'name' not in videoList[i]['user_info']:
                        user_name = 0
                    else:
                        user_name = videoList[i]['user_info']['name']
                    # user_id
                    if 'user_info' not in videoList[i]:
                        user_id = 0
                    elif 'user_id' not in videoList[i]['user_info']:
                        user_id = 0
                    else:
                        user_id = videoList[i]['user_info']['user_id']
                    # avatar_url
                    if 'user_info' not in videoList[i]:
                        avatar_url = 0
                    elif 'avatar_url' not in videoList[i]['user_info']:
                        avatar_url = 0
                    else:
                        avatar_url = videoList[i]['user_info']['avatar_url']
                    # cover_url — prefer the direct 'url' field, fall back to
                    # the first entry of 'url_list'
                    if 'video_detail_info' not in videoList[i]:
                        cover_url = 0
                    elif 'detail_video_large_image' not in videoList[i]['video_detail_info']:
                        cover_url = 0
                    elif 'url' in videoList[i]['video_detail_info']['detail_video_large_image']:
                        cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url']
                    else:
                        cover_url = videoList[i]['video_detail_info']['detail_video_large_image']['url_list'][0]['url']
                    # Fetch the crawl rule, retrying every 10 s until available.
                    while True:
                        rule_dict = cls.get_rule(log_type, crawler)
                        if rule_dict is None:
                            Common.logger(log_type, crawler).warning(f"rule_dict:{rule_dict}, 10秒后重试")
                            time.sleep(10)
                        else:
                            break
                    if gid == 0 or video_id == 0 or cover_url == 0:
                        # Missing essential fields — skip this item only.
                        Common.logger(log_type, crawler).info('无效视频\n')
                    elif is_top is True and int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                        # Old but pinned: skip it without ending pagination,
                        # since pinned items can precede newer ones.
                        Common.logger(log_type, crawler).info(f'置顶视频,且发布时间:{publish_time_str} 超过{rule_dict["publish_time"]}天\n')
                    elif int(time.time()) - int(publish_time) > 3600 * 24 * rule_dict['publish_time']:
                        # Too old and not pinned: everything after is older —
                        # stop crawling this author entirely.
                        Common.logger(log_type, crawler).info(f'发布时间:{publish_time_str}超过{rule_dict["publish_time"]}天\n')
                        cls.offset = 0
                        return
                    else:
                        video_url_dict = cls.get_video_url(log_type, crawler, gid)
                        video_url = video_url_dict["video_url"]
                        audio_url = video_url_dict["audio_url"]
                        video_width = video_url_dict["video_width"]
                        video_height = video_url_dict["video_height"]
                        video_dict = {'video_title': video_title,
                                      'video_id': video_id,
                                      'gid': gid,
                                      'play_cnt': play_cnt,
                                      'comment_cnt': comment_cnt,
                                      'like_cnt': like_cnt,
                                      'share_cnt': share_cnt,
                                      'video_width': video_width,
                                      'video_height': video_height,
                                      'duration': video_duration,
                                      'publish_time_stamp': publish_time,
                                      'publish_time_str': publish_time_str,
                                      'is_top': is_top,
                                      'user_name': user_name,
                                      'user_id': user_id,
                                      'avatar_url': avatar_url,
                                      'cover_url': cover_url,
                                      'audio_url': audio_url,
                                      'video_url': video_url,
                                      'session': signature}
                        for k, v in video_dict.items():
                            Common.logger(log_type, crawler).info(f"{k}:{v}")
                        cls.download_publish(log_type=log_type,
                                             crawler=crawler,
                                             video_dict=video_dict,
                                             rule_dict=rule_dict,
                                             strategy=strategy,
                                             our_uid=our_uid,
                                             oss_endpoint=oss_endpoint,
                                             env=env,
                                             machine=machine)
    except Exception as e:
        Common.logger(log_type, crawler).error(f"get_videolist:{e}\n")
  802. @classmethod
  803. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  804. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  805. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  806. return len(repeat_video)
# Download / upload pipeline
@classmethod
def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
    """Download one video's assets, compose them, upload to piaoquan, and record it.

    Steps, in order: rule check → title filter-word check → duplicate check
    (MySQL) → download cover/video/audio → save metadata txt → mux audio+video
    → upload via Publish → append a row to the Feishu sheet → insert a row
    into crawler_video.  Any failure is logged and swallowed.

    :param video_dict: metadata assembled by get_videolist (title, ids, counts,
                       urls, publish time, session signature)
    :param rule_dict: crawl rule used both for filtering and for the DB record
    :return: None
    """
    try:
        if cls.download_rule(video_dict, rule_dict) is False:
            Common.logger(log_type, crawler).info('不满足抓取规则\n')
        # Equivalent to: any(word in title for word in filter_words) — skip
        # videos whose title contains a banned word.
        elif any(word if word in video_dict['video_title'] else False for word in cls.filter_words(log_type, crawler)) is True:
            Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
        elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
            Common.logger(log_type, crawler).info('视频已下载\n')
        # Legacy Feishu-sheet dedupe checks, superseded by the MySQL check above:
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'e075e9') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', '3Ul6wZ') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'QOWqMo') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已下载\n')
        # elif str(video_dict['video_id']) in [x for y in Feishu.get_values_batch(log_type, 'xigua', 'wjhpDs') for x in y]:
        #     Common.logger(log_type, crawler).info('视频已存在\n')
        else:
            # Download the cover image
            Common.download_method(log_type=log_type, crawler=crawler, text='cover', title=video_dict['video_title'], url=video_dict['cover_url'])
            # Download the video stream
            Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video', title=video_dict['video_title'], url=video_dict['video_url'])
            # Download the audio stream (xigua serves video and audio separately)
            Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio', title=video_dict['video_title'], url=video_dict['audio_url'])
            # Persist metadata to a txt file alongside the media
            Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
            # Mux the separate audio and video streams into one file
            Common.video_compose(log_type=log_type, crawler=crawler, video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
            # Upload the composed video
            Common.logger(log_type, crawler).info("开始上传视频...")
            our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                      crawler=crawler,
                                                      strategy=strategy,
                                                      our_uid=our_uid,
                                                      env=env,
                                                      oss_endpoint=oss_endpoint)
            if env == 'dev':
                our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
            else:
                our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
            Common.logger(log_type, crawler).info("视频上传完成")
            if our_video_id is None:
                # Upload failed — remove the local video folder and bail out
                # before any bookkeeping.
                shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                return
            # Record the video in the Feishu sheet (insert a fresh row, then
            # fill columns F..Z of row 2)
            Feishu.insert_columns(log_type, 'xigua', "e075e9", "ROWS", 1, 2)
            upload_time = int(time.time())
            values = [[time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                       "定向榜",
                       video_dict['video_title'],
                       str(video_dict['video_id']),
                       our_video_link,
                       video_dict['gid'],
                       video_dict['play_cnt'],
                       video_dict['comment_cnt'],
                       video_dict['like_cnt'],
                       video_dict['share_cnt'],
                       video_dict['duration'],
                       str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
                       video_dict['publish_time_str'],
                       video_dict['user_name'],
                       video_dict['user_id'],
                       video_dict['avatar_url'],
                       video_dict['cover_url'],
                       video_dict['video_url'],
                       video_dict['audio_url']]]
            time.sleep(1)
            Feishu.update_values(log_type, 'xigua', "e075e9", "F2:Z2", values)
            Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
            # Record the video in the crawler_video table.
            # NOTE(review): values are interpolated into the SQL text rather
            # than bound as parameters; a title containing a double quote
            # would break the statement — confirm MysqlHelper's capabilities.
            insert_sql = f""" insert into crawler_video(video_id,
                                                        user_id,
                                                        out_user_id,
                                                        platform,
                                                        strategy,
                                                        out_video_id,
                                                        video_title,
                                                        cover_url,
                                                        video_url,
                                                        duration,
                                                        publish_time,
                                                        play_cnt,
                                                        crawler_rule,
                                                        width,
                                                        height)
                                                values({our_video_id},
                                                        {our_uid},
                                                        "{video_dict['user_id']}",
                                                        "{cls.platform}",
                                                        "定向爬虫策略",
                                                        "{video_dict['video_id']}",
                                                        "{video_dict['video_title']}",
                                                        "{video_dict['cover_url']}",
                                                        "{video_dict['video_url']}",
                                                        {int(video_dict['duration'])},
                                                        "{video_dict['publish_time_str']}",
                                                        {int(video_dict['play_cnt'])},
                                                        '{json.dumps(rule_dict)}',
                                                        {int(video_dict['video_width'])},
                                                        {int(video_dict['video_height'])}) """
            Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
            MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
            Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
    except Exception as e:
        Common.logger(log_type, crawler).error(f'download_publish异常:{e}\n')
  914. @classmethod
  915. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  916. try:
  917. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="5tlTYB", env=env, machine=machine)
  918. for user in user_list:
  919. out_uid = user["out_uid"]
  920. user_name = user["user_name"]
  921. our_uid = user["our_uid"]
  922. Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
  923. cls.get_videolist(log_type=log_type,
  924. crawler=crawler,
  925. strategy=strategy,
  926. our_uid=our_uid,
  927. out_uid=out_uid,
  928. oss_endpoint=oss_endpoint,
  929. env=env,
  930. machine=machine)
  931. cls.offset = 0
  932. time.sleep(3)
  933. except Exception as e:
  934. Common.logger(log_type, crawler).error(f"get_follow_videos:{e}\n")
  935. if __name__ == '__main__':
  936. # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
  937. # Follow.get_videolist(log_type="follow",
  938. # crawler="xigua",
  939. # strategy="定向爬虫策略",
  940. # our_uid="6267141",
  941. # out_uid="95420624045",
  942. # oss_endpoint="out",
  943. # env="dev",
  944. # machine="local")
  945. # print(Follow.random_signature())
  946. rule = Follow.get_rule("follow", "xigua")
  947. print(type(rule))
  948. print(type(json.dumps(rule)))
  949. print(json.dumps(rule))
  950. pass