# xigua_search.py

# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/17
import base64
import json
import os
import random
import shutil
import string
import sys
import time
from hashlib import md5
import requests
import urllib3
from urllib.parse import quote
from requests.adapters import HTTPAdapter
sys.path.append(os.getcwd())
from common.db import MysqlHelper
from common.getuser import getUser
from common.common import Common
from common.feishu import Feishu
from common.publish import Publish
from common.userAgent import get_random_user_agent, get_random_header


class XiguaSearch:
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,搜索爬虫策略"

    @classmethod
    def get_rule(cls, log_type, crawler):
        try:
            while True:
                rule_sheet = Feishu.get_values_batch(log_type, crawler, "shxOl7")
                if rule_sheet is None:
                    Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
                    time.sleep(10)
                    continue
                rule_dict = {
                    "play_cnt": int(rule_sheet[1][2]),
                    "min_duration": int(rule_sheet[2][2]),
                    "max_duration": int(rule_sheet[3][2]),
                    "publish_time": int(rule_sheet[4][2]),
                }
                return rule_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
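
    # get_rule above reads the crawl rule from the Feishu sheet; the returned dict
    # is shaped like this (numbers hypothetical):
    # {"play_cnt": 10000, "min_duration": 60, "max_duration": 600, "publish_time": 3}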

    # Download rule check
    @classmethod
    def download_rule(cls, video_info_dict, rule_dict):
        # Every engagement/duration threshold must be met, and at least one of
        # width/height must reach its minimum.
        return (video_info_dict['play_cnt'] >= rule_dict['play_cnt']
                and video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']
                and video_info_dict['like_cnt'] >= rule_dict['like_cnt']
                and video_info_dict['duration'] >= rule_dict['duration']
                and (video_info_dict['video_width'] >= rule_dict['video_width']
                     or video_info_dict['video_height'] >= rule_dict['video_height']))
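
    # Usage sketch for the rule check above (all values hypothetical):
    # video = {'play_cnt': 1000, 'comment_cnt': 10, 'like_cnt': 50,
    #          'duration': 120, 'video_width': 1280, 'video_height': 720}
    # rule = {'play_cnt': 500, 'comment_cnt': 5, 'like_cnt': 20,
    #         'duration': 60, 'video_width': 720, 'video_height': 720}
    # XiguaSearch.download_rule(video, rule)  # -> True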

    # Filter-word list
    @classmethod
    def filter_words(cls, log_type, crawler):
        try:
            while True:
                filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
                if filter_words_sheet is None:
                    Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
                    time.sleep(10)  # wait 10 s before retrying
                    continue
                filter_words_list = []
                for x in filter_words_sheet:
                    for y in x:
                        if y is not None:
                            filter_words_list.append(y)
                return filter_words_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')

    # Get user info (as dicts). Note: some user_id values are int, others str
    @classmethod
    def get_user_list(cls, log_type, crawler, sheetid, env, machine):
        try:
            while True:
                user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
                if user_sheet is None:
                    Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
                    time.sleep(10)  # wait 10 s before retrying
                    continue
                our_user_list = []
                for i in range(1, len(user_sheet)):
                    our_uid = user_sheet[i][6]
                    search_word = user_sheet[i][4]
                    tag1 = user_sheet[i][8]
                    tag2 = user_sheet[i][9]
                    tag3 = user_sheet[i][10]
                    tag4 = user_sheet[i][11]
                    tag5 = user_sheet[i][12]
                    tag6 = user_sheet[i][13]
                    tag7 = user_sheet[i][14]
                    Common.logger(log_type, crawler).info(f"正在更新 {search_word} 关键词信息\n")
                    if our_uid is None:
                        default_user = getUser.get_default_user()
                        # Info used to create the our_uid
                        user_dict = {
                            'nickName': default_user['nickName'],
                            'avatarUrl': default_user['avatarUrl'],
                            'tagName': f'{tag1},{tag2},{tag3},{tag4},{tag5},{tag6},{tag7}',
                        }
                        our_uid = getUser.create_uid(log_type, crawler, user_dict, env)
                        Common.logger(log_type, crawler).info(f'新创建的站内UID:{our_uid}')
                        if env == 'prod':
                            our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
                        else:
                            our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
                        Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
                                             [[our_uid, our_user_link]])
                        Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
                    our_user_dict = {
                        'out_uid': '',
                        'search_word': search_word,
                        'our_uid': our_uid,
                        'our_user_link': f'https://admin.piaoquantv.com/ums/user/{our_uid}/post',
                    }
                    our_user_list.append(our_user_dict)
                return our_user_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')

    @classmethod
    def random_signature(cls):
        src_digits = string.digits              # digits
        src_uppercase = string.ascii_uppercase  # uppercase letters
        src_lowercase = string.ascii_lowercase  # lowercase letters
        digits_num = random.randint(1, 6)
        uppercase_num = random.randint(1, 26 - digits_num - 1)
        lowercase_num = 26 - (digits_num + uppercase_num)
        password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
            src_lowercase, lowercase_num)
        random.shuffle(password)
        new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
        new_password_start = new_password[0:18]
        new_password_end = new_password[-7:]
        if new_password[18] == '8':
            new_password = new_password_start + 'w' + new_password_end
        elif new_password[18] == '9':
            new_password = new_password_start + 'x' + new_password_end
        elif new_password[18] == '-':
            new_password = new_password_start + 'y' + new_password_end
        elif new_password[18] == '.':
            new_password = new_password_start + 'z' + new_password_end
        else:
            new_password = new_password_start + 'y' + new_password_end
        return new_password
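
    # random_signature always yields a 26-character, signature-like string: a fixed
    # 'AAAAAAAAAA' prefix, 12 shuffled alphanumerics, and an 'AAAB' tail, with the
    # character at index 18 mapped into the 'w'–'z' range.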

    # Get video details
    @classmethod
    def get_video_url(cls, log_type, crawler, gid):
        try:
            url = 'https://www.ixigua.com/api/mixVideo/information?'
            headers = {
                "accept-encoding": "gzip, deflate",
                "accept-language": "zh-CN,zh-Hans;q=0.9",
                "user-agent": get_random_user_agent('pc'),
                "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
            }
            params = {
                'mixId': gid,
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
                           'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
                '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
                              'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
            }
            cookies = {
                'ixigua-a-s': '1',
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
                           'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
                         '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
                'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
                'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
                '__ac_nonce': '06304878000964fdad287',
                '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
                                  'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
                'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
                '_tea_utm_cache_1300': 'undefined',
                'support_avif': 'false',
                'support_webp': 'false',
                'xiguavideopcwebid': '7134967546256016900',
                'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
            }
            urllib3.disable_warnings()
            s = requests.session()
            # retry up to 3 times
            s.mount('http://', HTTPAdapter(max_retries=3))
            s.mount('https://', HTTPAdapter(max_retries=3))
            response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False,
                             proxies=Common.tunnel_proxies(), timeout=5)
            response.close()
            if 'data' not in response.json() or response.json()['data'] == '':
                Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
                return
            video_info = response.json()['data']['gidInformation']['packerData']['video']
            video_url_dict = {"video_url": '', "audio_url": '', "video_width": 0, "video_height": 0}

            def pad_and_decode(b64_url):
                # backup_url_1 is base64-encoded; add '='-padding before decoding
                # (video and audio URLs are padded independently)
                if len(b64_url) % 3 == 1:
                    b64_url += '=='
                elif len(b64_url) % 3 == 2:
                    b64_url += '='
                return base64.b64decode(b64_url).decode('utf8')

            # Resource types in priority order; within each, resolutions from high to
            # low, then the dynamic video/audio lists as a fallback. Only the first
            # resource type present in videoResource is considered.
            for resource_type in ['dash_120fps', 'dash', 'normal']:
                if resource_type not in video_info.get('videoResource', {}):
                    continue
                resource = video_info['videoResource'][resource_type]
                video_list = resource.get('video_list', {})
                for quality in ['video_4', 'video_3', 'video_2', 'video_1']:
                    if quality in video_list:
                        entry = video_list[quality]
                        video_url_dict["video_url"] = pad_and_decode(entry['backup_url_1'])
                        video_url_dict["audio_url"] = pad_and_decode(entry['backup_url_1'])
                        video_url_dict["video_width"] = entry['vwidth']
                        video_url_dict["video_height"] = entry['vheight']
                        return video_url_dict
                dynamic_video = resource.get('dynamic_video', {})
                if dynamic_video.get('dynamic_video_list') and dynamic_video.get('dynamic_audio_list'):
                    video_entry = dynamic_video['dynamic_video_list'][-1]
                    audio_entry = dynamic_video['dynamic_audio_list'][-1]
                    video_url_dict["video_url"] = pad_and_decode(video_entry['backup_url_1'])
                    video_url_dict["audio_url"] = pad_and_decode(audio_entry['backup_url_1'])
                    video_url_dict["video_width"] = video_entry['vwidth']
                    video_url_dict["video_height"] = video_entry['vheight']
                return video_url_dict
            return video_url_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
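
    # Example of the padding/decode step above on a hypothetical encoded URL:
    # >>> base64.b64decode('aHR0cHM6Ly9leGFtcGxlLmNvbS92aWRlby5tcDQ=').decode('utf8')
    # 'https://example.com/video.mp4'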

    @classmethod
    def get_video_info(cls, log_type, crawler, item_id):
        d_url = "http://a6.pstatp.com/article/full/11/1/{video_id}/{video_id}/1/0/?iid=3636030325&device_id=5787057242" \
                "&ac=wifi&channel=wandoujia&aid=13&app_name=news_article&version_code=532&version_name=5.3.2&device_platform" \
                "=android&ab_client=a1%2Cc2%2Ce1%2Cf2%2Cg2%2Cb3%2Cf4&abflag=3&ssmix=a&device_type=SM705" \
                "&device_brand=smartisan&os_api=19&os_version=4.4.2&uuid=864593021012562&openudid=e23a5ff037ef2d1a" \
                "&manifest_version_code=532&resolution=1080*1920&dpi=480&update_version_code=5320".format(
                    video_id=item_id)
        res = requests.get(url=d_url, headers=get_random_header('pc'), proxies=Common.tunnel_proxies())
        data = json.loads(res.text)['data']
        item_counter = data['h5_extra']['itemCell']['itemCounter']
        user_info = data['user_info']
        detail_info = data['video_detail_info']
        video_dict = {'video_title': data['title'],
                      'video_id': detail_info['video_id'],
                      'gid': data['group_id'],
                      'play_cnt': item_counter['videoWatchCount'],
                      'comment_cnt': item_counter['commentCount'],
                      'like_cnt': item_counter['diggCount'],
                      'share_cnt': item_counter['shareCount'],
                      'duration': data['video_duration'],
                      'publish_time_stamp': data['publish_time'],
                      'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S",
                                                        time.localtime(data['publish_time'])),
                      'user_name': user_info['name'],
                      'user_id': user_info['user_id'],
                      'avatar_url': user_info['avatar_url'],
                      'cover_url': data['large_image']['url'].replace('\u0026', '&'),
                      }
        return video_dict

    @classmethod
    def is_ruled(cls, log_type, crawler, video_dict, rule_dict):
        old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
        if video_dict['publish_time_stamp'] <= old_time:
            return False
        elif video_dict['play_cnt'] <= rule_dict['play_cnt']:
            return False
        elif video_dict['duration'] < rule_dict['min_duration'] or video_dict['duration'] > rule_dict['max_duration']:
            return False
        else:
            return True
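
    # Example: with rule_dict['publish_time'] == 3, old_time is the timestamp of
    # "3 days ago"; a video published on or before that moment is rejected.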

    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, search_word, oss_endpoint, env, machine):
        total_count = 1
        offset = 0
        while True:
            signature = cls.random_signature()
            url = "https://www.ixigua.com/api/searchv2/complex/{}/{}?order_type=publish_time&click_position=new".format(
                quote(search_word), offset, signature)
            headers = {
                'referer': 'https://www.ixigua.com/search/{}/?logTag=594535e3690f17a88cdb&tab_name=search'.format(
                    quote(search_word)),
                'cookie': 'ttwid=1%7Cx_4RDmVTqp6BQ5Xy5AnuCZCQdDyDxv-fnMVWzj19VU0%7C1679382377%7C4e25692dc4b9d5dca56d690001d168b21ed028a9ac075808ab9262238cb405ee;',
                'user-agent': get_random_user_agent('pc'),
            }
            try:
                res = requests.request("GET", url, headers=headers, proxies=Common.tunnel_proxies())
                search_list = res.json()['data']['data']
            except Exception as e:
                continue
            if not search_list:
                Common.logger(log_type, crawler).error(f'关键词:{search_word},没有获取到视频列表:offset{offset}')
                return
            for video_info in search_list:
                v_type = video_info['type']
                rule_dict = cls.get_rule(log_type, crawler)
                publish_time = video_info['data']['publish_time']
                old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
                if publish_time <= old_time:
                    Common.logger(log_type, crawler).error(f'关键词:{search_word},抓取完毕,退出抓取\n')
                    return
                if v_type == 'video':
                    item_id = video_info['data']['group_id']
                    if video_info['data']['publish_time'] <= old_time:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    elif video_info['data']['video_watch_count'] <= rule_dict['play_cnt']:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    elif video_info['data']['video_time'] < rule_dict['min_duration'] or video_info['data']['video_time'] > rule_dict['max_duration']:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    try:
                        video_dict = cls.get_video_info(log_type, crawler, item_id)
                        video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
                        video_dict['video_width'] = video_url_dict["video_width"]
                        video_dict['video_height'] = video_url_dict["video_height"]
                        video_dict['audio_url'] = video_url_dict["audio_url"]
                        video_dict['video_url'] = video_url_dict["video_url"]
                        video_dict['session'] = signature
                    except Exception as e:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},获取详情失败,原因:{e}')
                        continue
                    if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                        Common.logger(log_type, crawler).info(f'关键词:{search_word},gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
                        continue
                    for k, v in video_dict.items():
                        Common.logger(log_type, crawler).info(f"{k}:{v}")
                    try:
                        # print(
                        #     f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                        cls.download_publish(
                            search_word=search_word,
                            log_type=log_type,
                            crawler=crawler,
                            video_dict=video_dict,
                            rule_dict=rule_dict,
                            strategy=strategy,
                            our_uid=our_uid,
                            oss_endpoint=oss_endpoint,
                            env=env,
                            machine=machine
                        )
                    except Exception as e:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},下载失败,原因:{e}')
                        continue
                    total_count += 1
                    Common.logger(log_type, crawler).info(f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                    if total_count >= 30:
                        return
                # elif v_type == 'pseries':
                #     try:
                #         item_id = video_info['data']['group_id']
                #         p_url = "https://www.ixigua.com/api/videov2/pseries_more_v2?pSeriesId={}&rank=0&tailCount=30&aid=1768&msToken=wHEafKFLx0k3hihOPbhXYNsfMBxWiq2AB0K5R-34kEFixyq3ATi_DuXbL4Q47J9C2uK2zgWItMa1g2yc4FyDxM4dMijmSdwF4c4T8sSmOkoOI0wGzeEcPw==&X-Bogus=DFSzswVOzdUANG3ItaVHYr7TlqCv&_signature=_02B4Z6wo00001vB6l3QAAIDBZKzMeTihTmbwepPAANgh1Ai3JgFFo4e6anoezmBEpHfEMEYlWISGhXI-QKfev4N-2bwgXsHOuNGLnOsGqMbANIjFPh7Yj6OakQWrkbACenlv0P-arswtB6Zn45".format(
                #             item_id)
                #         p_headers = {
                #             'referer': 'https://www.ixigua.com/{}?series_flow=1&logTag=cfec9d927da968feff89'.format(
                #                 item_id),
                #             'user-agent': get_random_user_agent('pc'),
                #         }
                #         p_res = requests.request("GET", p_url, headers=p_headers,
                #                                  proxies=Common.tunnel_proxies()).json()
                #     except Exception as e:
                #         Common.logger(log_type, crawler).error(f'合集:{item_id},没有获取到合集详情,原因:{e}')
                #         continue
                #     for video in p_res['data']:
                #         item_id = video['item_id']
                #         try:
                #             video_dict = cls.get_video_info(log_type, crawler, item_id)
                #             video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
                #             video_dict['video_width'] = video_url_dict["video_width"]
                #             video_dict['video_height'] = video_url_dict["video_height"]
                #             video_dict['audio_url'] = video_url_dict["audio_url"]
                #             video_dict['video_url'] = video_url_dict["video_url"]
                #             video_dict['session'] = signature
                #         except Exception as e:
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},没有获取到视频详情,原因:{e}')
                #             continue
                #         if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                #             Common.logger(log_type, crawler).info(
                #                 f'gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
                #             continue
                #         if not cls.is_ruled(log_type, crawler, video_dict, rule_dict):
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},不符合抓取规则\n')
                #             continue
                #         for k, v in video_dict.items():
                #             Common.logger(log_type, crawler).info(f"{k}:{v}")
                #         try:
                #             # print(
                #             #     f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                #             cls.download_publish(
                #                 search_word=search_word,
                #                 log_type=log_type,
                #                 crawler=crawler,
                #                 video_dict=video_dict,
                #                 rule_dict=rule_dict,
                #                 strategy=strategy,
                #                 our_uid=our_uid,
                #                 oss_endpoint=oss_endpoint,
                #                 env=env,
                #                 machine=machine
                #             )
                #             total_count += 1
                #             if total_count >= 30:
                #                 return
                #             else:
                #                 break
                #         except Exception as e:
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},download_publish异常:{e}\n')
            offset += 10

    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env, machine):
        sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
        return len(repeat_video)
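
    # For a hypothetical video_id "v123", the query above expands to:
    # select * from crawler_video where platform="西瓜视频" and out_video_id="v123";
    # A non-zero row count means the video was already crawled.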

    # Download / upload
    @classmethod
    def download_publish(cls, log_type, crawler, search_word, strategy, video_dict, rule_dict, our_uid, oss_endpoint,
                         env, machine):
        # Download video
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video',
                               title=video_dict['video_title'], url=video_dict['video_url'])
        # Download audio
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio',
                               title=video_dict['video_title'], url=video_dict['audio_url'])
        # Merge audio and video
        Common.video_compose(log_type=log_type, crawler=crawler,
                             video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
        md_title = md5(video_dict['video_title'].encode('utf8')).hexdigest()
        if os.path.getsize(f"./{crawler}/videos/{md_title}/video.mp4") == 0:
            # Delete the video folder
            shutil.rmtree(f"./{crawler}/videos/{md_title}")
            Common.logger(log_type, crawler).info("视频size=0,删除成功\n")
            return
        # ffmpeg_dict = Common.ffmpeg(log_type, crawler,
        #                             f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
        # if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
        #     Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
        #     # Delete the video folder
        #     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
        #     return
        # Download cover image
        Common.download_method(log_type=log_type, crawler=crawler, text='cover',
                               title=video_dict['video_title'], url=video_dict['cover_url'])
        # Save video info to txt
        Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
        # Upload video
        Common.logger(log_type, crawler).info("开始上传视频...")
        our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                  crawler=crawler,
                                                  strategy=strategy,
                                                  our_uid=our_uid,
                                                  env=env,
                                                  oss_endpoint=oss_endpoint)
        if env == 'dev':
            our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        else:
            our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        Common.logger(log_type, crawler).info("视频上传完成")
        if our_video_id is None:
            # Delete the video folder
            shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
            return
        # Write video row to Feishu
        Feishu.insert_columns(log_type, 'xigua', "BUNvGC", "ROWS", 1, 2)
        upload_time = int(time.time())
        values = [[
            search_word,
            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
            "关键词搜索",
            video_dict['video_title'],
            str(video_dict['video_id']),
            our_video_link,
            video_dict['gid'],
            video_dict['play_cnt'],
            video_dict['comment_cnt'],
            video_dict['like_cnt'],
            video_dict['share_cnt'],
            video_dict['duration'],
            str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
            video_dict['publish_time_str'],
            video_dict['user_name'],
            video_dict['user_id'],
            video_dict['avatar_url'],
            video_dict['cover_url'],
            video_dict['video_url'],
            video_dict['audio_url']]]
        time.sleep(1)
        Feishu.update_values(log_type, 'xigua', "BUNvGC", "E2:Z2", values)
        Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
        # Save video info to the database
        insert_sql = f""" insert into crawler_video(video_id,
                                                    user_id,
                                                    out_user_id,
                                                    platform,
                                                    strategy,
                                                    out_video_id,
                                                    video_title,
                                                    cover_url,
                                                    video_url,
                                                    duration,
                                                    publish_time,
                                                    play_cnt,
                                                    crawler_rule,
                                                    width,
                                                    height)
                                                    values({our_video_id},
                                                    {our_uid},
                                                    "{video_dict['user_id']}",
                                                    "{cls.platform}",
                                                    "定向爬虫策略",
                                                    "{video_dict['video_id']}",
                                                    "{video_dict['video_title']}",
                                                    "{video_dict['cover_url']}",
                                                    "{video_dict['video_url']}",
                                                    {int(video_dict['duration'])},
                                                    "{video_dict['publish_time_str']}",
                                                    {int(video_dict['play_cnt'])},
                                                    '{json.dumps(rule_dict)}',
                                                    {int(video_dict['video_width'])},
                                                    {int(video_dict['video_height'])}) """
        Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
        MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
        Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')

    @classmethod
    def get_search_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
        try:
            user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="SSPNPW", env=env,
                                          machine=machine)
            for user in user_list:
                search_word = user["search_word"]
                our_uid = user["our_uid"]
                Common.logger(log_type, crawler).info(f"开始抓取 {search_word} 用户主页视频\n")
                cls.get_videolist(log_type=log_type,
                                  crawler=crawler,
                                  strategy=strategy,
                                  our_uid=our_uid,
                                  search_word=search_word,
                                  oss_endpoint=oss_endpoint,
                                  env=env,
                                  machine=machine)
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_search_videos:{e}\n")


if __name__ == '__main__':
    # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
    # XiguaSearch.get_search_videos('search', 'xigua', 'xigua_search', 'inner', 'prod', 'aliyun')
    # Follow.get_videolist(log_type="follow",
    #                      crawler="xigua",
    #                      strategy="定向爬虫策略",
    #                      our_uid="6267141",
    #                      out_uid="95420624045",
    #                      oss_endpoint="out",
    #                      env="dev",
    #                      machine="local")
    # print(Follow.random_signature())
    # rule = Follow.get_rule("follow", "xigua")
    # print(type(rule))
    # print(type(json.dumps(rule)))
    # print(json.dumps(rule))
    pass