# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/17
import base64
import json
import os
import random
import shutil
import string
import sys
import time
from hashlib import md5
import requests
import urllib3
from urllib.parse import quote
from requests.adapters import HTTPAdapter
sys.path.append(os.getcwd())
from common.db import MysqlHelper
from common.getuser import getUser
from common.common import Common
from common.feishu import Feishu
from common.publish import Publish
from common.public import get_config_from_mysql
from common.userAgent import get_random_user_agent, get_random_header


class XiguaSearch:
    platform = "西瓜视频"
    tag = "西瓜视频爬虫,搜索爬虫策略"

    @classmethod
    def get_rule(cls, log_type, crawler):
        try:
            while True:
                rule_sheet = Feishu.get_values_batch(log_type, crawler, "shxOl7")
                if rule_sheet is None:
                    Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
                    time.sleep(10)
                    continue
                rule_dict = {
                    "play_cnt": int(rule_sheet[1][2]),
                    "min_duration": int(rule_sheet[2][2]),
                    "max_duration": int(rule_sheet[3][2]),
                    "publish_time": int(rule_sheet[4][2]),
                }
                return rule_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
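
    # 注意:download_rule 使用的 rule_dict 字段为 play_cnt/comment_cnt/like_cnt/duration/video_width/video_height,
    # 与 get_rule 返回的字段(play_cnt/min_duration/max_duration/publish_time)不一致,二者应对应不同的规则配置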
    # 下载规则
    @classmethod
    def download_rule(cls, video_info_dict, rule_dict):
        return (video_info_dict['play_cnt'] >= rule_dict['play_cnt']
                and video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']
                and video_info_dict['like_cnt'] >= rule_dict['like_cnt']
                and video_info_dict['duration'] >= rule_dict['duration']
                and (video_info_dict['video_width'] >= rule_dict['video_width']
                     or video_info_dict['video_height'] >= rule_dict['video_height']))

    # 过滤词库
    @classmethod
    def filter_words(cls, log_type, crawler):
        try:
            while True:
                filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
                if filter_words_sheet is None:
                    Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                filter_words_list = []
                for x in filter_words_sheet:
                    for y in x:
                        if y is not None:
                            filter_words_list.append(y)
                return filter_words_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')

    # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
    @classmethod
    def get_user_list(cls, log_type, crawler, sheetid, env, machine):
        try:
            while True:
                user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
                if user_sheet is None:
                    Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                our_user_list = []
                for i in range(1, len(user_sheet)):
                    our_uid = user_sheet[i][6]
                    search_word = user_sheet[i][4]
                    tag1 = user_sheet[i][8]
                    tag2 = user_sheet[i][9]
                    tag3 = user_sheet[i][10]
                    tag4 = user_sheet[i][11]
                    tag5 = user_sheet[i][12]
                    tag6 = user_sheet[i][13]
                    tag7 = user_sheet[i][14]
                    Common.logger(log_type, crawler).info(f"正在更新 {search_word} 关键词信息\n")
                    if our_uid is None:
                        default_user = getUser.get_default_user()
                        # 用来创建our_id的信息
                        user_dict = {
                            'recommendStatus': -6,
                            'appRecommendStatus': -6,
                            'nickName': default_user['nickName'],
                            'avatarUrl': default_user['avatarUrl'],
                            'tagName': f'{tag1},{tag2},{tag3},{tag4},{tag5},{tag6},{tag7}',
                        }
                        our_uid = getUser.create_uid(log_type, crawler, user_dict, env)
                        Common.logger(log_type, crawler).info(f'新创建的站内UID:{our_uid}')
                        if env == 'prod':
                            our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
                        else:
                            our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
                        Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
                                             [[our_uid, our_user_link]])
                        Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
                    our_user_dict = {
                        'out_uid': '',
                        'search_word': search_word,
                        'our_uid': our_uid,
                        'our_user_link': f'https://admin.piaoquantv.com/ums/user/{our_uid}/post',
                    }
                    our_user_list.append(our_user_dict)
                return our_user_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
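
    # 生成随机 _signature(伪造签名参数):拼接随机数字/大小写字母,固定前缀 'AAAAAAAAAA'、后缀 'AAAB',
    # 并将第 19 位(index 18)统一替换为字母 w/x/y/z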
    @classmethod
    def random_signature(cls):
        src_digits = string.digits  # string_数字
        src_uppercase = string.ascii_uppercase  # string_大写字母
        src_lowercase = string.ascii_lowercase  # string_小写字母
        digits_num = random.randint(1, 6)
        uppercase_num = random.randint(1, 26 - digits_num - 1)
        lowercase_num = 26 - (digits_num + uppercase_num)
        password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
            src_lowercase, lowercase_num)
        random.shuffle(password)
        new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
        new_password_start = new_password[0:18]
        new_password_end = new_password[-7:]
        if new_password[18] == '8':
            new_password = new_password_start + 'w' + new_password_end
        elif new_password[18] == '9':
            new_password = new_password_start + 'x' + new_password_end
        elif new_password[18] == '-':
            new_password = new_password_start + 'y' + new_password_end
        elif new_password[18] == '.':
            new_password = new_password_start + 'z' + new_password_end
        else:
            new_password = new_password_start + 'y' + new_password_end
        return new_password

    # 获取视频播放地址 / 音频地址 / 宽高
    @classmethod
    def get_video_url(cls, log_type, crawler, gid):
        try:
            url = 'https://www.ixigua.com/api/mixVideo/information?'
            headers = {
                "accept-encoding": "gzip, deflate",
                "accept-language": "zh-CN,zh-Hans;q=0.9",
                "user-agent": get_random_user_agent('pc'),
                "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
            }
            params = {
                'mixId': gid,
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
                           'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
                '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
                              'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
            }
            cookies = {
                'ixigua-a-s': '1',
                'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
                           'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
                'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
                         '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
                'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
                'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
                '__ac_nonce': '06304878000964fdad287',
                '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
                                  'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
                'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
                '_tea_utm_cache_1300': 'undefined',
                'support_avif': 'false',
                'support_webp': 'false',
                'xiguavideopcwebid': '7134967546256016900',
                'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
            }
            urllib3.disable_warnings()
            s = requests.session()
            # max_retries=3 重试3次
            s.mount('http://', HTTPAdapter(max_retries=3))
            s.mount('https://', HTTPAdapter(max_retries=3))
            response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False,
                             proxies=Common.tunnel_proxies(), timeout=5)
            # response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False)
            response.close()
            if 'data' not in response.json() or response.json()['data'] == '':
                Common.logger(log_type, crawler).warning(f'get_video_url: response: {response}')
            else:
                video_info = response.json()['data']['gidInformation']['packerData']['video']
                video_url_dict = {}
                # video_url:按 dash_120fps > dash > normal 的优先级取资源,
                # 每种资源内依次尝试 video_4 ~ video_1,最后回退到 dynamic_video,与原 if/elif 逻辑一致
                video_url_dict["video_url"] = ''
                video_url_dict["audio_url"] = ''
                video_url_dict["video_width"] = 0
                video_url_dict["video_height"] = 0
                video_resource = video_info.get('videoResource', {})
                for resource_key in ['dash_120fps', 'dash', 'normal']:
                    if resource_key not in video_resource:
                        continue
                    resource = video_resource[resource_key]
                    video_list = resource.get('video_list', {})
                    dynamic_video = resource.get('dynamic_video', {})
                    video_url = ''
                    audio_url = ''
                    video_width = 0
                    video_height = 0
                    found = False
                    for quality in ['video_4', 'video_3', 'video_2', 'video_1']:
                        if quality in video_list:
                            video_url = video_list[quality]['backup_url_1']
                            audio_url = video_list[quality]['backup_url_1']
                            video_width = video_list[quality]['vwidth']
                            video_height = video_list[quality]['vheight']
                            found = True
                            break
                    if not found \
                            and 'dynamic_video_list' in dynamic_video \
                            and 'dynamic_audio_list' in dynamic_video \
                            and len(dynamic_video['dynamic_video_list']) != 0 \
                            and len(dynamic_video['dynamic_audio_list']) != 0:
                        video_url = dynamic_video['dynamic_video_list'][-1]['backup_url_1']
                        audio_url = dynamic_video['dynamic_audio_list'][-1]['backup_url_1']
                        video_width = dynamic_video['dynamic_video_list'][-1]['vwidth']
                        video_height = dynamic_video['dynamic_video_list'][-1]['vheight']
                        found = True
                    if found:
                        # base64 补齐(沿用原实现的 len % 3 判断)后解码出真实地址
                        if len(video_url) % 3 == 1:
                            video_url += '=='
                        elif len(video_url) % 3 == 2:
                            video_url += '='
                        elif len(audio_url) % 3 == 1:
                            audio_url += '=='
                        elif len(audio_url) % 3 == 2:
                            audio_url += '='
                        video_url = base64.b64decode(video_url).decode('utf8')
                        audio_url = base64.b64decode(audio_url).decode('utf8')
                        video_url_dict["video_url"] = video_url
                        video_url_dict["audio_url"] = audio_url
                        video_url_dict["video_width"] = video_width
                        video_url_dict["video_height"] = video_height
                    # 只使用第一个存在的 resource_key,未命中时保留空值
                    break
                return video_url_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
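
    # 获取视频详情:通过头条 article/full 接口获取标题、播放/评论/点赞/分享计数、时长、发布时间及作者信息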
    @classmethod
    def get_video_info(cls, log_type, crawler, item_id):
        d_url = "http://a6.pstatp.com/article/full/11/1/{video_id}/{video_id}/1/0/?iid=3636030325&device_id=5787057242" \
                "&ac=wifi&channel=wandoujia&aid=13&app_name=news_article&version_code=532&version_name=5.3.2&device_platform" \
                "=android&ab_client=a1%2Cc2%2Ce1%2Cf2%2Cg2%2Cb3%2Cf4&abflag=3&ssmix=a&device_type=SM705" \
                "&device_brand=smartisan&os_api=19&os_version=4.4.2&uuid=864593021012562&openudid=e23a5ff037ef2d1a" \
                "&manifest_version_code=532&resolution=1080*1920&dpi=480&update_version_code=5320".format(
            video_id=item_id)
        res = requests.get(url=d_url, headers=get_random_header('pc'), proxies=Common.tunnel_proxies())
        data = json.loads(res.text)['data']
        item_counter = data['h5_extra']['itemCell']['itemCounter']
        user_info = data['user_info']
        detail_info = data['video_detail_info']
        video_dict = {'video_title': data['title'].replace('"', '').replace("'", ''),
                      'video_id': detail_info['video_id'],
                      'gid': data['group_id'],
                      'play_cnt': item_counter['videoWatchCount'],
                      'comment_cnt': item_counter['commentCount'],
                      'like_cnt': item_counter['diggCount'],
                      'share_cnt': item_counter['shareCount'],
                      'duration': data['video_duration'],
                      'publish_time_stamp': data['publish_time'],
                      'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S",
                                                        time.localtime(data['publish_time'])),
                      'user_name': user_info['name'],
                      'user_id': user_info['user_id'],
                      'avatar_url': user_info['avatar_url'],
                      'cover_url': data['large_image']['url'].replace('\u0026', '&'),
                      }
        return video_dict
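
    # 抓取规则判断:发布时间在 publish_time 天内、播放量大于 play_cnt、时长在 [min_duration, max_duration] 区间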
    @classmethod
    def is_ruled(cls, log_type, crawler, video_dict, rule_dict):
        old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
        if video_dict['publish_time_stamp'] <= old_time:
            return False
        elif video_dict['play_cnt'] <= rule_dict['play_cnt']:
            return False
        elif video_dict['duration'] < rule_dict['min_duration'] or video_dict['duration'] > rule_dict['max_duration']:
            return False
        else:
            return True
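
    # 关键词搜索:分页请求西瓜搜索接口,按规则过滤后获取详情并下载/上传,单个关键词累计约 30 条后停止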
    @classmethod
    def get_videolist(cls, log_type, crawler, strategy, our_uid, search_word, oss_endpoint, env, machine):
        total_count = 1
        offset = 0
        while True:
            signature = cls.random_signature()
            # url = "https://www.ixigua.com/api/searchv2/complex/{}/{}?order_type=publish_time&click_position=new".format(
            #     quote(search_word), offset, signature)
            url = f'https://www.ixigua.com/api/searchv2/complex/{quote(search_word)}/{offset}?' \
                  f'search_id=202305111126371489381ECEC7FE277E3F&' \
                  f'aid=1768&' \
                  f'msToken=lPfIf3aps6EktQAeOl9yRgnL44MtMeGt2WnHjahIR0IysASB_zdhGiY0J9WWxNDpLd7aVdQx_36MpyPI5f2zRUHFYyNNsX5cl-or6GkiVuLLiRsU3ylxj9vt7Upubw==&' \
                  f'X-Bogus=DFSzswVY4h0ANGD7tC7G/Mm4pIkV&' \
                  f'_signature={signature}'
            headers = {
                'referer': 'https://www.ixigua.com/search/{}/?logTag=594535e3690f17a88cdb&tab_name=search'.format(
                    quote(search_word)),
                'cookie': 'ttcid=5d8f917a525e46759dc886296bf1111b69; MONITOR_WEB_ID=ad1c8360-d4c9-4fa2-a801-d9fd68dfc1b2; s_v_web_id=verify_lh8vaa6v_VI4RQ0ET_nVbq_4PXw_8mfN_7Xp6wdLOZi08; passport_csrf_token=0e7c6992cb6170c9db034c3696191fff; passport_csrf_token_default=0e7c6992cb6170c9db034c3696191fff; support_webp=true; support_avif=true; csrf_session_id=a5355d954d3c63ed1ba35faada452b4d; odin_tt=3072e827705bd5aa707fb8d432524d7f8fad972b02b31a2d3458a3e5209d5492; sid_guard=46a52ce83dacb0b871dae675476a3e42%7C1683773717%7C21600%7CThu%2C+11-May-2023+08%3A55%3A17+GMT; uid_tt=4126f296856e6042f195253e9a01c4cb; uid_tt_ss=4126f296856e6042f195253e9a01c4cb; sid_tt=46a52ce83dacb0b871dae675476a3e42; sessionid=46a52ce83dacb0b871dae675476a3e42; sessionid_ss=46a52ce83dacb0b871dae675476a3e42; sid_ucp_v1=1.0.0-KDMyMzg5NWI3YzAxMGFkN2Y4MjZiMzE5Njc0MGFmMWQ5NGExY2MyYzgKCBCVsvGiBhgNGgJobCIgNDZhNTJjZTgzZGFjYjBiODcxZGFlNjc1NDc2YTNlNDI; ssid_ucp_v1=1.0.0-KDMyMzg5NWI3YzAxMGFkN2Y4MjZiMzE5Njc0MGFmMWQ5NGExY2MyYzgKCBCVsvGiBhgNGgJobCIgNDZhNTJjZTgzZGFjYjBiODcxZGFlNjc1NDc2YTNlNDI; ixigua-a-s=1; tt_scid=sblZQP6nSw2f6A.XS-yHFqB.R3o9UFsRTUCKAoWlHWzNrOf8R01qeIBbu6TDeXtMa3fb; ttwid=1%7C4zaTJmlaHpEa8rAB-KjREdxT3sNBUJWrAzRJnNvqExQ%7C1683775619%7Cf4fc6fa51baf2e302242da412ead6500c3d3f5bfb0be6253cbae00301d5773ae; msToken=lPfIf3aps6EktQAeOl9yRgnL44MtMeGt2WnHjahIR0IysASB_zdhGiY0J9WWxNDpLd7aVdQx_36MpyPI5f2zRUHFYyNNsX5cl-or6GkiVuLLiRsU3ylxj9vt7Upubw==',
                'user-agent': get_random_user_agent('pc'),
            }
            try:
                proxies = Common.tunnel_proxies()
                s = requests.session()
                # max_retries=3 重试3次
                s.mount('http://', HTTPAdapter(max_retries=3))
                s.mount('https://', HTTPAdapter(max_retries=3))
                res = s.request("GET", url, headers=headers, proxies=proxies, timeout=5)
                # Common.logger(log_type, crawler).info(f"proxies:{proxies}\n")
                Common.logger(log_type, crawler).info(f"get_videolist:{res.json()}\n")
                search_list = res.json()['data']['data']
            except Exception as e:
                Common.logger(log_type, crawler).warning(f"get_videolist:{e}\n")
                continue
            if not search_list:
                Common.logger(log_type, crawler).error(f'关键词:{search_word},没有获取到视频列表:offset{offset}')
                return
            for video_info in search_list:
                v_type = video_info['type']
                rule_dict = cls.get_rule(log_type, crawler)
                publish_time = video_info['data']['publish_time']
                old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
                if publish_time <= old_time:
                    Common.logger(log_type, crawler).error(f'关键词:{search_word},抓取完毕,退出抓取\n')
                    return
                if v_type == 'video':
                    item_id = video_info['data']['group_id']
                    if video_info['data']['publish_time'] <= old_time:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    elif video_info['data']['video_watch_count'] <= rule_dict['play_cnt']:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    elif video_info['data']['video_time'] < rule_dict['min_duration'] or \
                            video_info['data']['video_time'] > rule_dict['max_duration']:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
                        continue
                    try:
                        video_dict = cls.get_video_info(log_type, crawler, item_id)
                        filter_words = get_config_from_mysql(log_type, crawler, env, text='filter')
                        is_filter = False
                        for filter_word in filter_words:
                            if filter_word in video_dict['video_title']:
                                is_filter = True
                                break
                        if is_filter:
                            Common.logger(log_type, crawler).info(f'标题已中过滤词:{video_dict["video_title"]}\n')
                            continue
                        video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
                        video_dict['video_width'] = video_url_dict["video_width"]
                        video_dict['video_height'] = video_url_dict["video_height"]
                        video_dict['audio_url'] = video_url_dict["audio_url"]
                        video_dict['video_url'] = video_url_dict["video_url"]
                        video_dict['session'] = signature
                    except Exception as e:
                        Common.logger(log_type, crawler).error(
                            f'关键词:{search_word},视频:{item_id},获取详情失败,原因:{e}')
                        continue
                    if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                        Common.logger(log_type, crawler).info(
                            f'关键词:{search_word},gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
                        continue
                    for k, v in video_dict.items():
                        Common.logger(log_type, crawler).info(f"{k}:{v}")
                    try:
                        # print(
                        #     f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                        cls.download_publish(
                            search_word=search_word,
                            log_type=log_type,
                            crawler=crawler,
                            video_dict=video_dict,
                            rule_dict=rule_dict,
                            strategy=strategy,
                            our_uid=our_uid,
                            oss_endpoint=oss_endpoint,
                            env=env,
                            machine=machine
                        )
                    except Exception as e:
                        Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},下载失败,原因:{e}')
                        continue
                    total_count += 1
                    Common.logger(log_type, crawler).info(
                        f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                    if total_count >= 30:
                        return
                # elif v_type == 'pseries':
                #     try:
                #         item_id = video_info['data']['group_id']
                #         p_url = "https://www.ixigua.com/api/videov2/pseries_more_v2?pSeriesId={}&rank=0&tailCount=30&aid=1768&msToken=wHEafKFLx0k3hihOPbhXYNsfMBxWiq2AB0K5R-34kEFixyq3ATi_DuXbL4Q47J9C2uK2zgWItMa1g2yc4FyDxM4dMijmSdwF4c4T8sSmOkoOI0wGzeEcPw==&X-Bogus=DFSzswVOzdUANG3ItaVHYr7TlqCv&_signature=_02B4Z6wo00001vB6l3QAAIDBZKzMeTihTmbwepPAANgh1Ai3JgFFo4e6anoezmBEpHfEMEYlWISGhXI-QKfev4N-2bwgXsHOuNGLnOsGqMbANIjFPh7Yj6OakQWrkbACenlv0P-arswtB6Zn45".format(
                #             item_id)
                #         p_headers = {
                #             'referer': 'https://www.ixigua.com/{}?series_flow=1&logTag=cfec9d927da968feff89'.format(
                #                 item_id),
                #             'user-agent': get_random_user_agent('pc'),
                #         }
                #         p_res = requests.request("GET", p_url, headers=p_headers,
                #                                  proxies=Common.tunnel_proxies()).json()
                #     except Exception as e:
                #         Common.logger(log_type, crawler).error(f'合集:{item_id},没有获取到合集详情,原因:{e}')
                #         continue
                #     for video in p_res['data']:
                #         item_id = video['item_id']
                #         try:
                #             video_dict = cls.get_video_info(log_type, crawler, item_id)
                #             video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
                #             video_dict['video_width'] = video_url_dict["video_width"]
                #             video_dict['video_height'] = video_url_dict["video_height"]
                #             video_dict['audio_url'] = video_url_dict["audio_url"]
                #             video_dict['video_url'] = video_url_dict["video_url"]
                #             video_dict['session'] = signature
                #         except Exception as e:
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},没有获取到视频详情,原因:{e}')
                #             continue
                #         if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                #             Common.logger(log_type, crawler).info(
                #                 f'gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
                #             continue
                #         if not cls.is_ruled(log_type, crawler, video_dict, rule_dict):
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},不符合抓取规则\n')
                #             continue
                #         for k, v in video_dict.items():
                #             Common.logger(log_type, crawler).info(f"{k}:{v}")
                #         try:
                #             # print(
                #             #     f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
                #             cls.download_publish(
                #                 search_word=search_word,
                #                 log_type=log_type,
                #                 crawler=crawler,
                #                 video_dict=video_dict,
                #                 rule_dict=rule_dict,
                #                 strategy=strategy,
                #                 our_uid=our_uid,
                #                 oss_endpoint=oss_endpoint,
                #                 env=env,
                #                 machine=machine
                #             )
                #             total_count += 1
                #             if total_count >= 30:
                #                 return
                #             else:
                #                 break
                #         except Exception as e:
                #             Common.logger(log_type, crawler).error(f'视频:{item_id},download_publish异常:{e}\n')
            offset += 10
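
    # 数据库去重:按 platform + out_video_id 查询 crawler_video 表,返回已存在的记录条数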
    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env, machine):
        sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
        return len(repeat_video)

    # 下载 / 上传
    @classmethod
    def download_publish(cls, log_type, crawler, search_word, strategy, video_dict, rule_dict, our_uid, oss_endpoint,
                         env, machine):
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video',
                               title=video_dict['video_title'], url=video_dict['video_url'])
        # 下载音频
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio',
                               title=video_dict['video_title'], url=video_dict['audio_url'])
        # 合成音视频
        Common.video_compose(log_type=log_type, crawler=crawler,
                             video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
        md_title = md5(video_dict['video_title'].encode('utf8')).hexdigest()
        if os.path.getsize(f"./{crawler}/videos/{md_title}/video.mp4") == 0:
            # 删除视频文件夹
            shutil.rmtree(f"./{crawler}/videos/{md_title}")
            Common.logger(log_type, crawler).info("视频size=0,删除成功\n")
            return
        # ffmpeg_dict = Common.ffmpeg(log_type, crawler,
        #                             f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
        # if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
        #     Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
        #     # 删除视频文件夹
        #     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
        #     return
        # 下载封面
        Common.download_method(log_type=log_type, crawler=crawler, text='cover',
                               title=video_dict['video_title'], url=video_dict['cover_url'])
        # 保存视频信息至txt
        Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
        # 上传视频
        Common.logger(log_type, crawler).info("开始上传视频...")
        our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                  crawler=crawler,
                                                  strategy=strategy,
                                                  our_uid=our_uid,
                                                  env=env,
                                                  oss_endpoint=oss_endpoint)
        if env == 'dev':
            our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        else:
            our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        Common.logger(log_type, crawler).info("视频上传完成")
        if our_video_id is None:
            # 删除视频文件夹
            shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
            return
        # 视频写入飞书
        Feishu.insert_columns(log_type, 'xigua', "BUNvGC", "ROWS", 1, 2)
        upload_time = int(time.time())
        values = [[
            search_word,
            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
            "关键词搜索",
            video_dict['video_title'],
            str(video_dict['video_id']),
            our_video_link,
            video_dict['gid'],
            video_dict['play_cnt'],
            video_dict['comment_cnt'],
            video_dict['like_cnt'],
            video_dict['share_cnt'],
            video_dict['duration'],
            str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
            video_dict['publish_time_str'],
            video_dict['user_name'],
            video_dict['user_id'],
            video_dict['avatar_url'],
            video_dict['cover_url'],
            video_dict['video_url'],
            video_dict['audio_url']]]
        time.sleep(1)
        Feishu.update_values(log_type, 'xigua', "BUNvGC", "E2:Z2", values)
        Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
        # 视频信息保存数据库
        insert_sql = f""" insert into crawler_video(video_id,
                                                    user_id,
                                                    out_user_id,
                                                    platform,
                                                    strategy,
                                                    out_video_id,
                                                    video_title,
                                                    cover_url,
                                                    video_url,
                                                    duration,
                                                    publish_time,
                                                    play_cnt,
                                                    crawler_rule,
                                                    width,
                                                    height)
                                                    values({our_video_id},
                                                    {our_uid},
                                                    "{video_dict['user_id']}",
                                                    "{cls.platform}",
                                                    "搜索爬虫策略",
                                                    "{video_dict['video_id']}",
                                                    "{video_dict['video_title']}",
                                                    "{video_dict['cover_url']}",
                                                    "{video_dict['video_url']}",
                                                    {int(video_dict['duration'])},
                                                    "{video_dict['publish_time_str']}",
                                                    {int(video_dict['play_cnt'])},
                                                    '{json.dumps(rule_dict)}',
                                                    {int(video_dict['video_width'])},
                                                    {int(video_dict['video_height'])}) """
        Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
        MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
        Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
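
    # 抓取入口:从飞书表(sheetid="SSPNPW")读取关键词与站内 our_uid,逐个关键词执行搜索抓取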
    @classmethod
    def get_search_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
        user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="SSPNPW", env=env,
                                      machine=machine)
        for user in user_list:
            try:
                search_word = user["search_word"]
                our_uid = user["our_uid"]
                Common.logger(log_type, crawler).info(f"开始抓取 {search_word} 关键词搜索视频\n")
                cls.get_videolist(log_type=log_type,
                                  crawler=crawler,
                                  strategy=strategy,
                                  our_uid=our_uid,
                                  search_word=search_word,
                                  oss_endpoint=oss_endpoint,
                                  env=env,
                                  machine=machine)
            except Exception as e:
                Common.logger(log_type, crawler).error(f"get_search_videos:{e}\n")


if __name__ == '__main__':
    XiguaSearch.get_search_videos('search', 'xigua', 'xigua_search', 'out', 'dev', 'aliyun')
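    # 线上环境的一种可能调用方式(参数取值仅为假设示例,以实际部署配置为准):
    # XiguaSearch.get_search_videos('search', 'xigua', '搜索爬虫策略', 'inner', 'prod', 'aliyun')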