xigua_search.py 55 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958
  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/17
  4. import base64
  5. import json
  6. import os
  7. import random
  8. import shutil
  9. import string
  10. import sys
  11. import time
  12. from hashlib import md5
  13. import requests
  14. import urllib3
  15. from urllib.parse import quote
  16. from requests.adapters import HTTPAdapter
  17. sys.path.append(os.getcwd())
  18. from common.db import MysqlHelper
  19. from common.getuser import getUser
  20. from common.common import Common
  21. from common.feishu import Feishu
  22. from common.publish import Publish
  23. from common.public import get_config_from_mysql
  24. from common.userAgent import get_random_user_agent, get_random_header
class XiguaSearch:
    """Keyword-search crawler for 西瓜视频 (Xigua Video)."""
    # Platform display name used in logs / downstream storage (runtime value — keep as-is).
    platform = "西瓜视频"
    # Strategy tag attached to crawled videos ("Xigua crawler, search strategy").
    tag = "西瓜视频爬虫,搜索爬虫策略"
  28. @classmethod
  29. def get_rule(cls, log_type, crawler):
  30. try:
  31. while True:
  32. rule_sheet = Feishu.get_values_batch(log_type, crawler, "shxOl7")
  33. if rule_sheet is None:
  34. Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
  35. time.sleep(10)
  36. continue
  37. rule_dict = {
  38. "play_cnt": int(rule_sheet[1][2]),
  39. "min_duration": int(rule_sheet[2][2]),
  40. "max_duration": int(rule_sheet[3][2]),
  41. "publish_time": int(rule_sheet[4][2]),
  42. }
  43. return rule_dict
  44. except Exception as e:
  45. Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
  46. # 下载规则
  47. @classmethod
  48. def download_rule(cls, video_info_dict, rule_dict):
  49. if video_info_dict['play_cnt'] >= rule_dict['play_cnt']:
  50. if video_info_dict['comment_cnt'] >= rule_dict['comment_cnt']:
  51. if video_info_dict['like_cnt'] >= rule_dict['like_cnt']:
  52. if video_info_dict['duration'] >= rule_dict['duration']:
  53. if video_info_dict['video_width'] >= rule_dict['video_width'] \
  54. or video_info_dict['video_height'] >= rule_dict['video_height']:
  55. return True
  56. else:
  57. return False
  58. else:
  59. return False
  60. else:
  61. return False
  62. else:
  63. return False
  64. else:
  65. return False
  66. # 过滤词库
  67. @classmethod
  68. def filter_words(cls, log_type, crawler):
  69. try:
  70. while True:
  71. filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'KGB4Hc')
  72. if filter_words_sheet is None:
  73. Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
  74. continue
  75. filter_words_list = []
  76. for x in filter_words_sheet:
  77. for y in x:
  78. if y is None:
  79. pass
  80. else:
  81. filter_words_list.append(y)
  82. return filter_words_list
  83. except Exception as e:
  84. Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')
  85. # 获取用户信息(字典格式). 注意:部分 user_id 字符类型是 int / str
  86. @classmethod
  87. def get_user_list(cls, log_type, crawler, sheetid, env, machine):
  88. try:
  89. while True:
  90. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  91. if user_sheet is None:
  92. Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
  93. continue
  94. our_user_list = []
  95. for i in range(1, len(user_sheet)):
  96. our_uid = user_sheet[i][6]
  97. search_word = user_sheet[i][4]
  98. tag1 = user_sheet[i][8]
  99. tag2 = user_sheet[i][9]
  100. tag3 = user_sheet[i][10]
  101. tag4 = user_sheet[i][11]
  102. tag5 = user_sheet[i][12]
  103. tag6 = user_sheet[i][13]
  104. tag7 = user_sheet[i][14]
  105. Common.logger(log_type, crawler).info(f"正在更新 {search_word} 关键词信息\n")
  106. if our_uid is None:
  107. default_user = getUser.get_default_user()
  108. # 用来创建our_id的信息
  109. user_dict = {
  110. 'recommendStatus': -6,
  111. 'appRecommendStatus': -6,
  112. 'nickName': default_user['nickName'],
  113. 'avatarUrl': default_user['avatarUrl'],
  114. 'tagName': f'{tag1},{tag2},{tag3},{tag4},{tag5},{tag6},{tag7}',
  115. }
  116. Common.logger(log_type, crawler).info(f'新创建的站内UID:{our_uid}')
  117. our_uid = getUser.create_uid(log_type, crawler, user_dict, env)
  118. if env == 'prod':
  119. our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
  120. else:
  121. our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
  122. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
  123. [[our_uid, our_user_link]])
  124. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  125. our_user_dict = {
  126. 'out_uid': '',
  127. 'search_word': search_word,
  128. 'our_uid': our_uid,
  129. 'our_user_link': f'https://admin.piaoquantv.com/ums/user/{our_uid}/post',
  130. }
  131. our_user_list.append(our_user_dict)
  132. return our_user_list
  133. except Exception as e:
  134. Common.logger(log_type, crawler).error(f'get_user_id_from_feishu异常:{e}\n')
  135. @classmethod
  136. def random_signature(cls):
  137. src_digits = string.digits # string_数字
  138. src_uppercase = string.ascii_uppercase # string_大写字母
  139. src_lowercase = string.ascii_lowercase # string_小写字母
  140. digits_num = random.randint(1, 6)
  141. uppercase_num = random.randint(1, 26 - digits_num - 1)
  142. lowercase_num = 26 - (digits_num + uppercase_num)
  143. password = random.sample(src_digits, digits_num) + random.sample(src_uppercase, uppercase_num) + random.sample(
  144. src_lowercase, lowercase_num)
  145. random.shuffle(password)
  146. new_password = 'AAAAAAAAAA' + ''.join(password)[10:-4] + 'AAAB'
  147. new_password_start = new_password[0:18]
  148. new_password_end = new_password[-7:]
  149. if new_password[18] == '8':
  150. new_password = new_password_start + 'w' + new_password_end
  151. elif new_password[18] == '9':
  152. new_password = new_password_start + 'x' + new_password_end
  153. elif new_password[18] == '-':
  154. new_password = new_password_start + 'y' + new_password_end
  155. elif new_password[18] == '.':
  156. new_password = new_password_start + 'z' + new_password_end
  157. else:
  158. new_password = new_password_start + 'y' + new_password_end
  159. return new_password
  160. # 获取视频详情
  161. @classmethod
  162. def get_video_url(cls, log_type, crawler, gid):
  163. try:
  164. url = 'https://www.ixigua.com/api/mixVideo/information?'
  165. headers = {
  166. "accept-encoding": "gzip, deflate",
  167. "accept-language": "zh-CN,zh-Hans;q=0.9",
  168. "user-agent": get_random_user_agent('pc'),
  169. "referer": "https://www.ixigua.com/7102614741050196520?logTag=0531c88ac04f38ab2c62",
  170. }
  171. params = {
  172. 'mixId': gid,
  173. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfC'
  174. 'NVVIOBNjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  175. 'X-Bogus': 'DFSzswVupYTANCJOSBk0P53WxM-r',
  176. '_signature': '_02B4Z6wo0000119LvEwAAIDCuktNZ0y5wkdfS7jAALThuOR8D9yWNZ.EmWHKV0WSn6Px'
  177. 'fPsH9-BldyxVje0f49ryXgmn7Tzk-swEHNb15TiGqa6YF.cX0jW8Eds1TtJOIZyfc9s5emH7gdWN94',
  178. }
  179. cookies = {
  180. 'ixigua-a-s': '1',
  181. 'msToken': 'IlG0wd0Pylyw9ghcYiB2YseUmTwrsrqqhXrbIcsSaTcLTJyVlbYJzk20zw3UO-CfrfCNVVIOB'
  182. 'NjIl7vfBoxnVUwO9ZyzAI3umSKsT5-pef_RRfQCJwmA',
  183. 'ttwid': '1%7C_yXQeHWwLZgCsgHClOwTCdYSOt_MjdOkgnPIkpi-Sr8%7C1661241238%7Cf57d0c5ef3f1d7'
  184. '6e049fccdca1ac54887c34d1f8731c8e51a49780ff0ceab9f8',
  185. 'tt_scid': 'QZ4l8KXDG0YAEaMCSbADdcybdKbUfG4BC6S4OBv9lpRS5VyqYLX2bIR8CTeZeGHR9ee3',
  186. 'MONITOR_WEB_ID': '0a49204a-7af5-4e96-95f0-f4bafb7450ad',
  187. '__ac_nonce': '06304878000964fdad287',
  188. '__ac_signature': '_02B4Z6wo00f017Rcr3AAAIDCUVxeW1tOKEu0fKvAAI4cvoYzV-wBhq7B6D8k0no7lb'
  189. 'FlvYoinmtK6UXjRIYPXnahUlFTvmWVtb77jsMkKAXzAEsLE56m36RlvL7ky.M3Xn52r9t1IEb7IR3ke8',
  190. 'ttcid': 'e56fabf6e85d4adf9e4d91902496a0e882',
  191. '_tea_utm_cache_1300': 'undefined',
  192. 'support_avif': 'false',
  193. 'support_webp': 'false',
  194. 'xiguavideopcwebid': '7134967546256016900',
  195. 'xiguavideopcwebid.sig': 'xxRww5R1VEMJN_dQepHorEu_eAc',
  196. }
  197. urllib3.disable_warnings()
  198. s = requests.session()
  199. # max_retries=3 重试3次
  200. s.mount('http://', HTTPAdapter(max_retries=3))
  201. s.mount('https://', HTTPAdapter(max_retries=3))
  202. response = s.get(url=url, headers=headers, params=params, cookies=cookies, verify=False,
  203. proxies=Common.tunnel_proxies(), timeout=5)
  204. response.close()
  205. if 'data' not in response.json() or response.json()['data'] == '':
  206. Common.logger(log_type, crawler).warning('get_video_info: response: {}', response)
  207. else:
  208. video_info = response.json()['data']['gidInformation']['packerData']['video']
  209. video_url_dict = {}
  210. # video_url
  211. if 'videoResource' not in video_info:
  212. video_url_dict["video_url"] = ''
  213. video_url_dict["audio_url"] = ''
  214. video_url_dict["video_width"] = 0
  215. video_url_dict["video_height"] = 0
  216. elif 'dash_120fps' in video_info['videoResource']:
  217. if "video_list" in video_info['videoResource']['dash_120fps'] and 'video_4' in \
  218. video_info['videoResource']['dash_120fps']['video_list']:
  219. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  220. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_4']['backup_url_1']
  221. if len(video_url) % 3 == 1:
  222. video_url += '=='
  223. elif len(video_url) % 3 == 2:
  224. video_url += '='
  225. elif len(audio_url) % 3 == 1:
  226. audio_url += '=='
  227. elif len(audio_url) % 3 == 2:
  228. audio_url += '='
  229. video_url = base64.b64decode(video_url).decode('utf8')
  230. audio_url = base64.b64decode(audio_url).decode('utf8')
  231. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vwidth']
  232. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_4']['vheight']
  233. video_url_dict["video_url"] = video_url
  234. video_url_dict["audio_url"] = audio_url
  235. video_url_dict["video_width"] = video_width
  236. video_url_dict["video_height"] = video_height
  237. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_3' in \
  238. video_info['videoResource']['dash_120fps']['video_list']:
  239. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  240. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_3']['backup_url_1']
  241. if len(video_url) % 3 == 1:
  242. video_url += '=='
  243. elif len(video_url) % 3 == 2:
  244. video_url += '='
  245. elif len(audio_url) % 3 == 1:
  246. audio_url += '=='
  247. elif len(audio_url) % 3 == 2:
  248. audio_url += '='
  249. video_url = base64.b64decode(video_url).decode('utf8')
  250. audio_url = base64.b64decode(audio_url).decode('utf8')
  251. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vwidth']
  252. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_3']['vheight']
  253. video_url_dict["video_url"] = video_url
  254. video_url_dict["audio_url"] = audio_url
  255. video_url_dict["video_width"] = video_width
  256. video_url_dict["video_height"] = video_height
  257. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_2' in \
  258. video_info['videoResource']['dash_120fps']['video_list']:
  259. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  260. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_2']['backup_url_1']
  261. if len(video_url) % 3 == 1:
  262. video_url += '=='
  263. elif len(video_url) % 3 == 2:
  264. video_url += '='
  265. elif len(audio_url) % 3 == 1:
  266. audio_url += '=='
  267. elif len(audio_url) % 3 == 2:
  268. audio_url += '='
  269. video_url = base64.b64decode(video_url).decode('utf8')
  270. audio_url = base64.b64decode(audio_url).decode('utf8')
  271. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vwidth']
  272. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_2']['vheight']
  273. video_url_dict["video_url"] = video_url
  274. video_url_dict["audio_url"] = audio_url
  275. video_url_dict["video_width"] = video_width
  276. video_url_dict["video_height"] = video_height
  277. elif "video_list" in video_info['videoResource']['dash_120fps'] and 'video_1' in \
  278. video_info['videoResource']['dash_120fps']['video_list']:
  279. video_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  280. audio_url = video_info['videoResource']['dash_120fps']['video_list']['video_1']['backup_url_1']
  281. if len(video_url) % 3 == 1:
  282. video_url += '=='
  283. elif len(video_url) % 3 == 2:
  284. video_url += '='
  285. elif len(audio_url) % 3 == 1:
  286. audio_url += '=='
  287. elif len(audio_url) % 3 == 2:
  288. audio_url += '='
  289. video_url = base64.b64decode(video_url).decode('utf8')
  290. audio_url = base64.b64decode(audio_url).decode('utf8')
  291. video_width = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vwidth']
  292. video_height = video_info['videoResource']['dash_120fps']['video_list']['video_1']['vheight']
  293. video_url_dict["video_url"] = video_url
  294. video_url_dict["audio_url"] = audio_url
  295. video_url_dict["video_width"] = video_width
  296. video_url_dict["video_height"] = video_height
  297. elif 'dynamic_video' in video_info['videoResource']['dash_120fps'] \
  298. and 'dynamic_video_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  299. and 'dynamic_audio_list' in video_info['videoResource']['dash_120fps']['dynamic_video'] \
  300. and len(
  301. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list']) != 0 \
  302. and len(
  303. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list']) != 0:
  304. video_url = \
  305. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1][
  306. 'backup_url_1']
  307. audio_url = \
  308. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_audio_list'][-1][
  309. 'backup_url_1']
  310. if len(video_url) % 3 == 1:
  311. video_url += '=='
  312. elif len(video_url) % 3 == 2:
  313. video_url += '='
  314. elif len(audio_url) % 3 == 1:
  315. audio_url += '=='
  316. elif len(audio_url) % 3 == 2:
  317. audio_url += '='
  318. video_url = base64.b64decode(video_url).decode('utf8')
  319. audio_url = base64.b64decode(audio_url).decode('utf8')
  320. video_width = \
  321. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1][
  322. 'vwidth']
  323. video_height = \
  324. video_info['videoResource']['dash_120fps']['dynamic_video']['dynamic_video_list'][-1][
  325. 'vheight']
  326. video_url_dict["video_url"] = video_url
  327. video_url_dict["audio_url"] = audio_url
  328. video_url_dict["video_width"] = video_width
  329. video_url_dict["video_height"] = video_height
  330. else:
  331. video_url_dict["video_url"] = ''
  332. video_url_dict["audio_url"] = ''
  333. video_url_dict["video_width"] = 0
  334. video_url_dict["video_height"] = 0
  335. elif 'dash' in video_info['videoResource']:
  336. if "video_list" in video_info['videoResource']['dash'] and 'video_4' in \
  337. video_info['videoResource']['dash']['video_list']:
  338. video_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  339. audio_url = video_info['videoResource']['dash']['video_list']['video_4']['backup_url_1']
  340. if len(video_url) % 3 == 1:
  341. video_url += '=='
  342. elif len(video_url) % 3 == 2:
  343. video_url += '='
  344. elif len(audio_url) % 3 == 1:
  345. audio_url += '=='
  346. elif len(audio_url) % 3 == 2:
  347. audio_url += '='
  348. video_url = base64.b64decode(video_url).decode('utf8')
  349. audio_url = base64.b64decode(audio_url).decode('utf8')
  350. video_width = video_info['videoResource']['dash']['video_list']['video_4']['vwidth']
  351. video_height = video_info['videoResource']['dash']['video_list']['video_4']['vheight']
  352. video_url_dict["video_url"] = video_url
  353. video_url_dict["audio_url"] = audio_url
  354. video_url_dict["video_width"] = video_width
  355. video_url_dict["video_height"] = video_height
  356. elif "video_list" in video_info['videoResource']['dash'] and 'video_3' in \
  357. video_info['videoResource']['dash']['video_list']:
  358. video_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  359. audio_url = video_info['videoResource']['dash']['video_list']['video_3']['backup_url_1']
  360. if len(video_url) % 3 == 1:
  361. video_url += '=='
  362. elif len(video_url) % 3 == 2:
  363. video_url += '='
  364. elif len(audio_url) % 3 == 1:
  365. audio_url += '=='
  366. elif len(audio_url) % 3 == 2:
  367. audio_url += '='
  368. video_url = base64.b64decode(video_url).decode('utf8')
  369. audio_url = base64.b64decode(audio_url).decode('utf8')
  370. video_width = video_info['videoResource']['dash']['video_list']['video_3']['vwidth']
  371. video_height = video_info['videoResource']['dash']['video_list']['video_3']['vheight']
  372. video_url_dict["video_url"] = video_url
  373. video_url_dict["audio_url"] = audio_url
  374. video_url_dict["video_width"] = video_width
  375. video_url_dict["video_height"] = video_height
  376. elif "video_list" in video_info['videoResource']['dash'] and 'video_2' in \
  377. video_info['videoResource']['dash']['video_list']:
  378. video_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  379. audio_url = video_info['videoResource']['dash']['video_list']['video_2']['backup_url_1']
  380. if len(video_url) % 3 == 1:
  381. video_url += '=='
  382. elif len(video_url) % 3 == 2:
  383. video_url += '='
  384. elif len(audio_url) % 3 == 1:
  385. audio_url += '=='
  386. elif len(audio_url) % 3 == 2:
  387. audio_url += '='
  388. video_url = base64.b64decode(video_url).decode('utf8')
  389. audio_url = base64.b64decode(audio_url).decode('utf8')
  390. video_width = video_info['videoResource']['dash']['video_list']['video_2']['vwidth']
  391. video_height = video_info['videoResource']['dash']['video_list']['video_2']['vheight']
  392. video_url_dict["video_url"] = video_url
  393. video_url_dict["audio_url"] = audio_url
  394. video_url_dict["video_width"] = video_width
  395. video_url_dict["video_height"] = video_height
  396. elif "video_list" in video_info['videoResource']['dash'] and 'video_1' in \
  397. video_info['videoResource']['dash']['video_list']:
  398. video_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  399. audio_url = video_info['videoResource']['dash']['video_list']['video_1']['backup_url_1']
  400. if len(video_url) % 3 == 1:
  401. video_url += '=='
  402. elif len(video_url) % 3 == 2:
  403. video_url += '='
  404. elif len(audio_url) % 3 == 1:
  405. audio_url += '=='
  406. elif len(audio_url) % 3 == 2:
  407. audio_url += '='
  408. video_url = base64.b64decode(video_url).decode('utf8')
  409. audio_url = base64.b64decode(audio_url).decode('utf8')
  410. video_width = video_info['videoResource']['dash']['video_list']['video_1']['vwidth']
  411. video_height = video_info['videoResource']['dash']['video_list']['video_1']['vheight']
  412. video_url_dict["video_url"] = video_url
  413. video_url_dict["audio_url"] = audio_url
  414. video_url_dict["video_width"] = video_width
  415. video_url_dict["video_height"] = video_height
  416. elif 'dynamic_video' in video_info['videoResource']['dash'] \
  417. and 'dynamic_video_list' in video_info['videoResource']['dash']['dynamic_video'] \
  418. and 'dynamic_audio_list' in video_info['videoResource']['dash']['dynamic_video'] \
  419. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list']) != 0 \
  420. and len(video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list']) != 0:
  421. video_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1][
  422. 'backup_url_1']
  423. audio_url = video_info['videoResource']['dash']['dynamic_video']['dynamic_audio_list'][-1][
  424. 'backup_url_1']
  425. if len(video_url) % 3 == 1:
  426. video_url += '=='
  427. elif len(video_url) % 3 == 2:
  428. video_url += '='
  429. elif len(audio_url) % 3 == 1:
  430. audio_url += '=='
  431. elif len(audio_url) % 3 == 2:
  432. audio_url += '='
  433. video_url = base64.b64decode(video_url).decode('utf8')
  434. audio_url = base64.b64decode(audio_url).decode('utf8')
  435. video_width = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1][
  436. 'vwidth']
  437. video_height = video_info['videoResource']['dash']['dynamic_video']['dynamic_video_list'][-1][
  438. 'vheight']
  439. video_url_dict["video_url"] = video_url
  440. video_url_dict["audio_url"] = audio_url
  441. video_url_dict["video_width"] = video_width
  442. video_url_dict["video_height"] = video_height
  443. else:
  444. video_url_dict["video_url"] = ''
  445. video_url_dict["audio_url"] = ''
  446. video_url_dict["video_width"] = 0
  447. video_url_dict["video_height"] = 0
  448. elif 'normal' in video_info['videoResource']:
  449. if "video_list" in video_info['videoResource']['normal'] and 'video_4' in \
  450. video_info['videoResource']['normal']['video_list']:
  451. video_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  452. audio_url = video_info['videoResource']['normal']['video_list']['video_4']['backup_url_1']
  453. if len(video_url) % 3 == 1:
  454. video_url += '=='
  455. elif len(video_url) % 3 == 2:
  456. video_url += '='
  457. elif len(audio_url) % 3 == 1:
  458. audio_url += '=='
  459. elif len(audio_url) % 3 == 2:
  460. audio_url += '='
  461. video_url = base64.b64decode(video_url).decode('utf8')
  462. audio_url = base64.b64decode(audio_url).decode('utf8')
  463. video_width = video_info['videoResource']['normal']['video_list']['video_4']['vwidth']
  464. video_height = video_info['videoResource']['normal']['video_list']['video_4']['vheight']
  465. video_url_dict["video_url"] = video_url
  466. video_url_dict["audio_url"] = audio_url
  467. video_url_dict["video_width"] = video_width
  468. video_url_dict["video_height"] = video_height
  469. elif "video_list" in video_info['videoResource']['normal'] and 'video_3' in \
  470. video_info['videoResource']['normal']['video_list']:
  471. video_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  472. audio_url = video_info['videoResource']['normal']['video_list']['video_3']['backup_url_1']
  473. if len(video_url) % 3 == 1:
  474. video_url += '=='
  475. elif len(video_url) % 3 == 2:
  476. video_url += '='
  477. elif len(audio_url) % 3 == 1:
  478. audio_url += '=='
  479. elif len(audio_url) % 3 == 2:
  480. audio_url += '='
  481. video_url = base64.b64decode(video_url).decode('utf8')
  482. audio_url = base64.b64decode(audio_url).decode('utf8')
  483. video_width = video_info['videoResource']['normal']['video_list']['video_3']['vwidth']
  484. video_height = video_info['videoResource']['normal']['video_list']['video_3']['vheight']
  485. video_url_dict["video_url"] = video_url
  486. video_url_dict["audio_url"] = audio_url
  487. video_url_dict["video_width"] = video_width
  488. video_url_dict["video_height"] = video_height
  489. elif "video_list" in video_info['videoResource']['normal'] and 'video_2' in \
  490. video_info['videoResource']['normal']['video_list']:
  491. video_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  492. audio_url = video_info['videoResource']['normal']['video_list']['video_2']['backup_url_1']
  493. if len(video_url) % 3 == 1:
  494. video_url += '=='
  495. elif len(video_url) % 3 == 2:
  496. video_url += '='
  497. elif len(audio_url) % 3 == 1:
  498. audio_url += '=='
  499. elif len(audio_url) % 3 == 2:
  500. audio_url += '='
  501. video_url = base64.b64decode(video_url).decode('utf8')
  502. audio_url = base64.b64decode(audio_url).decode('utf8')
  503. video_width = video_info['videoResource']['normal']['video_list']['video_2']['vwidth']
  504. video_height = video_info['videoResource']['normal']['video_list']['video_2']['vheight']
  505. video_url_dict["video_url"] = video_url
  506. video_url_dict["audio_url"] = audio_url
  507. video_url_dict["video_width"] = video_width
  508. video_url_dict["video_height"] = video_height
  509. elif "video_list" in video_info['videoResource']['normal'] and 'video_1' in \
  510. video_info['videoResource']['normal']['video_list']:
  511. video_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  512. audio_url = video_info['videoResource']['normal']['video_list']['video_1']['backup_url_1']
  513. if len(video_url) % 3 == 1:
  514. video_url += '=='
  515. elif len(video_url) % 3 == 2:
  516. video_url += '='
  517. elif len(audio_url) % 3 == 1:
  518. audio_url += '=='
  519. elif len(audio_url) % 3 == 2:
  520. audio_url += '='
  521. video_url = base64.b64decode(video_url).decode('utf8')
  522. audio_url = base64.b64decode(audio_url).decode('utf8')
  523. video_width = video_info['videoResource']['normal']['video_list']['video_1']['vwidth']
  524. video_height = video_info['videoResource']['normal']['video_list']['video_1']['vheight']
  525. video_url_dict["video_url"] = video_url
  526. video_url_dict["audio_url"] = audio_url
  527. video_url_dict["video_width"] = video_width
  528. video_url_dict["video_height"] = video_height
  529. elif 'dynamic_video' in video_info['videoResource']['normal'] \
  530. and 'dynamic_video_list' in video_info['videoResource']['normal']['dynamic_video'] \
  531. and 'dynamic_audio_list' in video_info['videoResource']['normal']['dynamic_video'] \
  532. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list']) != 0 \
  533. and len(video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list']) != 0:
  534. video_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  535. 'backup_url_1']
  536. audio_url = video_info['videoResource']['normal']['dynamic_video']['dynamic_audio_list'][-1][
  537. 'backup_url_1']
  538. if len(video_url) % 3 == 1:
  539. video_url += '=='
  540. elif len(video_url) % 3 == 2:
  541. video_url += '='
  542. elif len(audio_url) % 3 == 1:
  543. audio_url += '=='
  544. elif len(audio_url) % 3 == 2:
  545. audio_url += '='
  546. video_url = base64.b64decode(video_url).decode('utf8')
  547. audio_url = base64.b64decode(audio_url).decode('utf8')
  548. video_width = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  549. 'vwidth']
  550. video_height = video_info['videoResource']['normal']['dynamic_video']['dynamic_video_list'][-1][
  551. 'vheight']
  552. video_url_dict["video_url"] = video_url
  553. video_url_dict["audio_url"] = audio_url
  554. video_url_dict["video_width"] = video_width
  555. video_url_dict["video_height"] = video_height
  556. else:
  557. video_url_dict["video_url"] = ''
  558. video_url_dict["audio_url"] = ''
  559. video_url_dict["video_width"] = 0
  560. video_url_dict["video_height"] = 0
  561. else:
  562. video_url_dict["video_url"] = ''
  563. video_url_dict["audio_url"] = ''
  564. video_url_dict["video_width"] = 0
  565. video_url_dict["video_height"] = 0
  566. return video_url_dict
  567. except Exception as e:
  568. Common.logger(log_type, crawler).error(f'get_video_url:{e}\n')
  569. @classmethod
  570. def get_video_info(cls, log_type, crawler, item_id):
  571. d_url = "http://a6.pstatp.com/article/full/11/1/{video_id}/{video_id}/1/0/?iid=3636030325&device_id=5787057242" \
  572. "&ac=wifi&channel=wandoujia&aid=13&app_name=news_article&version_code=532&version_name=5.3.2&device_platform" \
  573. "=android&ab_client=a1%2Cc2%2Ce1%2Cf2%2Cg2%2Cb3%2Cf4&abflag=3&ssmix=a&device_type=SM705" \
  574. "&device_brand=smartisan&os_api=19&os_version=4.4.2&uuid=864593021012562&openudid=e23a5ff037ef2d1a" \
  575. "&manifest_version_code=532&resolution=1080*1920&dpi=480&update_version_code=5320".format(
  576. video_id=item_id)
  577. res = requests.get(url=d_url, headers=get_random_header('pc'), proxies=Common.tunnel_proxies())
  578. data = json.loads(res.text)['data']
  579. item_counter = data['h5_extra']['itemCell']['itemCounter']
  580. user_info = data['user_info']
  581. detail_info = data['video_detail_info']
  582. video_dict = {'video_title': data['title'].replace('"', '').replace("'", ''),
  583. 'video_id': detail_info['video_id'],
  584. 'gid': data['group_id'],
  585. 'play_cnt': item_counter['videoWatchCount'],
  586. 'comment_cnt': item_counter['commentCount'],
  587. 'like_cnt': item_counter['diggCount'],
  588. 'share_cnt': item_counter['shareCount'],
  589. 'duration': data['video_duration'],
  590. 'publish_time_stamp': data['publish_time'],
  591. 'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S",
  592. time.localtime(data['publish_time'])),
  593. 'user_name': user_info['name'],
  594. 'user_id': user_info['user_id'],
  595. 'avatar_url': user_info['avatar_url'],
  596. 'cover_url': data['large_image']['url'].replace('\u0026', '&'),
  597. }
  598. return video_dict
  599. @classmethod
  600. def is_ruled(cls, log_type, crawler, video_dict, rule_dict):
  601. old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
  602. if video_dict['publish_time_stamp'] <= old_time:
  603. return False
  604. elif video_dict['play_cnt'] <= rule_dict['play_cnt']:
  605. return False
  606. elif video_dict['duration'] < rule_dict['min_duration'] or video_dict['duration'] > rule_dict['max_duration']:
  607. return False
  608. else:
  609. return True
  610. @classmethod
  611. def get_videolist(cls, log_type, crawler, strategy, our_uid, search_word, oss_endpoint, env, machine):
  612. total_count = 1
  613. offset = 0
  614. while True:
  615. signature = cls.random_signature()
  616. url = "https://www.ixigua.com/api/searchv2/complex/{}/{}?order_type=publish_time&click_position=new".format(
  617. quote(search_word), offset, signature)
  618. headers = {
  619. 'referer': 'https://www.ixigua.com/search/{}/?logTag=594535e3690f17a88cdb&tab_name=search'.format(
  620. quote(search_word)),
  621. 'cookie': 'ttwid=1%7Cx_4RDmVTqp6BQ5Xy5AnuCZCQdDyDxv-fnMVWzj19VU0%7C1679382377%7C4e25692dc4b9d5dca56d690001d168b21ed028a9ac075808ab9262238cb405ee;',
  622. 'user-agent': get_random_user_agent('pc'),
  623. }
  624. try:
  625. res = requests.request("GET", url, headers=headers, proxies=Common.tunnel_proxies())
  626. search_list = res.json()['data']['data']
  627. except Exception as e:
  628. continue
  629. if not search_list:
  630. Common.logger(log_type, crawler).error(f'关键词:{search_word},没有获取到视频列表:offset{offset}')
  631. return
  632. for video_info in search_list:
  633. v_type = video_info['type']
  634. rule_dict = cls.get_rule(log_type, crawler)
  635. publish_time = video_info['data']['publish_time']
  636. old_time = int(time.time()) - (3600 * 24 * rule_dict['publish_time'])
  637. if publish_time <= old_time:
  638. Common.logger(log_type, crawler).error(f'关键词:{search_word},抓取完毕,退出抓取\n')
  639. return
  640. filter_words = get_config_from_mysql(log_type, crawler, env, text='filter')
  641. is_filter = False
  642. for filter_word in filter_words:
  643. if filter_word in video_dict['video_title']:
  644. is_filter = True
  645. break
  646. if is_filter:
  647. Common.logger(log_type, crawler).info('标题已中过滤词:{}\n', video_dict['video_title'])
  648. continue
  649. if v_type == 'video':
  650. item_id = video_info['data']['group_id']
  651. if video_info['data']['publish_time'] <= old_time:
  652. Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
  653. continue
  654. elif video_info['data']['video_watch_count'] <= rule_dict['play_cnt']:
  655. Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
  656. continue
  657. elif video_info['data']['video_time'] < rule_dict['min_duration'] or video_info['data'][
  658. 'video_time'] > rule_dict['max_duration']:
  659. Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},不符合抓取规则\n')
  660. continue
  661. try:
  662. video_dict = cls.get_video_info(log_type, crawler, item_id)
  663. video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
  664. video_dict['video_width'] = video_url_dict["video_width"]
  665. video_dict['video_height'] = video_url_dict["video_height"]
  666. video_dict['audio_url'] = video_url_dict["audio_url"]
  667. video_dict['video_url'] = video_url_dict["video_url"]
  668. video_dict['session'] = signature
  669. except Exception as e:
  670. Common.logger(log_type, crawler).error(
  671. f'关键词:{search_word},视频:{item_id},获取详情失败,原因:{e}')
  672. continue
  673. if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
  674. Common.logger(log_type, crawler).info(
  675. f'关键词:{search_word},gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
  676. continue
  677. for k, v in video_dict.items():
  678. Common.logger(log_type, crawler).info(f"{k}:{v}")
  679. try:
  680. # print(
  681. # f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
  682. cls.download_publish(
  683. search_word=search_word,
  684. log_type=log_type,
  685. crawler=crawler,
  686. video_dict=video_dict,
  687. rule_dict=rule_dict,
  688. strategy=strategy,
  689. our_uid=our_uid,
  690. oss_endpoint=oss_endpoint,
  691. env=env,
  692. machine=machine
  693. )
  694. except Exception as e:
  695. Common.logger(log_type, crawler).error(f'关键词:{search_word},视频:{item_id},下载失败,原因:{e}')
  696. continue
  697. total_count += 1
  698. Common.logger(log_type, crawler).info(
  699. f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
  700. if total_count >= 30:
  701. return
  702. # elif v_type == 'pseries':
  703. # try:
  704. # item_id = video_info['data']['group_id']
  705. # p_url = "https://www.ixigua.com/api/videov2/pseries_more_v2?pSeriesId={}&rank=0&tailCount=30&aid=1768&msToken=wHEafKFLx0k3hihOPbhXYNsfMBxWiq2AB0K5R-34kEFixyq3ATi_DuXbL4Q47J9C2uK2zgWItMa1g2yc4FyDxM4dMijmSdwF4c4T8sSmOkoOI0wGzeEcPw==&X-Bogus=DFSzswVOzdUANG3ItaVHYr7TlqCv&_signature=_02B4Z6wo00001vB6l3QAAIDBZKzMeTihTmbwepPAANgh1Ai3JgFFo4e6anoezmBEpHfEMEYlWISGhXI-QKfev4N-2bwgXsHOuNGLnOsGqMbANIjFPh7Yj6OakQWrkbACenlv0P-arswtB6Zn45".format(
  706. # item_id)
  707. # p_headers = {
  708. # 'referer': 'https://www.ixigua.com/{}?series_flow=1&logTag=cfec9d927da968feff89'.format(
  709. # item_id),
  710. # 'user-agent': get_random_user_agent('pc'),
  711. # }
  712. # p_res = requests.request("GET", p_url, headers=p_headers,
  713. # proxies=Common.tunnel_proxies()).json()
  714. # except Exception as e:
  715. # Common.logger(log_type, crawler).error(f'合集:{item_id},没有获取到合集详情,原因:{e}')
  716. # continue
  717. # for video in p_res['data']:
  718. # item_id = video['item_id']
  719. # try:
  720. # video_dict = cls.get_video_info(log_type, crawler, item_id)
  721. # video_url_dict = cls.get_video_url(log_type, crawler, video_dict['gid'])
  722. # video_dict['video_width'] = video_url_dict["video_width"]
  723. # video_dict['video_height'] = video_url_dict["video_height"]
  724. # video_dict['audio_url'] = video_url_dict["audio_url"]
  725. # video_dict['video_url'] = video_url_dict["video_url"]
  726. # video_dict['session'] = signature
  727. # except Exception as e:
  728. # Common.logger(log_type, crawler).error(f'视频:{item_id},没有获取到视频详情,原因:{e}')
  729. # continue
  730. # if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
  731. # Common.logger(log_type, crawler).info(
  732. # f'gid:{video_dict["gid"]},视频已下载,无需重复下载\n')
  733. # continue
  734. # if not cls.is_ruled(log_type, crawler, video_dict, rule_dict):
  735. # Common.logger(log_type, crawler).error(f'视频:{item_id},不符合抓取规则\n')
  736. # continue
  737. # for k, v in video_dict.items():
  738. # Common.logger(log_type, crawler).info(f"{k}:{v}")
  739. # try:
  740. # # print(
  741. # # f'search_word:{search_word},title:{video_dict["video_title"]},gid:{video_dict["gid"]},offset:{offset}, total:{total_count}')
  742. # cls.download_publish(
  743. # search_word=search_word,
  744. # log_type=log_type,
  745. # crawler=crawler,
  746. # video_dict=video_dict,
  747. # rule_dict=rule_dict,
  748. # strategy=strategy,
  749. # our_uid=our_uid,
  750. # oss_endpoint=oss_endpoint,
  751. # env=env,
  752. # machine=machine
  753. # )
  754. # total_count += 1
  755. # if total_count >= 30:
  756. # return
  757. # else:
  758. # break
  759. # except Exception as e:
  760. # Common.logger(log_type, crawler).error(f'视频:{item_id},download_publish异常:{e}\n')
  761. offset += 10
    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env, machine):
        """Return the number of crawler_video rows already stored for `video_id`.

        Callers treat a non-zero result as "already downloaded, skip".

        NOTE(review): `video_id` is interpolated straight into the SQL text.
        This is safe only while ids are numeric/trusted upstream — prefer a
        parameterized query if MysqlHelper supports one.
        """
        sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
        return len(repeat_video)
    # Download / upload
    @classmethod
    def download_publish(cls, log_type, crawler, search_word, strategy, video_dict, rule_dict, our_uid, oss_endpoint,
                         env, machine):
        """Download one crawled item (video + audio + cover), mux it, publish it to
        Piaoquan, then record the result in a Feishu sheet and the crawler_video
        MySQL table. Exceptions propagate to the caller (get_videolist wraps this
        call in try/except).
        """
        # Download the video stream
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_video',
                               title=video_dict['video_title'], url=video_dict['video_url'])
        # Download the audio stream
        Common.download_method(log_type=log_type, crawler=crawler, text='xigua_audio',
                               title=video_dict['video_title'], url=video_dict['audio_url'])
        # Mux audio and video into a single file
        Common.video_compose(log_type=log_type, crawler=crawler,
                             video_dir=f"./{crawler}/videos/{video_dict['video_title']}")
        md_title = md5(video_dict['video_title'].encode('utf8')).hexdigest()
        # NOTE(review): this size check uses the md5-hashed directory name while the
        # compose call above and the cleanup on upload failure below use the raw
        # title directory — confirm which layout Common.download_method produces.
        if os.path.getsize(f"./{crawler}/videos/{md_title}/video.mp4") == 0:
            # Empty download: remove the video folder and bail out
            shutil.rmtree(f"./{crawler}/videos/{md_title}")
            Common.logger(log_type, crawler).info("视频size=0,删除成功\n")
            return
        # ffmpeg_dict = Common.ffmpeg(log_type, crawler,
        #                             f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
        # if ffmpeg_dict is None or ffmpeg_dict['size'] == 0:
        #     Common.logger(log_type, crawler).warning(f"下载的视频无效,已删除\n")
        #     # 删除视频文件夹
        #     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
        #     return
        # Download the cover image
        Common.download_method(log_type=log_type, crawler=crawler, text='cover',
                               title=video_dict['video_title'], url=video_dict['cover_url'])
        # Save the video metadata to a local txt for the uploader
        Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
        # Upload to Piaoquan
        Common.logger(log_type, crawler).info("开始上传视频...")
        our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                  crawler=crawler,
                                                  strategy=strategy,
                                                  our_uid=our_uid,
                                                  env=env,
                                                  oss_endpoint=oss_endpoint)
        if env == 'dev':
            our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        else:
            our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
        Common.logger(log_type, crawler).info("视频上传完成")
        if our_video_id is None:
            # Upload failed: remove the local video folder
            shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
            return
        # Append the record to the Feishu sheet
        Feishu.insert_columns(log_type, 'xigua', "BUNvGC", "ROWS", 1, 2)
        upload_time = int(time.time())
        values = [[
            search_word,
            time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
            "关键词搜索",
            video_dict['video_title'],
            str(video_dict['video_id']),
            our_video_link,
            video_dict['gid'],
            video_dict['play_cnt'],
            video_dict['comment_cnt'],
            video_dict['like_cnt'],
            video_dict['share_cnt'],
            video_dict['duration'],
            str(video_dict['video_width']) + '*' + str(video_dict['video_height']),
            video_dict['publish_time_str'],
            video_dict['user_name'],
            video_dict['user_id'],
            video_dict['avatar_url'],
            video_dict['cover_url'],
            video_dict['video_url'],
            video_dict['audio_url']]]
        # Brief pause so the freshly inserted row exists before writing values
        time.sleep(1)
        Feishu.update_values(log_type, 'xigua', "BUNvGC", "E2:Z2", values)
        Common.logger(log_type, crawler).info(f"视频已保存至云文档\n")
        # Persist the record in the crawler_video table
        # NOTE(review): values are interpolated into the SQL text (titles/URLs with
        # quotes could break the statement); the strategy literal "定向爬虫策略"
        # differs from the "关键词搜索" label written to Feishu above — confirm.
        insert_sql = f""" insert into crawler_video(video_id,
                                                    user_id,
                                                    out_user_id,
                                                    platform,
                                                    strategy,
                                                    out_video_id,
                                                    video_title,
                                                    cover_url,
                                                    video_url,
                                                    duration,
                                                    publish_time,
                                                    play_cnt,
                                                    crawler_rule,
                                                    width,
                                                    height)
                                                    values({our_video_id},
                                                    {our_uid},
                                                    "{video_dict['user_id']}",
                                                    "{cls.platform}",
                                                    "定向爬虫策略",
                                                    "{video_dict['video_id']}",
                                                    "{video_dict['video_title']}",
                                                    "{video_dict['cover_url']}",
                                                    "{video_dict['video_url']}",
                                                    {int(video_dict['duration'])},
                                                    "{video_dict['publish_time_str']}",
                                                    {int(video_dict['play_cnt'])},
                                                    '{json.dumps(rule_dict)}',
                                                    {int(video_dict['video_width'])},
                                                    {int(video_dict['video_height'])}) """
        Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
        MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
        Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
  875. @classmethod
  876. def get_search_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  877. try:
  878. user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="SSPNPW", env=env,
  879. machine=machine)
  880. for user in user_list:
  881. search_word = user["search_word"]
  882. our_uid = user["our_uid"]
  883. Common.logger(log_type, crawler).info(f"开始抓取 {search_word} 用户主页视频\n")
  884. cls.get_videolist(log_type=log_type,
  885. crawler=crawler,
  886. strategy=strategy,
  887. our_uid=our_uid,
  888. search_word=search_word,
  889. oss_endpoint=oss_endpoint,
  890. env=env,
  891. machine=machine)
  892. except Exception as e:
  893. Common.logger(log_type, crawler).error(f"get_search_videos:{e}\n")
if __name__ == '__main__':
    # Manual smoke-test entry point; all sample invocations are kept commented out.
    # print(Follow.get_signature("follow", "xigua", "95420624045", "local"))
    # XiguaSearch.get_search_videos('search', 'xigua', 'xigua_search', 'inner', 'prod', 'aliyun')
    # Follow.get_videolist(log_type="follow",
    #                      crawler="xigua",
    #                      strategy="定向爬虫策略",
    #                      our_uid="6267141",
    #                      out_uid="95420624045",
    #                      oss_endpoint="out",
    #                      env="dev",
    #                      machine="local")
    # print(Follow.random_signature())
    # rule = Follow.get_rule("follow", "xigua")
    # print(type(rule))
    # print(type(json.dumps(rule)))
    # print(json.dumps(rule))
    pass