youtube_follow_api.py

  1. # -*- coding: utf-8 -*-
  2. # @Author: wangkun
  3. # @Time: 2023/2/3
  4. """
  5. YouTube 定向榜 (targeted-account crawler)
  6. 1. Publish time <= 1 month
  7. 2. 1 minute <= duration <= 10 minutes
  8. """
  9. import os
  10. import re
  11. import shutil
  12. import sys
  13. import time
  14. import json
  15. import requests
  16. sys.path.append(os.getcwd())
  17. from common.common import Common
  18. from common.db import MysqlHelper
  19. from common.feishu import Feishu
  20. from common.getuser import getUser
  21. from common.publish import Publish
  22. from common.translate import Translate
  23. from common.public import get_user_from_mysql, get_config_from_mysql
  24. headers = {
  25. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
  26. }
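# format_nums(): converts human-readable count strings such as "1.2万" or "3.4K" into plain integers; the regex at the end is a fallback for values without a unit suffix.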
  27. def format_nums(data):
  28. data_dict = [{'亿': 100000000}, {'百万': 1000000}, {'万': 10000}, {'k': 1000}, {'w': 10000}, {'m': 1000000},
  29. {'千': 1000}, {'M': 1000000}, {'K': 1000}, {'W': 10000}]
  30. data = str(data)
  31. for i in data_dict:
  32. index = data.find(list(i.keys())[0])
  33. if index > 0:
  34. count = int(float(data[:index]) * list(i.values())[0])
  35. return count
  36. elif index < 0:
  37. continue
  38. count = int(float(re.findall(r'\d+', data)[0]))
  39. return count
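# Worked examples (traced through the function above): format_nums('1.2万') -> 12000, format_nums('3.4K') -> 3400, format_nums('987') -> 987.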
  40. class YoutubeFollow:
  41. # Pagination: continuation (next-page) token for the browse API
  42. continuation = ''
  43. # Crawler platform identifier
  44. platform = 'youtube'
  45. headers = {
  46. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
  47. }
  48. @classmethod
  49. def get_out_user_info(cls, log_type, crawler, browse_id, out_user_id):
  50. """
  51. Fetch the external (YouTube) user's profile info
  52. :param log_type: log type
  53. :param crawler: which crawler
  54. :param browse_id: browse_id
  55. :param out_user_id: external user UID
  56. :return: out_user_dict = {'out_user_name': external user nickname,
  57. 'out_avatar_url': external user avatar URL,
  58. 'out_fans': external user follower count,
  59. 'out_play_cnt': external user total view count,
  60. 'out_create_time': channel creation date}
  61. """
  62. try:
  63. url = f'https://www.youtube.com/{out_user_id}/about'
  64. res = requests.get(url=url, headers=headers)
  65. info = re.findall(r'var ytInitialData = (.*?);</script>', res.text, re.S)[0]
  66. data = json.loads(info)
  67. header = data['header']['c4TabbedHeaderRenderer']
  68. tabs = data['contents']['twoColumnBrowseResultsRenderer']['tabs']
  69. try:
  70. subsimpleText = header['subscriberCountText']['simpleText'].replace('位订阅者', '')
  71. out_fans = format_nums(subsimpleText)
  72. except Exception as e:
  73. out_fans = 0
  74. for tab in tabs:
  75. if 'tabRenderer' not in tab or 'content' not in tab['tabRenderer']:
  76. continue
  77. viewCountText = \
  78. tab['tabRenderer']['content']['sectionListRenderer']['contents'][0]['itemSectionRenderer'][
  79. 'contents'][0]['channelAboutFullMetadataRenderer']['viewCountText']['simpleText']
  80. out_create_time = \
  81. tab['tabRenderer']['content']['sectionListRenderer']['contents'][0]['itemSectionRenderer'][
  82. 'contents'][0]['channelAboutFullMetadataRenderer']['joinedDateText']['runs'][1]['text']
  83. break
  84. out_user_dict = {
  85. 'out_user_name': header['title'],
  86. 'out_avatar_url': header['avatar']['thumbnails'][-1]['url'],
  87. 'out_fans': out_fans,
  88. 'out_play_cnt': int(
  89. viewCountText.replace('收看次數:', '').replace('次', '').replace(',', '')) if viewCountText else 0,
  90. 'out_create_time': out_create_time.replace('年', '-').replace('月', '-').replace('日', ''),
  91. }
  92. # print(out_user_dict)
  93. return out_user_dict
  94. except Exception as e:
  95. Common.logger(log_type, crawler).error(f'get_out_user_info异常:{e}\n')
  96. @classmethod
  97. def get_user_from_feishu(cls, log_type, crawler, sheetid, env, machine):
  98. """
  99. Fill in the missing fields of the Feishu user sheet and return the user list
  100. :param log_type: log type
  101. :param crawler: which crawler
  102. :param sheetid: Feishu sheet id
  103. :param env: prod = production, dev = test
  104. :param machine: deployment machine: aliyun / aliyun_hk on Aliyun, macpro / macair / local for offline runs
  105. :return: user_list
  106. """
  107. try:
  108. user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
  109. user_list = []
  110. for i in range(1, len(user_sheet)):
  111. out_uid = user_sheet[i][2]
  112. user_name = user_sheet[i][3]
  113. browse_id = user_sheet[i][5]
  114. our_uid = user_sheet[i][6]
  115. user_url = user_sheet[i][4]
  116. if out_uid is not None and user_name is not None:
  117. Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
  118. if our_uid is None:
  119. sql = f""" select * from crawler_user where platform="{cls.platform}" and out_user_id="{out_uid}" """
  120. our_user_info = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  121. # No (youtube + out_user_id) row in the database: create an in-house account UID, write it to the targeted-account Feishu sheet, and insert it together with the external user info into the crawler user table
  122. if not our_user_info:
  123. # Fetch the external account info and write it to the database
  124. try:
  125. out_user_dict = cls.get_out_user_info(log_type, crawler, browse_id, out_uid)
  126. except Exception as e:
  127. continue
  128. out_avatar_url = out_user_dict['out_avatar_url']
  129. out_create_time = out_user_dict['out_create_time']
  130. out_play_cnt = out_user_dict['out_play_cnt']
  131. out_fans = out_user_dict['out_fans']
  132. tag = 'youtube爬虫,定向爬虫策略'
  133. # 创建站内账号
  134. create_user_dict = {
  135. 'nickName': user_name,
  136. 'avatarUrl': out_avatar_url,
  137. 'tagName': tag,
  138. }
  139. our_uid = getUser.create_uid(log_type, crawler, create_user_dict, env)
  140. Common.logger(log_type, crawler).info(f'新创建的站内UID:{our_uid}')
  141. if env == 'prod':
  142. our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
  143. else:
  144. our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
  145. Common.logger(log_type, crawler).info(f'站内用户主页链接:{our_user_link}')
  146. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
  147. [[our_uid, our_user_link]])
  148. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!')
  149. sql = f""" insert into crawler_user(user_id,
  150. out_user_id,
  151. out_user_name,
  152. out_avatar_url,
  153. out_create_time,
  154. out_play_cnt,
  155. out_fans,
  156. platform,
  157. tag)
  158. values({our_uid},
  159. "{out_uid}",
  160. "{user_name}",
  161. "{out_avatar_url}",
  162. "{out_create_time}",
  163. {out_play_cnt},
  164. {out_fans},
  165. "{cls.platform}",
  166. "{tag}") """
  167. Common.logger(log_type, crawler).info(f'sql:{sql}')
  168. MysqlHelper.update_values(log_type, crawler, sql, env, machine)
  169. Common.logger(log_type, crawler).info('用户信息插入数据库成功!\n')
  170. # A (youtube + out_user_id) row already exists: write the in-house UID from the database straight to Feishu
  171. else:
  172. our_uid = our_user_info[0][1]
  173. if env == 'prod':
  174. our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
  175. else:
  176. our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
  177. Common.logger(log_type, crawler).info(f'站内用户主页链接:{our_user_link}')
  178. Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
  179. [[our_uid, our_user_link]])
  180. Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!\n')
  181. user_dict = {
  182. 'out_user_id': out_uid,
  183. 'out_user_name': user_name,
  184. 'out_browse_id': browse_id,
  185. 'our_user_id': our_uid,
  186. 'out_user_url': user_url
  187. }
  188. user_list.append(user_dict)
  189. else:
  190. pass
  191. return user_list
  192. except Exception as e:
  193. Common.logger(log_type, crawler).error(f"get_user_from_feishu异常:{e}\n")
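# get_continuation(): pulls the next-page token out of a continuationItemRenderer entry returned by the browse endpoint.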
  194. @classmethod
  195. def get_continuation(cls, data):
  196. continuation = data['continuationItemRenderer']['continuationEndpoint']['continuationCommand']['token']
  197. return continuation
  198. @classmethod
  199. def get_feeds(cls, log_type, crawler, browse_id, out_uid):
  200. """
  201. Fetch the video list of a user's homepage
  202. :param log_type: log type
  203. :param crawler: which crawler
  204. :param browse_id: unique value in each user's homepage request parameters
  205. :param out_uid: external user UID
  206. :return: video_list
  207. """
  208. url = "https://www.youtube.com/youtubei/v1/browse?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8&prettyPrint=false"
  209. payload = json.dumps({
  210. "context": {
  211. "client": {
  212. "hl": "zh-CN",
  213. "gl": "US",
  214. "remoteHost": "38.93.247.21",
  215. "deviceMake": "Apple",
  216. "deviceModel": "",
  217. "visitorData": "CgtraDZfVnB4NXdIWSi6mIOfBg%3D%3D",
  218. "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36,gzip(gfe)",
  219. "clientName": "WEB",
  220. "clientVersion": "2.20230201.01.00",
  221. "osName": "Macintosh",
  222. "osVersion": "10_15_7",
  223. "originalUrl": f"https://www.youtube.com/{out_uid}/videos",
  224. "platform": "DESKTOP",
  225. "clientFormFactor": "UNKNOWN_FORM_FACTOR",
  226. "configInfo": {
  227. "appInstallData": "CLqYg58GEInorgUQuIuuBRCU-K4FENfkrgUQuNSuBRC2nP4SEPuj_hIQ5_euBRCy9a4FEKLsrgUQt-CuBRDi1K4FEILdrgUQh92uBRDM364FEP7urgUQzPWuBRDZ6a4FEOSg_hIQo_muBRDvo_4SEMnJrgUQlqf-EhCR-PwS"
  228. },
  229. "timeZone": "Asia/Shanghai",
  230. "browserName": "Chrome",
  231. "browserVersion": "109.0.0.0",
  232. "acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
  233. "deviceExperimentId": "ChxOekU1TlRReU5qWTBOVFExTVRRNU5qRTBOdz09ELqYg58GGOmU7Z4G",
  234. "screenWidthPoints": 944,
  235. "screenHeightPoints": 969,
  236. "screenPixelDensity": 1,
  237. "screenDensityFloat": 1,
  238. "utcOffsetMinutes": 480,
  239. "userInterfaceTheme": "USER_INTERFACE_THEME_LIGHT",
  240. "memoryTotalKbytes": "8000000",
  241. "mainAppWebInfo": {
  242. "graftUrl": f"/{out_uid}/videos",
  243. "pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_CAN_BE_INSTALLED",
  244. "webDisplayMode": "WEB_DISPLAY_MODE_FULLSCREEN",
  245. "isWebNativeShareAvailable": True
  246. }
  247. },
  248. "user": {
  249. "lockedSafetyMode": False
  250. },
  251. "request": {
  252. "useSsl": True,
  253. "internalExperimentFlags": [],
  254. "consistencyTokenJars": []
  255. },
  256. "clickTracking": {
  257. "clickTrackingParams": "CBcQ8JMBGAYiEwiNhIXX9IL9AhUFSUwIHWnnDks="
  258. },
  259. "adSignalsInfo": {
  260. "params": [
  261. {
  262. "key": "dt",
  263. "value": "1675676731048"
  264. },
  265. {
  266. "key": "flash",
  267. "value": "0"
  268. },
  269. {
  270. "key": "frm",
  271. "value": "0"
  272. },
  273. {
  274. "key": "u_tz",
  275. "value": "480"
  276. },
  277. {
  278. "key": "u_his",
  279. "value": "4"
  280. },
  281. {
  282. "key": "u_h",
  283. "value": "1080"
  284. },
  285. {
  286. "key": "u_w",
  287. "value": "1920"
  288. },
  289. {
  290. "key": "u_ah",
  291. "value": "1080"
  292. },
  293. {
  294. "key": "u_aw",
  295. "value": "1920"
  296. },
  297. {
  298. "key": "u_cd",
  299. "value": "24"
  300. },
  301. {
  302. "key": "bc",
  303. "value": "31"
  304. },
  305. {
  306. "key": "bih",
  307. "value": "969"
  308. },
  309. {
  310. "key": "biw",
  311. "value": "944"
  312. },
  313. {
  314. "key": "brdim",
  315. "value": "-269,-1080,-269,-1080,1920,-1080,1920,1080,944,969"
  316. },
  317. {
  318. "key": "vis",
  319. "value": "1"
  320. },
  321. {
  322. "key": "wgl",
  323. "value": "true"
  324. },
  325. {
  326. "key": "ca_type",
  327. "value": "image"
  328. }
  329. ],
  330. "bid": "ANyPxKpfiaAf-DBzNeKLgkceMEA9UIeCWFRTRm4AQMCuejhI3PGwDB1jizQIX60YcEYtt_CX7tZWAbYerQ-rWLvV7y_KCLkBww"
  331. }
  332. },
  333. # "browseId": browse_id,
  334. "params": "EgZ2aWRlb3PyBgQKAjoA",
  335. "continuation": cls.continuation
  336. })
  337. headers = {
  338. 'authority': 'www.youtube.com',
  339. 'accept': '*/*',
  340. 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  341. 'cache-control': 'no-cache',
  342. 'content-type': 'application/json',
  343. 'cookie': 'VISITOR_INFO1_LIVE=kh6_Vpx5wHY; YSC=UupqFrWvAR0; DEVICE_INFO=ChxOekU1TlRReU5qWTBOVFExTVRRNU5qRTBOdz09EOmU7Z4GGOmU7Z4G; PREF=tz=Asia.Shanghai; ST-1kg1gfd=itct=CBcQ8JMBGAYiEwiNhIXX9IL9AhUFSUwIHWnnDks%3D&csn=MC4zNzI3MDcwMDA1Mjg4NzE5Ng..&endpoint=%7B%22clickTrackingParams%22%3A%22CBcQ8JMBGAYiEwiNhIXX9IL9AhUFSUwIHWnnDks%3D%22%2C%22commandMetadata%22%3A%7B%22webCommandMetadata%22%3A%7B%22url%22%3A%22%2F%40chinatravel5971%2Fvideos%22%2C%22webPageType%22%3A%22WEB_PAGE_TYPE_CHANNEL%22%2C%22rootVe%22%3A3611%2C%22apiUrl%22%3A%22%2Fyoutubei%2Fv1%2Fbrowse%22%7D%7D%2C%22browseEndpoint%22%3A%7B%22browseId%22%3A%22UCpLXnfBCNhj8KLnt54RQMKA%22%2C%22params%22%3A%22EgZ2aWRlb3PyBgQKAjoA%22%2C%22canonicalBaseUrl%22%3A%22%2F%40chinatravel5971%22%7D%7D',
  344. 'origin': 'https://www.youtube.com',
  345. 'pragma': 'no-cache',
  346. 'referer': f'https://www.youtube.com/{out_uid}/featured',
  347. 'sec-ch-ua': '"Not_A Brand";v="99", "Chromium";v="109", "Google Chrome";v="109.0.5414.87"',
  348. 'sec-ch-ua-arch': '"arm"',
  349. 'sec-ch-ua-bitness': '"64"',
  350. 'sec-ch-ua-full-version': '"109.0.1518.52"',
  351. 'sec-ch-ua-full-version-list': '"Not_A Brand";v="99.0.0.0", "Microsoft Edge";v="109.0.1518.52", "Chromium";v="109.0.5414.87"',
  352. 'sec-ch-ua-mobile': '?0',
  353. 'sec-ch-ua-model': '',
  354. 'sec-ch-ua-platform': '"macOS"',
  355. 'sec-ch-ua-platform-version': '"12.4.0"',
  356. 'sec-ch-ua-wow64': '?0',
  357. 'sec-fetch-dest': 'empty',
  358. 'sec-fetch-mode': 'same-origin',
  359. 'sec-fetch-site': 'same-origin',
  360. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36',
  361. 'x-goog-visitor-id': 'CgtraDZfVnB4NXdIWSi6mIOfBg%3D%3D',
  362. 'x-youtube-bootstrap-logged-in': 'false',
  363. 'x-youtube-client-name': '1',
  364. 'x-youtube-client-version': '2.20230201.01.00'
  365. }
  366. try:
  367. response = requests.post(url=url, headers=headers, data=payload)
  368. # Common.logger(log_type, crawler).info(f"get_feeds_response:{response.json()}\n")
  369. cls.continuation = response.json()['trackingParams']
  370. if response.status_code != 200:
  371. Common.logger(log_type, crawler).warning(f'get_feeds_response:{response.text}\n')
  372. elif 'continuationContents' not in response.text and 'onResponseReceivedActions' not in response.text:
  373. Common.logger(log_type, crawler).warning(f'get_feeds_response:{response.text}\n')
  374. elif 'continuationContents' in response.json():
  375. # Common.logger(log_type, crawler).info("'continuationContents' in response.json()\n")
  376. if 'richGridContinuation' not in response.json()['continuationContents']:
  377. # Common.logger(log_type, crawler).warning(f"'richGridContinuation' not in response.json()['continuationContents']\n")
  378. Common.logger(log_type, crawler).warning(
  379. f'get_feeds_response:{response.json()["continuationContents"]}\n')
  380. elif 'contents' not in response.json()['continuationContents']['richGridContinuation']:
  381. Common.logger(log_type, crawler).warning(
  382. f'get_feeds_response:{response.json()["continuationContents"]["richGridContinuation"]}\n')
  383. elif 'contents' in response.json()["continuationContents"]["richGridContinuation"]:
  384. feeds = response.json()["continuationContents"]["richGridContinuation"]['contents']
  385. return feeds
  386. elif 'onResponseReceivedActions' in response.json():
  387. Common.logger(log_type, crawler).info("'onResponseReceivedActions' in response.json()\n")
  388. if len(response.json()['onResponseReceivedActions']) == 0:
  389. Common.logger(log_type, crawler).warning(
  390. f'get_feeds_response:{response.json()["onResponseReceivedActions"]}\n')
  391. elif 'appendContinuationItemsAction' not in response.json()['onResponseReceivedActions'][0]:
  392. Common.logger(log_type, crawler).warning(
  393. f'get_feeds_response:{response.json()["onResponseReceivedActions"][0]}\n')
  394. elif 'continuationItems' not in response.json()['onResponseReceivedActions'][0][
  395. 'appendContinuationItemsAction']:
  396. Common.logger(log_type, crawler).warning(
  397. f'get_feeds_response:{response.json()["onResponseReceivedActions"][0]["appendContinuationItemsAction"]}\n')
  398. elif len(response.json()['onResponseReceivedActions'][0]['appendContinuationItemsAction'][
  399. 'continuationItems']) == 0:
  400. Common.logger(log_type, crawler).warning(
  401. f'get_feeds_response:{response.json()["onResponseReceivedActions"][0]["appendContinuationItemsAction"]["continuationItems"]}\n')
  402. else:
  403. feeds = response.json()["onResponseReceivedActions"][0]["appendContinuationItemsAction"][
  404. "continuationItems"]
  405. return feeds
  406. else:
  407. Common.logger(log_type, crawler).info('feeds is None\n')
  408. except Exception as e:
  409. Common.logger(log_type, crawler).error(f'get_feeds异常:{e}\n')
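# get_first_page(): scrapes ytInitialData out of the user's /videos page HTML; tabs[1] is assumed to be the "Videos" tab.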
  410. @classmethod
  411. def get_first_page(cls, user_url):
  412. try:
  413. res = requests.get(url=user_url, headers=cls.headers)
  414. info = re.findall(r'var ytInitialData = (.*?);</script>', res.text, re.S)[0]
  415. ytInitialData = json.loads(info)
  416. video_list = \
  417. ytInitialData['contents']['twoColumnBrowseResultsRenderer']['tabs'][1]['tabRenderer']['content'][
  418. 'richGridRenderer']['contents']
  419. except Exception as e:
  420. video_list = []
  421. return video_list
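# get_next_page(): follows the continuation token recursively through the InnerTube browse endpoint, downloading each video published within the last 7 days and stopping at the first older one.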
  422. @classmethod
  423. def get_next_page(cls, log_type, crawler, strategy, oss_endpoint, env, out_uid, our_uid,
  424. machine, out_user_url, continuation):
  425. post_url = "https://www.youtube.com/youtubei/v1/browse?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8&prettyPrint=false"
  426. payload = json.dumps({
  427. "context": {
  428. "client": {
  429. "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36,gzip(gfe)",
  430. "clientName": "WEB",
  431. "clientVersion": "2.20230221.06.00",
  432. "osName": "Macintosh",
  433. "osVersion": "10_15_7",
  434. "originalUrl": "https://www.youtube.com/@wongkim728/videos",
  435. "screenPixelDensity": 2,
  436. "platform": "DESKTOP",
  437. "clientFormFactor": "UNKNOWN_FORM_FACTOR",
  438. "configInfo": {
  439. "appInstallData": "CKWy258GEOWg_hIQzN-uBRC4rP4SEOf3rgUQzPWuBRCi7K4FEMiJrwUQieiuBRDshq8FENrprgUQ4tSuBRD-7q4FEKOArwUQgt2uBRC2nP4SEJT4rgUQuIuuBRCH3a4FELjUrgUQjqj-EhCR-PwS"
  440. },
  441. "screenDensityFloat": 2,
  442. "timeZone": "Asia/Shanghai",
  443. "browserName": "Chrome",
  444. "browserVersion": "110.0.0.0",
  445. "acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
  446. "deviceExperimentId": "ChxOekl3TWpVek9UQXpPVE13TnpJd056a3pNZz09EKWy258GGJie0p8G",
  447. "screenWidthPoints": 576,
  448. "screenHeightPoints": 764,
  449. "utcOffsetMinutes": 480,
  450. "userInterfaceTheme": "USER_INTERFACE_THEME_LIGHT",
  451. "connectionType": "CONN_CELLULAR_4G",
  452. "memoryTotalKbytes": "8000000",
  453. "mainAppWebInfo": {
  454. "graftUrl": out_user_url,
  455. "pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_CAN_BE_INSTALLED",
  456. "webDisplayMode": "WEB_DISPLAY_MODE_FULLSCREEN",
  457. "isWebNativeShareAvailable": False
  458. }
  459. },
  460. "user": {
  461. "lockedSafetyMode": False
  462. },
  463. "request": {
  464. "useSsl": True,
  465. "internalExperimentFlags": [],
  466. "consistencyTokenJars": []
  467. },
  468. "clickTracking": {
  469. "clickTrackingParams": ""
  470. },
  471. "adSignalsInfo": {
  472. "params": [],
  473. "bid": "ANyPxKo8EXfKNGm3gYLAqhR5HA90FSKMvQf43tk3KV_XUWB5xi_0OxAo2TJTfoVx_516NRxz0qwRg-1x2kD-IVt7LPKrRHkJBA"
  474. }
  475. },
  476. "continuation": continuation
  477. })
  478. headers = {
  479. # 'authorization': 'SAPISIDHASH 1677121838_f5055bd4b4c242d18af423b37ac0f556bf1dfc30',
  480. 'content-type': 'application/json',
  481. 'cookie': 'VISITOR_INFO1_LIVE=HABZsLFdU40; DEVICE_INFO=ChxOekl3TWpVek9UQXpPVE13TnpJd056a3pNZz09EJie0p8GGJie0p8G; PREF=f4=4000000&tz=Asia.Shanghai; HSID=AxFp7ylWWebUZYqrl; SSID=ANHuSQMqvVcV0vVNn; APISID=AkwZgjPvFZ6LZCrE/Aiv0K-2rEUzY1bH1u; SAPISID=8yRrBMHYXAhqkybH/AEFGJvzZ3tPalnTy0; __Secure-1PAPISID=8yRrBMHYXAhqkybH/AEFGJvzZ3tPalnTy0; __Secure-3PAPISID=8yRrBMHYXAhqkybH/AEFGJvzZ3tPalnTy0; SID=TwjWkM4mrKb4o8pRKbyQVqELjNU43ZL0bF8QB2hdTI9z05T4Koo9aQoNQfX1AiGFWeD7WA.; __Secure-1PSID=TwjWkM4mrKb4o8pRKbyQVqELjNU43ZL0bF8QB2hdTI9z05T4bs4qvvXffLLTXq_VYw0XLw.; __Secure-3PSID=TwjWkM4mrKb4o8pRKbyQVqELjNU43ZL0bF8QB2hdTI9z05T4cNwzpudzvCglfQ5A1FJnog.; LOGIN_INFO=AFmmF2swRAIgO4TvR9xxWoHPgrGoGAEVo-P8Slqem__vIdF_oajjRiECIFiq4YtbL_IQGCbkjrHsWkWH6OpzKd8RlgdS6qNurR0Q:QUQ3MjNmejV5WkRVUmZXVlFjbjY0dW1aVGpoZkZQdmxYamIzV01zc0lmT3JiQl9ldVYwc0t4dlNkbWpoVEdJMHVaWjZXVEt3ZERQeUppU3AyNmR6ckFucWltZU5LNmZjQ3lHUEtKTDBzSlo5WXpJQzF3UlNCVlp2Q1ZKVmxtRk05OHRuWFFiWGphcFpPblFOUURWTlVxVGtBazVjcmVtS2pR; YSC=CtX0f3NennA; SIDCC=AFvIBn9aXC4vNCbg5jPzjbC8LMYCBVx_dy8uJO20b-768rmRfP9f5BqQ_xXspPemecVq29qZ7A; __Secure-1PSIDCC=AFvIBn-4TD_lPaKgbmYAGO6hZluLgSgbWgb7XAcaeNG6982LIIpS_Gb9vkqHTBMyCGvb4x7m6jk; __Secure-3PSIDCC=AFvIBn9ypvGX15qq4CsnsuhWTaXa9yMTxWMWbIDXtr6L3XZD81XBUQ0IMUv9ZKh9mf8NEbSvOy0; SIDCC=AFvIBn_DwLbohF2llhq4EQjFDFA3n9-_AK_7ITJsTZtCeYwy43J8KCYUPfY7ghqX9s-Qq5dOIQ; __Secure-1PSIDCC=AFvIBn-7x_HhxbmDkOzXew-sXAEWVuUGpglr8rypU623IyO8Y9OungcqMkuxBZQ2vr6G7x9UcxM; __Secure-3PSIDCC=AFvIBn-7aSYRxZkCKZp7-Mdn9PwbW4CUtXD0ok0nCvPIZXfkFrN9VqN1BHkI1fUaoIo_8YCjwRs',
  482. 'origin': 'https://www.youtube.com',
  483. 'referer': out_user_url,
  484. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36',
  485. }
  486. try:
  487. res = requests.request("POST", post_url, headers=headers, data=payload).json()
  488. video_infos = res['onResponseReceivedActions'][0]['appendContinuationItemsAction']['continuationItems']
  489. for data in video_infos:
  490. if 'richItemRenderer' in data:
  491. video_id = data["richItemRenderer"]["content"]['videoRenderer']['videoId']
  492. video_dict = cls.get_video_info(log_type, crawler, out_uid, video_id, machine)
  493. # video_dict = cls.parse_video(video_dict, log_type, crawler, out_uid, video_id, machine)
  494. # Publish time within the last 7 days
  495. publish_time = int(time.mktime(time.strptime(video_dict['publish_time'], "%Y-%m-%d")))
  496. if int(time.time()) - publish_time <= 3600 * 24 * 7:
  497. cls.download_publish(log_type, crawler, video_dict, strategy, our_uid, env, oss_endpoint,
  498. machine)
  499. else:
  500. Common.logger(log_type, crawler).info('发布时间超过7天\n')
  501. return
  502. else:
  503. continuation = cls.get_continuation(data)
  504. cls.get_next_page(log_type, crawler, strategy, oss_endpoint, env, out_uid, our_uid,
  505. machine, out_user_url, continuation)
  506. except Exception as e:
  Common.logger(log_type, crawler).error(f'get_next_page异常:{e}\n')
  507. return
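# get_videos(): same flow as get_next_page(), but seeded from the first HTML page of the channel and handing off to get_next_page() once a continuation entry is met.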
  508. @classmethod
  509. def get_videos(cls, log_type, crawler, strategy, oss_endpoint, env, out_uid, our_uid,
  510. machine, out_user_url):
  511. try:
  512. feeds = cls.get_first_page(out_user_url)
  513. for data in feeds:
  514. if 'richItemRenderer' in data:
  515. video_id = data["richItemRenderer"]["content"]['videoRenderer']['videoId']
  516. video_dict = cls.get_video_info(log_type, crawler, out_uid, video_id, machine)
  517. # Publish time within the last 7 days
  518. publish_time = int(time.mktime(time.strptime(video_dict['publish_time'], "%Y-%m-%d")))
  519. if int(time.time()) - publish_time <= 3600 * 24 * 7:
  520. cls.download_publish(log_type, crawler, video_dict, strategy, our_uid, env, oss_endpoint,
  521. machine)
  522. else:
  523. Common.logger(log_type, crawler).info('发布时间超过7天\n')
  524. return
  525. else:
  526. continuation = cls.get_continuation(data)
  527. cls.get_next_page(log_type, crawler, strategy, oss_endpoint, env, out_uid, our_uid,
  528. machine, out_user_url, continuation=continuation)
  529. except Exception as e:
  530. Common.logger(log_type, crawler).error(f"get_videos异常:{e}\n")
  531. @classmethod
  532. def filter_emoji(cls, title):
  533. # Strip emoji (characters outside the BMP / surrogate pairs) from the title
  534. try:
  535. co = re.compile(u'[\U00010000-\U0010ffff]')
  536. except re.error:
  537. co = re.compile(u'[\uD800-\uDBFF][\uDC00-\uDFFF]')
  538. return co.sub("", title)
  539. @classmethod
  540. def is_contain_chinese(cls, strword):
  541. for ch in strword:
  542. if u'\u4e00' <= ch <= u'\u9fff':
  543. return True
  544. return False
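# parse_video(): duplicates the response-parsing half of get_video_info(); it is only referenced from a commented-out call in get_next_page().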
  545. @classmethod
  546. def parse_video(cls, video_dict, log_type, crawler, out_uid, video_id, machine):
  547. try:
  548. if 'streamingData' not in video_dict:
  549. Common.logger(log_type, crawler).warning(f"get_video_info_response:{video_dict}\n")
  550. elif 'videoDetails' not in video_dict:
  551. Common.logger(log_type, crawler).warning(f"get_video_info_response:{video_dict}\n")
  552. elif 'microformat' not in video_dict:
  553. Common.logger(log_type, crawler).warning(f"get_video_info_response:{video_dict}\n")
  554. else:
  555. playerMicroformatRenderer = video_dict['microformat']['playerMicroformatRenderer']
  556. videoDetails = video_dict['videoDetails']
  557. # streamingData = response.json()['streamingData']
  558. # video_title
  559. if 'title' not in videoDetails:
  560. video_title = ''
  561. else:
  562. video_title = videoDetails['title']
  563. video_title = cls.filter_emoji(video_title)
  564. # if Translate.is_contains_chinese(video_title) is False:
  565. if not cls.is_contain_chinese(video_title):
  566. video_title = Translate.google_translate(video_title, machine) \
  567. .strip().replace("\\", "").replace(" ", "").replace("\n", "") \
  568. .replace("/", "").replace("\r", "").replace("&NBSP", "").replace("&", "") \
  569. .replace(";", "").replace("amp;", "") # 自动翻译标题为中文
  570. if 'lengthSeconds' not in videoDetails:
  571. duration = 0
  572. else:
  573. duration = int(videoDetails['lengthSeconds'])
  574. # play_cnt
  575. if 'viewCount' not in videoDetails:
  576. play_cnt = 0
  577. else:
  578. play_cnt = int(videoDetails['viewCount'])
  579. # publish_time
  580. if 'publishDate' not in playerMicroformatRenderer:
  581. publish_time = ''
  582. else:
  583. publish_time = playerMicroformatRenderer['publishDate']
  584. if publish_time == '':
  585. publish_time_stamp = 0
  586. elif ':' in publish_time:
  587. publish_time_stamp = int(time.mktime(time.strptime(publish_time, "%Y-%m-%d %H:%M:%S")))
  588. else:
  589. publish_time_stamp = int(time.mktime(time.strptime(publish_time, "%Y-%m-%d")))
  590. # user_name
  591. if 'author' not in videoDetails:
  592. user_name = ''
  593. else:
  594. user_name = videoDetails['author']
  595. # cover_url
  596. if 'thumbnail' not in videoDetails:
  597. cover_url = ''
  598. elif 'thumbnails' not in videoDetails['thumbnail']:
  599. cover_url = ''
  600. elif len(videoDetails['thumbnail']['thumbnails']) == 0:
  601. cover_url = ''
  602. elif 'url' not in videoDetails['thumbnail']['thumbnails'][-1]:
  603. cover_url = ''
  604. else:
  605. cover_url = videoDetails['thumbnail']['thumbnails'][-1]['url']
  606. # video_url
  607. # if 'formats' not in streamingData:
  608. # video_url = ''
  609. # elif len(streamingData['formats']) == 0:
  610. # video_url = ''
  611. # elif 'url' not in streamingData['formats'][-1]:
  612. # video_url = ''
  613. # else:
  614. # video_url = streamingData['formats'][-1]['url']
  615. video_url = f"https://www.youtube.com/watch?v={video_id}"
  616. Common.logger(log_type, crawler).info(f'video_title:{video_title}')
  617. Common.logger(log_type, crawler).info(f'video_id:{video_id}')
  618. Common.logger(log_type, crawler).info(f'play_cnt:{play_cnt}')
  619. Common.logger(log_type, crawler).info(f'publish_time:{publish_time}')
  620. Common.logger(log_type, crawler).info(f'user_name:{user_name}')
  621. Common.logger(log_type, crawler).info(f'cover_url:{cover_url}')
  622. Common.logger(log_type, crawler).info(f'video_url:{video_url}')
  623. video_dict = {
  624. 'video_title': video_title,
  625. 'video_id': video_id,
  626. 'duration': duration,
  627. 'play_cnt': play_cnt,
  628. 'publish_time': publish_time,
  629. 'publish_time_stamp': publish_time_stamp,
  630. 'user_name': user_name,
  631. 'out_uid': out_uid,
  632. 'cover_url': cover_url,
  633. 'video_url': video_url,
  634. }
  635. return video_dict
  636. except Exception as e:
  637. Common.logger(log_type, crawler).error(f"parse_video异常:{e}\n")
  638. @classmethod
  639. def get_video_info(cls, log_type, crawler, out_uid, video_id, machine):
  640. try:
  641. url = "https://www.youtube.com/youtubei/v1/player?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8&prettyPrint=false"
  642. payload = json.dumps({
  643. "context": {
  644. "client": {
  645. "hl": "zh-CN",
  646. "gl": "US",
  647. "remoteHost": "38.93.247.21",
  648. "deviceMake": "Apple",
  649. "deviceModel": "",
  650. "visitorData": "CgtraDZfVnB4NXdIWSjkzoefBg%3D%3D",
  651. "userAgent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36,gzip(gfe)",
  652. "clientName": "WEB",
  653. "clientVersion": "2.20230201.01.00",
  654. "osName": "Macintosh",
  655. "osVersion": "10_15_7",
  656. "originalUrl": f"https://www.youtube.com/watch?v={video_id}",
  657. "platform": "DESKTOP",
  658. "clientFormFactor": "UNKNOWN_FORM_FACTOR",
  659. "configInfo": {
  660. "appInstallData": "COTOh58GEPuj_hIQ1-SuBRC4i64FEMzfrgUQgt2uBRCi7K4FEOLUrgUQzPWuBRCKgK8FEOSg_hIQtpz-EhDa6a4FEP7urgUQieiuBRDn964FELjUrgUQlPiuBRCH3a4FELfgrgUQ76P-EhDJya4FEJan_hIQkfj8Eg%3D%3D"
  661. },
  662. "timeZone": "Asia/Shanghai",
  663. "browserName": "Chrome",
  664. "browserVersion": "109.0.0.0",
  665. "acceptHeader": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
  666. "deviceExperimentId": "ChxOekU1TlRReU5qWTBOVFExTVRRNU5qRTBOdz09EOTOh58GGOmU7Z4G",
  667. "screenWidthPoints": 1037,
  668. "screenHeightPoints": 969,
  669. "screenPixelDensity": 1,
  670. "screenDensityFloat": 1,
  671. "utcOffsetMinutes": 480,
  672. "userInterfaceTheme": "USER_INTERFACE_THEME_LIGHT",
  673. "memoryTotalKbytes": "8000000",
  674. "clientScreen": "WATCH",
  675. "mainAppWebInfo": {
  676. "graftUrl": f"/watch?v={video_id}",
  677. "pwaInstallabilityStatus": "PWA_INSTALLABILITY_STATUS_CAN_BE_INSTALLED",
  678. "webDisplayMode": "WEB_DISPLAY_MODE_FULLSCREEN",
  679. "isWebNativeShareAvailable": True
  680. }
  681. },
  682. "user": {
  683. "lockedSafetyMode": False
  684. },
  685. "request": {
  686. "useSsl": True,
  687. "internalExperimentFlags": [],
  688. "consistencyTokenJars": []
  689. },
  690. "clickTracking": {
  691. "clickTrackingParams": "CIwBEKQwGAYiEwipncqx3IL9AhXs4cQKHbKZDO4yB3JlbGF0ZWRInsS1qbGFtIlUmgEFCAEQ-B0="
  692. },
  693. "adSignalsInfo": {
  694. "params": [
  695. {
  696. "key": "dt",
  697. "value": "1675749222611"
  698. },
  699. {
  700. "key": "flash",
  701. "value": "0"
  702. },
  703. {
  704. "key": "frm",
  705. "value": "0"
  706. },
  707. {
  708. "key": "u_tz",
  709. "value": "480"
  710. },
  711. {
  712. "key": "u_his",
  713. "value": "3"
  714. },
  715. {
  716. "key": "u_h",
  717. "value": "1080"
  718. },
  719. {
  720. "key": "u_w",
  721. "value": "1920"
  722. },
  723. {
  724. "key": "u_ah",
  725. "value": "1080"
  726. },
  727. {
  728. "key": "u_aw",
  729. "value": "1920"
  730. },
  731. {
  732. "key": "u_cd",
  733. "value": "24"
  734. },
  735. {
  736. "key": "bc",
  737. "value": "31"
  738. },
  739. {
  740. "key": "bih",
  741. "value": "969"
  742. },
  743. {
  744. "key": "biw",
  745. "value": "1037"
  746. },
  747. {
  748. "key": "brdim",
  749. "value": "-269,-1080,-269,-1080,1920,-1080,1920,1080,1037,969"
  750. },
  751. {
  752. "key": "vis",
  753. "value": "1"
  754. },
  755. {
  756. "key": "wgl",
  757. "value": "true"
  758. },
  759. {
  760. "key": "ca_type",
  761. "value": "image"
  762. }
  763. ],
  764. "bid": "ANyPxKop8SijebwUCq4ZfKbJwlSjVQa_RTdS6c6a6WPYpCKnxpWCJ33B1SzRuSXjSfH9O2MhURebAs0CngRg6B4nOjBpeJDKgA"
  765. }
  766. },
  767. "videoId": str(video_id),
  768. "playbackContext": {
  769. "contentPlaybackContext": {
  770. "currentUrl": f"/watch?v={video_id}",
  771. "vis": 0,
  772. "splay": False,
  773. "autoCaptionsDefaultOn": False,
  774. "autonavState": "STATE_NONE",
  775. "html5Preference": "HTML5_PREF_WANTS",
  776. "signatureTimestamp": 19394,
  777. "referer": f"https://www.youtube.com/watch?v={video_id}",
  778. "lactMilliseconds": "-1",
  779. "watchAmbientModeContext": {
  780. "watchAmbientModeEnabled": True
  781. }
  782. }
  783. },
  784. "racyCheckOk": False,
  785. "contentCheckOk": False
  786. })
  787. headers = {
  788. 'authority': 'www.youtube.com',
  789. 'accept': '*/*',
  790. 'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
  791. 'cache-control': 'no-cache',
  792. 'content-type': 'application/json',
  793. 'cookie': f'VISITOR_INFO1_LIVE=kh6_Vpx5wHY; YSC=UupqFrWvAR0; DEVICE_INFO=ChxOekU1TlRReU5qWTBOVFExTVRRNU5qRTBOdz09EOmU7Z4GGOmU7Z4G; PREF=tz=Asia.Shanghai; ST-180dxzo=itct=CIwBEKQwGAYiEwipncqx3IL9AhXs4cQKHbKZDO4yB3JlbGF0ZWRInsS1qbGFtIlUmgEFCAEQ-B0%3D&csn=MC41MTQ1NTQzMTE3NTA4MjY0&endpoint=%7B%22clickTrackingParams%22%3A%22CIwBEKQwGAYiEwipncqx3IL9AhXs4cQKHbKZDO4yB3JlbGF0ZWRInsS1qbGFtIlUmgEFCAEQ-B0%3D%22%2C%22commandMetadata%22%3A%7B%22webCommandMetadata%22%3A%7B%22url%22%3A%22%2Fwatch%3Fv%3D{video_id}%22%2C%22webPageType%22%3A%22WEB_PAGE_TYPE_WATCH%22%2C%22rootVe%22%3A3832%7D%7D%2C%22watchEndpoint%22%3A%7B%22videoId%22%3A%22{video_id}%22%2C%22nofollow%22%3Atrue%2C%22watchEndpointSupportedOnesieConfig%22%3A%7B%22html5PlaybackOnesieConfig%22%3A%7B%22commonConfig%22%3A%7B%22url%22%3A%22https%3A%2F%2Frr5---sn-nx5s7n76.googlevideo.com%2Finitplayback%3Fsource%3Dyoutube%26oeis%3D1%26c%3DWEB%26oad%3D3200%26ovd%3D3200%26oaad%3D11000%26oavd%3D11000%26ocs%3D700%26oewis%3D1%26oputc%3D1%26ofpcc%3D1%26msp%3D1%26odepv%3D1%26id%3D38654ad085c12212%26ip%3D38.93.247.21%26initcwndbps%3D11346250%26mt%3D1675748964%26oweuc%3D%26pxtags%3DCg4KAnR4EggyNDQ1MTI4OA%26rxtags%3DCg4KAnR4EggyNDQ1MTI4Ng%252CCg4KAnR4EggyNDQ1MTI4Nw%252CCg4KAnR4EggyNDQ1MTI4OA%252CCg4KAnR4EggyNDQ1MTI4OQ%22%7D%7D%7D%7D%7D',
  794. 'origin': 'https://www.youtube.com',
  795. 'pragma': 'no-cache',
  796. 'referer': f'https://www.youtube.com/watch?v={video_id}',
  797. 'sec-ch-ua': '"Not_A Brand";v="99", "Chromium";v="109", "Google Chrome";v="109.0.5414.87"',
  798. 'sec-ch-ua-arch': '"arm"',
  799. 'sec-ch-ua-bitness': '"64"',
  800. 'sec-ch-ua-full-version': '"109.0.1518.52"',
  801. 'sec-ch-ua-full-version-list': '"Not_A Brand";v="99.0.0.0", "Microsoft Edge";v="109.0.1518.52", "Chromium";v="109.0.5414.87"',
  802. 'sec-ch-ua-mobile': '?0',
  803. 'sec-ch-ua-model': '',
  804. 'sec-ch-ua-platform': '"macOS"',
  805. 'sec-ch-ua-platform-version': '"12.4.0"',
  806. 'sec-ch-ua-wow64': '?0',
  807. 'sec-fetch-dest': 'empty',
  808. 'sec-fetch-mode': 'same-origin',
  809. 'sec-fetch-site': 'same-origin',
  810. 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36',
  811. 'x-goog-visitor-id': 'CgtraDZfVnB4NXdIWSjkzoefBg%3D%3D',
  812. 'x-youtube-bootstrap-logged-in': 'false',
  813. 'x-youtube-client-name': '1',
  814. 'x-youtube-client-version': '2.20230201.01.00'
  815. }
  816. response = requests.post(url=url, headers=headers, data=payload)
  817. if response.status_code != 200:
  818. Common.logger(log_type, crawler).warning(f"get_video_info_response:{response.text}\n")
  819. elif 'streamingData' not in response.json():
  820. Common.logger(log_type, crawler).warning(f"get_video_info_response:{response.json()}\n")
  821. elif 'videoDetails' not in response.json():
  822. Common.logger(log_type, crawler).warning(f"get_video_info_response:{response.json()}\n")
  823. elif 'microformat' not in response.json():
  824. Common.logger(log_type, crawler).warning(f"get_video_info_response:{response.json()}\n")
  825. else:
  826. playerMicroformatRenderer = response.json()['microformat']['playerMicroformatRenderer']
  827. videoDetails = response.json()['videoDetails']
  828. # streamingData = response.json()['streamingData']
  829. # video_title
  830. if 'title' not in videoDetails:
  831. video_title = ''
  832. else:
  833. video_title = videoDetails['title'].replace('"', '').replace("'", '')
  834. video_title = cls.filter_emoji(video_title)
  835. if not cls.is_contain_chinese(video_title):
  836. video_title = Translate.google_translate(video_title, machine) \
  837. .strip().replace("\\", "").replace(" ", "").replace("\n", "") \
  838. .replace("/", "").replace("\r", "").replace("&NBSP", "").replace("&", "") \
  839. .replace(";", "").replace("amp;", "") # 自动翻译标题为中文
  840. if 'lengthSeconds' not in videoDetails:
  841. duration = 0
  842. else:
  843. duration = int(videoDetails['lengthSeconds'])
  844. # play_cnt
  845. if 'viewCount' not in videoDetails:
  846. play_cnt = 0
  847. else:
  848. play_cnt = int(videoDetails['viewCount'])
  849. # publish_time
  850. if 'publishDate' not in playerMicroformatRenderer:
  851. publish_time = ''
  852. else:
  853. publish_time = playerMicroformatRenderer['publishDate']
  854. if publish_time == '':
  855. publish_time_stamp = 0
  856. elif ':' in publish_time:
  857. publish_time_stamp = int(time.mktime(time.strptime(publish_time, "%Y-%m-%d %H:%M:%S")))
  858. else:
  859. publish_time_stamp = int(time.mktime(time.strptime(publish_time, "%Y-%m-%d")))
  860. # user_name
  861. if 'author' not in videoDetails:
  862. user_name = ''
  863. else:
  864. user_name = videoDetails['author']
  865. # cover_url
  866. if 'thumbnail' not in videoDetails:
  867. cover_url = ''
  868. elif 'thumbnails' not in videoDetails['thumbnail']:
  869. cover_url = ''
  870. elif len(videoDetails['thumbnail']['thumbnails']) == 0:
  871. cover_url = ''
  872. elif 'url' not in videoDetails['thumbnail']['thumbnails'][-1]:
  873. cover_url = ''
  874. else:
  875. cover_url = videoDetails['thumbnail']['thumbnails'][-1]['url']
  876. # video_url
  877. # if 'formats' not in streamingData:
  878. # video_url = ''
  879. # elif len(streamingData['formats']) == 0:
  880. # video_url = ''
  881. # elif 'url' not in streamingData['formats'][-1]:
  882. # video_url = ''
  883. # else:
  884. # video_url = streamingData['formats'][-1]['url']
  885. video_url = f"https://www.youtube.com/watch?v={video_id}"
  886. Common.logger(log_type, crawler).info(f'video_title:{video_title}')
  887. Common.logger(log_type, crawler).info(f'video_id:{video_id}')
  888. Common.logger(log_type, crawler).info(f'play_cnt:{play_cnt}')
  889. Common.logger(log_type, crawler).info(f'publish_time:{publish_time}')
  890. Common.logger(log_type, crawler).info(f'user_name:{user_name}')
  891. Common.logger(log_type, crawler).info(f'cover_url:{cover_url}')
  892. Common.logger(log_type, crawler).info(f'video_url:{video_url}')
  893. video_dict = {
  894. 'video_title': video_title,
  895. 'video_id': video_id,
  896. 'duration': duration,
  897. 'play_cnt': play_cnt,
  898. 'publish_time': publish_time,
  899. 'publish_time_stamp': publish_time_stamp,
  900. 'user_name': user_name,
  901. 'out_uid': out_uid,
  902. 'cover_url': cover_url,
  903. 'video_url': video_url,
  904. }
  905. return video_dict
  906. except Exception as e:
  907. Common.logger(log_type, crawler).error(f"get_video_info异常:{e}\n")
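# repeat_video(): de-duplication check - returns how many crawler_video rows already hold this platform + out_video_id.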
  908. @classmethod
  909. def repeat_video(cls, log_type, crawler, video_id, env, machine):
  910. sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
  911. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
  912. return len(repeat_video)
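# download_publish() pipeline: title filter words -> duration rule (60s-1200s) -> dedup via MySQL and the Feishu sheet "GVxlYk" -> download video and cover (width/height are hard-coded to 1280x720 since the ffmpeg probe is commented out) -> publish through Publish.upload_and_publish -> write the result back to Feishu and the crawler_video table.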
  913. @classmethod
  914. def download_publish(cls, log_type, crawler, video_dict, strategy, our_uid, env, oss_endpoint, machine):
  915. try:
  916. filter_words = get_config_from_mysql(log_type, crawler, env, text='filter', action='get_author_map')
  917. for filter_word in filter_words:
  918. if filter_word in video_dict['video_title']:
  919. Common.logger(log_type, crawler).info(f"标题已中过滤词:{video_dict['video_title']}\n")
  920. return
  921. if video_dict['video_title'] == '' or video_dict['video_url'] == '':
  922. Common.logger(log_type, crawler).info('无效视频\n')
  923. elif video_dict['duration'] > 1200 or video_dict['duration'] < 60:
  924. Common.logger(log_type, crawler).info(f"时长:{video_dict['duration']}不满足规则\n")
  925. # elif repeat_video is not None and len(repeat_video) != 0:
  926. elif cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
  927. Common.logger(log_type, crawler).info('视频已下载\n')
  928. elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, crawler, 'GVxlYk') for x in y]:
  929. Common.logger(log_type, crawler).info('视频已下载\n')
  930. else:
  931. # 下载视频
  932. Common.logger(log_type, crawler).info('开始下载视频...')
  933. # Common.download_method(log_type, crawler, 'video', video_dict['video_title'], video_dict['video_url'])
  934. Common.download_method(log_type, crawler, 'youtube_video', video_dict['video_title'],
  935. video_dict['video_url'])
  936. # ffmpeg_dict = Common.ffmpeg(log_type, crawler, f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
  937. # video_width = int(ffmpeg_dict['width'])
  938. # video_height = int(ffmpeg_dict['height'])
  939. # video_size = int(ffmpeg_dict['size'])
  940. video_width = 1280
  941. video_height = 720
  942. duration = int(video_dict['duration'])
  943. Common.logger(log_type, crawler).info(f'video_width:{video_width}')
  944. Common.logger(log_type, crawler).info(f'video_height:{video_height}')
  945. Common.logger(log_type, crawler).info(f'duration:{duration}')
  946. # Common.logger(log_type, crawler).info(f'video_size:{video_size}\n')
  947. video_dict['video_width'] = video_width
  948. video_dict['video_height'] = video_height
  949. video_dict['duration'] = duration
  950. video_dict['comment_cnt'] = 0
  951. video_dict['like_cnt'] = 0
  952. video_dict['share_cnt'] = 0
  953. video_dict['avatar_url'] = video_dict['cover_url']
  954. video_dict['session'] = f'youtube{int(time.time())}'
  955. rule = '1,2'
  956. # if duration < 60 or duration > 600:
  957. # # 删除视频文件夹
  958. # shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
  959. # Common.logger(log_type, crawler).info(f"时长:{video_dict['duration']}不满足抓取规则,删除成功\n")
  960. # return
  961. # if duration == 0 or duration is None:
  962. # # 删除视频文件夹
  963. # shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
  964. # Common.logger(log_type, crawler).info(f"视频下载出错,删除成功\n")
  965. # return
  966. # else:
  967. # 下载封面
  968. Common.download_method(log_type, crawler, 'cover', video_dict['video_title'], video_dict['cover_url'])
  969. # 保存视频文本信息
  970. Common.save_video_info(log_type, crawler, video_dict)
  971. # 上传视频
  972. Common.logger(log_type, crawler).info(f"开始上传视频")
  973. if env == 'dev':
  974. our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
  975. our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
  976. else:
  977. our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
  978. our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
  979. Common.logger(log_type, crawler).info("视频上传完成")
  980. if our_video_id is None:
  981. # 删除视频文件夹
  982. shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
  983. return
  984. # 视频信息保存至飞书
  985. Feishu.insert_columns(log_type, crawler, "GVxlYk", "ROWS", 1, 2)
  986. # 视频ID工作表,首行写入数据
  987. upload_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
  988. values = [[upload_time,
  989. "定向榜",
  990. video_dict['video_id'],
  991. video_dict['video_title'],
  992. our_video_link,
  993. video_dict['play_cnt'],
  994. video_dict['duration'],
  995. f'{video_width}*{video_height}',
  996. video_dict['publish_time'],
  997. video_dict['user_name'],
  998. video_dict['cover_url'],
  999. video_dict['video_url']
  1000. ]]
  1001. # time.sleep(1)
  1002. Feishu.update_values(log_type, crawler, "GVxlYk", "F2:Z2", values)
  1003. Common.logger(log_type, crawler).info('视频信息写入定向_已下载表成功\n')
  1004. # 视频信息保存数据库
  1005. sql = f""" insert into crawler_video(video_id,
  1006. user_id,
  1007. out_user_id,
  1008. platform,
  1009. strategy,
  1010. out_video_id,
  1011. video_title,
  1012. cover_url,
  1013. video_url,
  1014. duration,
  1015. publish_time,
  1016. play_cnt,
  1017. crawler_rule,
  1018. width,
  1019. height)
  1020. values({our_video_id},
  1021. "{our_uid}",
  1022. "{video_dict['out_uid']}",
  1023. "{cls.platform}",
  1024. "定向爬虫策略",
  1025. "{video_dict['video_id']}",
  1026. "{video_dict['video_title']}",
  1027. "{video_dict['cover_url']}",
  1028. "{video_dict['video_url']}",
  1029. {int(duration)},
  1030. "{video_dict['publish_time']}",
  1031. {int(video_dict['play_cnt'])},
  1032. "{rule}",
  1033. {int(video_width)},
  1034. {int(video_height)}) """
  1035. MysqlHelper.update_values(log_type, crawler, sql, env, machine)
  1036. Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
  1037. except Exception as e:
  1038. Common.logger(log_type, crawler).error(f"download_publish异常:{e}\n")
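# get_follow_videos(): entry point - loads the user list from MySQL (the Feishu variant is commented out), crawls each user's homepage via get_videos(), and resets the shared continuation token between users.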
  1039. @classmethod
  1040. def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
  1041. try:
  1042. # user_list = cls.get_user_from_feishu(log_type, crawler, 'c467d7', env, machine)
  1043. user_list = get_user_from_mysql(log_type, crawler, crawler, env, action='get_author_map')
  1044. if len(user_list) == 0:
  1045. Common.logger(log_type, crawler).warning('用户列表为空\n')
  1046. else:
  1047. for user_dict in user_list:
  1048. out_user_url = user_dict['spider_link']
  1049. out_uid = out_user_url.split('/')[3]
  1050. user_name = user_dict['nick_name']
  1051. our_uid = user_dict['media_id']
  1052. Common.logger(log_type, crawler).info(f'获取 {user_name} 主页视频\n')
  1053. cls.get_videos(log_type, crawler, strategy, oss_endpoint, env, out_uid, our_uid, machine,
  1054. out_user_url)
  1055. # Common.logger(log_type, crawler).info('休眠 10 秒')
  1056. # time.sleep(random.randint(1, 2))
  1057. cls.continuation = ''
  1058. except Exception as e:
  1059. Common.logger(log_type, crawler).error(f"get_follow_videos异常:{e}\n")
  1060. if __name__ == "__main__":
  1061. # print(YoutubeFollow.get_browse_id('follow', 'youtube', '@chinatravel5971', "local"))
  1062. # print(YoutubeFollow.get_user_from_feishu('follow', 'youtube', 'c467d7', 'dev', 'local'))
  1063. print(YoutubeFollow.get_user_from_feishu('follow', 'youtube', 'c467d7', 'prod', 'prod'))
  1064. # YoutubeFollow.get_out_user_info('follow', 'youtube', 'UC08jgxf119fzynp2uHCvZIg', '@weitravel')
  1065. # YoutubeFollow.get_video_info('follow', 'youtube', 'OGVK0IXBIhI')
  1066. # YoutubeFollow.get_follow_videos('follow', 'youtube', 'youtube_follow', 'hk', 'dev', 'local')
  1067. # print(YoutubeFollow.filter_emoji("姐妹倆一唱一和,完美配合,終於把大慶降服了😅😅#萌娃搞笑日常"))
  1068. # YoutubeFollow.repeat_video('follow', 'youtube', 4, "dev", "local")
  1069. # title = "'西部巡游220丨两人一车环游中国半年,需要花费多少钱? 2万公里吃住行费用总结'"
  1070. # title = "'Insanely Crowded Shanghai Yu Garden Lantern Festival Walk Tour 2023 人气爆棚的上海豫园元宵节漫步之行 4K'"
  1071. # print(title.strip().replace("\\", "").replace(" ", "").replace("\n", "").replace("/", "").replace("\r", "").replace("&NBSP", "").replace("&", ""))