kuaishou_follow.py
# -*- coding: utf-8 -*-
# @Author: wangkun
# @Time: 2023/2/24
import os
import random
import shutil
import sys
import time
import requests
import json
import urllib3
sys.path.append(os.getcwd())
from common.common import Common
from common.feishu import Feishu
from common.users import Users
from common.db import MysqlHelper
from common.publish import Publish


class Follow:
    platform = "快手"
    tag = "快手爬虫,定向爬虫策略"

    # Read one of the two download-rule sets from the Feishu sheet
    @classmethod
    def get_rule(cls, log_type, crawler, index):
        try:
            while True:
                rule_sheet = Feishu.get_values_batch(log_type, crawler, "3iqG4z")
                if rule_sheet is None:
                    Common.logger(log_type, crawler).warning("rule_sheet is None! 10秒后重新获取")
                    time.sleep(10)
                    continue
                # Rule set 1 occupies sheet rows 2-8, rule set 2 rows 10-16; each
                # value joins the comparator cell with the threshold cell.
                offset = 1 if index == 1 else 9
                rule_dict = {
                    "play_cnt": f"{rule_sheet[offset][1]}{rule_sheet[offset][2]}",
                    "video_width": f"{rule_sheet[offset + 1][1]}{rule_sheet[offset + 1][2]}",
                    "video_height": f"{rule_sheet[offset + 2][1]}{rule_sheet[offset + 2][2]}",
                    "like_cnt": f"{rule_sheet[offset + 3][1]}{rule_sheet[offset + 3][2]}",
                    "duration": f"{rule_sheet[offset + 4][1]}{rule_sheet[offset + 4][2]}",
                    "download_cnt": f"{rule_sheet[offset + 5][1]}{rule_sheet[offset + 5][2]}",
                    "publish_time": f"{rule_sheet[offset + 6][1]}{rule_sheet[offset + 6][2]}",
                }
                return rule_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_rule:{e}\n")
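
    # Shape of the returned dict (illustrative values only; the real thresholds
    # come from the sheet): every entry is a comparator joined to a number, e.g.
    # {"play_cnt": ">=5000", "duration": ">=40", "download_cnt": "<=10", ...},
    # which download_rule below evaluates against a video's actual stats.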

    @classmethod
    def download_rule(cls, video_dict, rule_dict):
        return eval(f"{video_dict['play_cnt']}{rule_dict['play_cnt']}") \
            and eval(f"{video_dict['video_width']}{rule_dict['video_width']}") \
            and eval(f"{video_dict['video_height']}{rule_dict['video_height']}") \
            and eval(f"{video_dict['like_cnt']}{rule_dict['like_cnt']}") \
            and eval(f"{video_dict['duration']}{rule_dict['duration']}") \
            and eval(f"{video_dict['publish_time']}{rule_dict['publish_time']}")
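
    # A minimal sketch of one rule check, assuming a video with play_cnt=5000 and
    # a sheet rule of ">=100":
    #   eval(f"{5000}{'>=100'}")  ->  eval("5000>=100")  ->  True
    # Since eval() executes whatever string the sheet supplies, the rule sheet
    # must remain trusted input.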

    # Filter-word list
    @classmethod
    def filter_words(cls, log_type, crawler):
        try:
            while True:
                filter_words_sheet = Feishu.get_values_batch(log_type, crawler, 'HIKVvs')
                if filter_words_sheet is None:
                    Common.logger(log_type, crawler).warning(f"filter_words_sheet:{filter_words_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                filter_words_list = []
                for row in filter_words_sheet:
                    for word in row:
                        if word is not None:
                            filter_words_list.append(word)
                return filter_words_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'filter_words异常:{e}\n')

    # Fallback titles
    @classmethod
    def random_title(cls, log_type, crawler):
        try:
            while True:
                random_title_sheet = Feishu.get_values_batch(log_type, crawler, '0DiyXe')
                if random_title_sheet is None:
                    Common.logger(log_type, crawler).warning(f"random_title_sheet:{random_title_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                random_title_list = []
                for row in random_title_sheet:
                    for title in row:
                        if title is not None:
                            random_title_list.append(title)
                return random.choice(random_title_list)
        except Exception as e:
            Common.logger(log_type, crawler).error(f'random_title:{e}\n')

    # Fetch the off-platform (Kuaishou) user profile
    @classmethod
    def get_out_user_info(cls, log_type, crawler, out_uid):
        try:
            url = "https://www.kuaishou.com/graphql"
            payload = json.dumps({
                "operationName": "visionProfile",
                "variables": {
                    "userId": str(out_uid)
                },
                "query": "query visionProfile($userId: String) {\n visionProfile(userId: $userId) {\n result\n hostName\n userProfile {\n ownerCount {\n fan\n photo\n follow\n photo_public\n __typename\n }\n profile {\n gender\n user_name\n user_id\n headurl\n user_text\n user_profile_bg_url\n __typename\n }\n isFollowing\n __typename\n }\n __typename\n }\n}\n"
            })
            headers = {
                'Cookie': 'kpf=PC_WEB; clientid=3; did=web_e2901e1c5a13c60af81ba88bc7a3ee24; userId=3352428474; kpn=KUAISHOU_VISION; kuaishou.server.web_st=ChZrdWFpc2hvdS5zZXJ2ZXIud2ViLnN0EqABaRXtfRHlzKlQVj0Nm_M1G2wrIN1p6g3UTwfqfez6rkLVj6mPNt3RBAsLkyemMpvTLerPw0h41Q0lowqcImvIv5dlSGDEpQoj-VTAmOR2Suzm8vCRakG7XziAWyI0PXJKhvdXms-9Giy_4TnoniB49Oo3m7qXjXVBCzybcWS5BO90OLkhD30GYmGEnBBvkBI2oErJy3mNbafQdBQ6SxSUHhoS-1Rj5-IBBNoxoIePYcxZFs4oIiCvaT7sRn-zrF7X2ClPhfNh6lgClmH8MUjXszUfY_TPLCgFMAE; kuaishou.server.web_ph=1b62b98fc28bc23a42cd85240e1fd6025983',
                'Referer': f'https://www.kuaishou.com/profile/{out_uid}',
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                'content-type': 'application/json',
                'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6',
                'Cache-Control': 'no-cache',
                'Connection': 'keep-alive',
                'Origin': 'https://www.kuaishou.com',
                'Pragma': 'no-cache',
                'Sec-Fetch-Dest': 'empty',
                'Sec-Fetch-Mode': 'cors',
                'Sec-Fetch-Site': 'same-origin',
                'accept': '*/*',
                'sec-ch-ua': '"Chromium";v="110", "Not A(Brand";v="24", "Microsoft Edge";v="110"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-platform': '"macOS"'
            }
            urllib3.disable_warnings()
            response = requests.post(url=url, headers=headers, data=payload,
                                     proxies=Common.tunnel_proxies(), verify=False)
            response.close()
            if response.status_code != 200:
                Common.logger(log_type, crawler).warning(f"get_out_user_info_response:{response.text}\n")
                return
            response_json = response.json()
            if 'data' not in response_json:
                Common.logger(log_type, crawler).warning(f"get_out_user_info_response:{response_json}\n")
                return
            if 'visionProfile' not in response_json['data']:
                Common.logger(log_type, crawler).warning(f"get_out_user_info_response:{response_json['data']}\n")
                return
            if 'userProfile' not in response_json['data']['visionProfile']:
                # The userProfile key is missing here, so log its parent object
                Common.logger(log_type, crawler).warning(f"get_out_user_info_response:{response_json['data']['visionProfile']}\n")
                return
            userProfile = response_json['data']['visionProfile']['userProfile']
            try:
                out_fans_str = str(userProfile['ownerCount']['fan'])
            except Exception:
                out_fans_str = "0"
            try:
                out_follow_str = str(userProfile['ownerCount']['follow'])
            except Exception:
                out_follow_str = "0"
            try:
                out_avatar_url = userProfile['profile']['headurl']
            except Exception:
                out_avatar_url = ""
            Common.logger(log_type, crawler).info(f"out_fans_str:{out_fans_str}")
            Common.logger(log_type, crawler).info(f"out_follow_str:{out_follow_str}")
            Common.logger(log_type, crawler).info(f"out_avatar_url:{out_avatar_url}")
            if "万" in out_fans_str:
                out_fans = int(float(out_fans_str.split("万")[0]) * 10000)
            else:
                out_fans = int(out_fans_str.replace(",", ""))
            if "万" in out_follow_str:
                out_follow = int(float(out_follow_str.split("万")[0]) * 10000)
            else:
                out_follow = int(out_follow_str.replace(",", ""))
            out_user_dict = {
                "out_fans": out_fans,
                "out_follow": out_follow,
                "out_avatar_url": out_avatar_url
            }
            Common.logger(log_type, crawler).info(f"out_user_dict:{out_user_dict}")
            return out_user_dict
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_out_user_info:{e}\n")
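
    # Illustrative parses of the count strings handled above (hypothetical values):
    #   "1.2万"  -> int(1.2 * 10000) = 12000
    #   "1,234"  -> int("1234")      = 1234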

    # Fetch the tracked user list from the Feishu sheet
    @classmethod
    def get_user_list(cls, log_type, crawler, sheetid, env, machine):
        try:
            while True:
                user_sheet = Feishu.get_values_batch(log_type, crawler, sheetid)
                if user_sheet is None:
                    Common.logger(log_type, crawler).warning(f"user_sheet:{user_sheet} 10秒钟后重试")
                    time.sleep(10)
                    continue
                our_user_list = []
                for i in range(1, len(user_sheet)):
                    out_uid = user_sheet[i][2]
                    user_name = user_sheet[i][3]
                    our_uid = user_sheet[i][6]
                    our_user_link = user_sheet[i][7]
                    if out_uid is None or user_name is None:
                        Common.logger(log_type, crawler).info("空行\n")
                    else:
                        Common.logger(log_type, crawler).info(f"正在更新 {user_name} 用户信息\n")
                        if our_uid is None:
                            out_user_info = cls.get_out_user_info(log_type, crawler, out_uid)
                            out_user_dict = {
                                "out_uid": out_uid,
                                "user_name": user_name,
                                "out_avatar_url": out_user_info["out_avatar_url"],
                                "out_create_time": '',
                                "out_tag": '',
                                "out_play_cnt": 0,
                                "out_fans": out_user_info["out_fans"],
                                "out_follow": out_user_info["out_follow"],
                                "out_friend": 0,
                                "out_like": 0,
                                "platform": cls.platform,
                                "tag": cls.tag,
                            }
                            our_user_dict = Users.create_user(log_type=log_type, crawler=crawler,
                                                              out_user_dict=out_user_dict, env=env, machine=machine)
                            our_uid = our_user_dict['our_uid']
                            our_user_link = our_user_dict['our_user_link']
                            Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}',
                                                 [[our_uid, our_user_link]])
                            Common.logger(log_type, crawler).info('站内用户信息写入飞书成功!\n')
                            our_user_list.append(our_user_dict)
                        else:
                            our_user_dict = {
                                'out_uid': out_uid,
                                'user_name': user_name,
                                'our_uid': our_uid,
                                'our_user_link': our_user_link,
                            }
                            our_user_list.append(our_user_dict)
                return our_user_list
        except Exception as e:
            Common.logger(log_type, crawler).error(f'get_user_list:{e}\n')
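
    # Sheet layout assumed by the indices above: column C = out_uid, D = user_name,
    # G = our_uid, H = our_user_link. Rows with an empty our_uid trigger user
    # creation, and the new uid/link are written back to G:H of the same row.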

    # Clean up the raw video title
    @classmethod
    def video_title(cls, log_type, crawler, title):
        title_split1 = title.split(" #")
        if title_split1[0] != "":
            title1 = title_split1[0]
        else:
            title1 = title_split1[-1]
        title_split2 = title1.split(" #")
        if title_split2[0] != "":
            title2 = title_split2[0]
        else:
            title2 = title_split2[-1]
        title_split3 = title2.split("@")
        if title_split3[0] != "":
            title3 = title_split3[0]
        else:
            title3 = title_split3[-1]
        video_title = title3.strip().replace("\n", "") \
            .replace("/", "").replace("快手", "").replace(" ", "") \
            .replace(" ", "").replace("&NBSP", "").replace("\r", "") \
            .replace("#", "").replace(".", "。").replace("\\", "") \
            .replace(":", "").replace("*", "").replace("?", "") \
            .replace("?", "").replace('"', "").replace("<", "") \
            .replace(">", "").replace("|", "").replace("@", "")[:40]
        if video_title.replace(" ", "") == "" or video_title == "。。。" or video_title == "...":
            return cls.random_title(log_type, crawler)
        else:
            return video_title
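
    # Worked example (hypothetical input): "美丽风景 #旅行 #风景 @小明" keeps only
    # "美丽风景"; remaining "#"/"@" fragments and punctuation are stripped, the
    # result is capped at 40 characters, and random_title() is the fallback when
    # nothing usable is left.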

    @classmethod
    def get_videoList(cls, log_type, crawler, strategy, our_uid, out_uid, oss_endpoint, env, machine):
        try:
            download_cnt_1, download_cnt_2 = 0, 0
            pcursor = ""
            while True:
                rule_dict_1 = cls.get_rule(log_type, crawler, 1)
                rule_dict_2 = cls.get_rule(log_type, crawler, 2)
                if rule_dict_1 is None or rule_dict_2 is None:
                    Common.logger(log_type, crawler).warning("rule_dict is None, 10秒后重试")
                    time.sleep(10)
                else:
                    break
            # Strip the comparator from the download_cnt rule to get each quota,
            # e.g. "<=10" -> 10
            download_limit_1 = int(rule_dict_1['download_cnt'].replace("=", "").replace("<", "").replace(">", ""))
            download_limit_2 = int(rule_dict_2['download_cnt'].replace("=", "").replace("<", "").replace(">", ""))
            while True:
                if download_cnt_1 >= download_limit_1 and download_cnt_2 >= download_limit_2:
                    Common.logger(log_type, crawler).info(f"规则1已下载{download_cnt_1}条视频,规则2已下载{download_cnt_2}条视频\n")
                    return
                url = "https://www.kuaishou.com/graphql"
                payload = json.dumps({
                    "operationName": "visionProfilePhotoList",
                    "variables": {
                        "userId": out_uid,
                        "pcursor": pcursor,
                        "page": "profile"
                    },
                    "query": "fragment photoContent on PhotoEntity {\n id\n duration\n caption\n originCaption\n likeCount\n viewCount\n realLikeCount\n coverUrl\n photoUrl\n photoH265Url\n manifest\n manifestH265\n videoResource\n coverUrls {\n url\n __typename\n }\n timestamp\n expTag\n animatedCoverUrl\n distance\n videoRatio\n liked\n stereoType\n profileUserTopPhoto\n musicBlocked\n __typename\n}\n\nfragment feedContent on Feed {\n type\n author {\n id\n name\n headerUrl\n following\n headerUrls {\n url\n __typename\n }\n __typename\n }\n photo {\n ...photoContent\n __typename\n }\n canAddComment\n llsid\n status\n currentPcursor\n tags {\n type\n name\n __typename\n }\n __typename\n}\n\nquery visionProfilePhotoList($pcursor: String, $userId: String, $page: String, $webPageArea: String) {\n visionProfilePhotoList(pcursor: $pcursor, userId: $userId, page: $page, webPageArea: $webPageArea) {\n result\n llsid\n webPageArea\n feeds {\n ...feedContent\n __typename\n }\n hostName\n pcursor\n __typename\n }\n}\n"
                })
                headers = {
                    'Cookie': f'kpf=PC_WEB; clientid=3; did=web_e2901e1c5a13c60af81ba88bc7a3ee24; userId={"".join(str(random.choice(range(1, 10))) for _ in range(10))}; kuaishou.server.web_st=ChZrdWFpc2hvdS5zZXJ2ZXIud2ViLnN0EqABOLgYYcIJ5ilxU46Jc-HLWThY8sppX3V0htC_KhSGOzAjP2hAOdegzfkZGAxS5rf6rCBS487FkxfYzLkV__I6b1lK16rDjvv94Kkoo4z7mgf8y8rFgWoqrp81JAWTtx00y-wrc1XXPf9RAVQoET70wWaeNG2r5bxtZEiNwpK_zPi0ZdUo0BW13dFKfVssAy2xKYh0UlJ8VSd_vBvyMKSxVBoSf061Kc3w5Nem7YdpVBmH39ceIiBpiGioLzbZqlHiSbwkH_LhUhNXz3o7LITj098KUytk2CgFMAE; kuaishou.server.web_ph=f1033957981996a7d50e849a9ded4cf4adff; kpn=KUAISHOU_VISION',
                    'Referer': f'https://www.kuaishou.com/profile/{out_uid}',
                    'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/110.0.0.0 Safari/537.36 Edg/110.0.1587.41',
                    'content-type': 'application/json',
                }
                urllib3.disable_warnings()
                response = requests.post(url=url, headers=headers, data=payload,
                                         proxies=Common.tunnel_proxies(), verify=False)
                response.close()
                Common.logger(log_type, crawler).info(f"get_videoList:{response.text}\n")
                if response.status_code != 200:
                    Common.logger(log_type, crawler).warning(f"get_videoList_response:{response.text}\n")
                    return
                response_json = response.json()
                if 'data' not in response_json:
                    Common.logger(log_type, crawler).warning(f"get_videoList_response:{response_json}\n")
                    return
                if 'visionProfilePhotoList' not in response_json['data']:
                    Common.logger(log_type, crawler).warning(f"get_videoList_response:{response_json['data']}\n")
                    return
                if 'feeds' not in response_json['data']['visionProfilePhotoList']:
                    Common.logger(log_type, crawler).warning(f"get_videoList_response:{response_json['data']['visionProfilePhotoList']}\n")
                    return
                if len(response_json['data']['visionProfilePhotoList']['feeds']) == 0:
                    Common.logger(log_type, crawler).info("没有更多视频啦 ~\n")
                    return
                feeds = response_json['data']['visionProfilePhotoList']['feeds']
                pcursor = response_json['data']['visionProfilePhotoList']['pcursor']
                for feed in feeds:
                    if 'photo' not in feed:
                        Common.logger(log_type, crawler).warning(f"get_videoList:{feed}\n")
                        break
                    photo = feed['photo']
                    # video_title
                    if 'caption' not in photo or photo['caption'].strip() == "":
                        video_title = cls.random_title(log_type, crawler)
                    else:
                        video_title = cls.video_title(log_type, crawler, photo['caption'])
                    if 'videoResource' not in photo \
                            and 'manifest' not in photo \
                            and 'manifestH265' not in photo:
                        Common.logger(log_type, crawler).warning(f"get_videoList:{photo}\n")
                        break
                    videoResource = photo.get('videoResource', {})
                    if 'h264' not in videoResource and 'hevc' not in videoResource:
                        Common.logger(log_type, crawler).warning(f"get_videoList:{videoResource}\n")
                        break
                    # video_id
                    if 'h264' in videoResource and 'videoId' in videoResource['h264']:
                        video_id = videoResource['h264']['videoId']
                    elif 'hevc' in videoResource and 'videoId' in videoResource['hevc']:
                        video_id = videoResource['hevc']['videoId']
                    else:
                        video_id = ""
                    # play_cnt
                    if 'viewCount' not in photo:
                        play_cnt = 0
                    else:
                        play_cnt = int(photo['viewCount'])
                    # like_cnt
                    if 'realLikeCount' not in photo:
                        like_cnt = 0
                    else:
                        like_cnt = photo['realLikeCount']
                    # publish_time (age of the video in days)
                    if 'timestamp' not in photo:
                        publish_time_stamp = 0
                        publish_time_str = ''
                        publish_time = 0
                    else:
                        publish_time_stamp = int(int(photo['timestamp']) / 1000)
                        publish_time_str = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publish_time_stamp))
                        publish_time = int((int(time.time()) - publish_time_stamp) / (3600 * 24))
                    # duration (ms -> s)
                    if 'duration' not in photo:
                        duration = 0
                    else:
                        duration = int(int(photo['duration']) / 1000)
                    # video_width / video_height / video_url: prefer the h264
                    # representation, fall back to hevc, then to an empty value
                    mapping = {}
                    for item in ['width', 'height', 'url']:
                        try:
                            val = str(videoResource['h264']['adaptationSet'][0]['representation'][0][item])
                        except Exception:
                            try:
                                val = str(videoResource['hevc']['adaptationSet'][0]['representation'][0][item])
                            except Exception:
                                val = ''
                        mapping[item] = val
                    video_width = int(mapping['width']) if mapping['width'] != '' else 0
                    video_height = int(mapping['height']) if mapping['height'] != '' else 0
                    video_url = mapping['url']
                    # cover_url
                    if 'coverUrl' not in photo:
                        cover_url = ""
                    else:
                        cover_url = photo['coverUrl']
                    # user_name / avatar_url
                    try:
                        user_name = feed['author']['name']
                        avatar_url = feed['author']['headerUrl']
                    except Exception:
                        user_name = ''
                        avatar_url = ''
                    video_dict = {'video_title': video_title,
                                  'video_id': video_id,
                                  'play_cnt': play_cnt,
                                  'comment_cnt': 0,
                                  'like_cnt': like_cnt,
                                  'share_cnt': 0,
                                  'video_width': video_width,
                                  'video_height': video_height,
                                  'duration': duration,
                                  'publish_time': publish_time,
                                  'publish_time_stamp': publish_time_stamp,
                                  'publish_time_str': publish_time_str,
                                  'user_name': user_name,
                                  'user_id': out_uid,
                                  'avatar_url': avatar_url,
                                  'cover_url': cover_url,
                                  'video_url': video_url,
                                  'session': f"kuaishou{int(time.time())}"}
                    rule_1 = cls.download_rule(video_dict, rule_dict_1)
                    rule_2 = cls.download_rule(video_dict, rule_dict_2)
                    Common.logger(log_type, crawler).info(f"video_title:{video_title}")
                    Common.logger(log_type, crawler).info(f"video_id:{video_id}\n")
                    # Log every comparison for both rule sets
                    for rule_name, rule_dict, rule_flag in [("rule_1", rule_dict_1, rule_1),
                                                            ("rule_2", rule_dict_2, rule_2)]:
                        for key in ['play_cnt', 'like_cnt', 'video_width', 'video_height', 'duration', 'publish_time']:
                            Common.logger(log_type, crawler).info(
                                f"{key}:{video_dict[key]}{rule_dict[key]}, {eval(str(video_dict[key]) + str(rule_dict[key]))}")
                        Common.logger(log_type, crawler).info(f"{rule_name}:{rule_flag}\n")
                    if video_title == "" or video_url == "":
                        Common.logger(log_type, crawler).info("无效视频\n")
                        break
                    elif rule_1 is True:
                        if download_cnt_1 < download_limit_1:
                            download_finished = cls.download_publish(log_type=log_type,
                                                                     crawler=crawler,
                                                                     strategy=strategy,
                                                                     video_dict=video_dict,
                                                                     rule_dict=rule_dict_1,
                                                                     our_uid=our_uid,
                                                                     oss_endpoint=oss_endpoint,
                                                                     env=env,
                                                                     machine=machine)
                            if download_finished is True:
                                download_cnt_1 += 1
                    elif rule_2 is True:
                        if download_cnt_2 < download_limit_2:
                            download_finished = cls.download_publish(log_type=log_type,
                                                                     crawler=crawler,
                                                                     strategy=strategy,
                                                                     video_dict=video_dict,
                                                                     rule_dict=rule_dict_2,
                                                                     our_uid=our_uid,
                                                                     oss_endpoint=oss_endpoint,
                                                                     env=env,
                                                                     machine=machine)
                            if download_finished is True:
                                download_cnt_2 += 1
                    else:
                        Common.logger(log_type, crawler).info("不满足下载规则\n")
                if pcursor == "no_more":
                    Common.logger(log_type, crawler).info("已经到底了,没有更多内容了\n")
                    return
        except Exception as e:
            Common.logger(log_type, crawler).error(f"get_videoList:{e}\n")

    # Count how many times this video already exists in crawler_video
    @classmethod
    def repeat_video(cls, log_type, crawler, video_id, env, machine):
        sql = f""" select * from crawler_video where platform="{cls.platform}" and out_video_id="{video_id}"; """
        repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env, machine)
        return len(repeat_video)
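
    # The query above interpolates video_id straight into the SQL text. A safer
    # sketch, assuming MysqlHelper accepted bound parameters (this module does not
    # show that it does), would be:
    #   sql = "select * from crawler_video where platform=%s and out_video_id=%s"
    #   MysqlHelper.get_values(log_type, crawler, sql, env, machine, params=(cls.platform, video_id))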

    @classmethod
    def download_publish(cls, log_type, crawler, strategy, video_dict, rule_dict, our_uid, oss_endpoint, env, machine):
        try:
            download_finished = False
            if cls.repeat_video(log_type, crawler, video_dict['video_id'], env, machine) != 0:
                Common.logger(log_type, crawler).info('视频已下载\n')
            elif video_dict['video_id'] in [x for y in Feishu.get_values_batch(log_type, crawler, "3cd128") for x in y]:
                Common.logger(log_type, crawler).info('视频已下载\n')
            elif any(word in video_dict['video_title'] for word in cls.filter_words(log_type, crawler)):
                Common.logger(log_type, crawler).info('标题已中过滤词\n')
            else:
                # Download the cover
                Common.download_method(log_type=log_type, crawler=crawler, text='cover',
                                       title=video_dict['video_title'], url=video_dict['cover_url'])
                # Download the video
                Common.download_method(log_type=log_type, crawler=crawler, text='video',
                                       title=video_dict['video_title'], url=video_dict['video_url'])
                # Save the video metadata to a txt file
                Common.save_video_info(log_type=log_type, crawler=crawler, video_dict=video_dict)
                # Upload the video
                Common.logger(log_type, crawler).info("开始上传视频...")
                our_video_id = Publish.upload_and_publish(log_type=log_type,
                                                          crawler=crawler,
                                                          strategy=strategy,
                                                          our_uid=our_uid,
                                                          env=env,
                                                          oss_endpoint=oss_endpoint)
                Common.logger(log_type, crawler).info("视频上传完成")
                if our_video_id is None:
                    Common.logger(log_type, crawler).warning(f"our_video_id:{our_video_id} 删除该视频文件夹")
                    # Remove the local folder of the failed upload
                    shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}")
                    return download_finished
                if env == 'dev':
                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                else:
                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
                # Persist the video record to the database
                insert_sql = f""" insert into crawler_video(video_id,
                                user_id,
                                out_user_id,
                                platform,
                                strategy,
                                out_video_id,
                                video_title,
                                cover_url,
                                video_url,
                                duration,
                                publish_time,
                                play_cnt,
                                crawler_rule,
                                width,
                                height)
                                values({our_video_id},
                                {our_uid},
                                "{video_dict['user_id']}",
                                "{cls.platform}",
                                "定向爬虫策略",
                                "{video_dict['video_id']}",
                                "{video_dict['video_title']}",
                                "{video_dict['cover_url']}",
                                "{video_dict['video_url']}",
                                {int(video_dict['duration'])},
                                "{video_dict['publish_time_str']}",
                                {int(video_dict['play_cnt'])},
                                '{json.dumps(rule_dict)}',
                                {int(video_dict['video_width'])},
                                {int(video_dict['video_height'])}) """
                Common.logger(log_type, crawler).info(f"insert_sql:{insert_sql}")
                MysqlHelper.update_values(log_type, crawler, insert_sql, env, machine)
                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
                # Append the video to the Feishu sheet
                Feishu.insert_columns(log_type, 'kuaishou', "fYdA8F", "ROWS", 1, 2)
                upload_time = int(time.time())
                values = [[our_video_id,
                           time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(upload_time)),
                           "定向榜",
                           str(video_dict['video_id']),
                           video_dict['video_title'],
                           our_video_link,
                           video_dict['play_cnt'],
                           video_dict['comment_cnt'],
                           video_dict['like_cnt'],
                           video_dict['share_cnt'],
                           video_dict['duration'],
                           f"{video_dict['video_width']}*{video_dict['video_height']}",
                           video_dict['publish_time_str'],
                           video_dict['user_name'],
                           video_dict['user_id'],
                           video_dict['avatar_url'],
                           video_dict['cover_url'],
                           video_dict['video_url']]]
                time.sleep(1)
                Feishu.update_values(log_type, 'kuaishou', "fYdA8F", "E2:Z2", values)
                Common.logger(log_type, crawler).info("视频已保存至云文档\n")
                download_finished = True
            return download_finished
        except Exception as e:
            Common.logger(log_type, crawler).error(f"download_publish:{e}\n")

    @classmethod
    def get_follow_videos(cls, log_type, crawler, strategy, oss_endpoint, env, machine):
        user_list = cls.get_user_list(log_type=log_type, crawler=crawler, sheetid="bTSzxW", env=env, machine=machine)
        for user in user_list:
            out_uid = user["out_uid"]
            user_name = user["user_name"]
            our_uid = user["our_uid"]
            Common.logger(log_type, crawler).info(f"开始抓取 {user_name} 用户主页视频\n")
            cls.get_videoList(log_type=log_type,
                              crawler=crawler,
                              strategy=strategy,
                              our_uid=our_uid,
                              out_uid=out_uid,
                              oss_endpoint=oss_endpoint,
                              env=env,
                              machine=machine)
            time.sleep(3)


if __name__ == "__main__":
    # print(Follow.filter_words("follow", "kuaishou"))
    # print(Follow.random_title("follow", "kuaishou"))
    # Follow.get_user_list("follow", "kuaishou", "2OLxLr", "dev", "local")
    # Follow.get_videoList(log_type="follow",
    #                      crawler="kuaishou",
    #                      strategy="定向爬虫策略",
    #                      our_uid="6282431",
    #                      out_uid="3xws7ydsnmp5mgq",
    #                      oss_endpoint="out",
    #                      env="dev",
    #                      machine="local")
    # Follow.get_rule("follow", "kuaishou", 1)
    # Follow.get_rule("follow", "kuaishou", 2)
    print(Follow.get_out_user_info("follow", "kuaishou", "3xgh4ja9be3wcaw"))
    print(Follow.get_out_user_info("follow", "kuaishou", "3x5wgjhfc7tx8ue"))