# piaopiaoquan_sift.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: zhangyong
  3. # @Time: 2023/11/30
  4. import json
  5. import os
  6. import sys
  7. import time
  8. import uuid
  9. from hashlib import md5
  10. from appium import webdriver
  11. from appium.webdriver.extensions.android.nativekey import AndroidKey
  12. from bs4 import BeautifulSoup
  13. from selenium.common.exceptions import NoSuchElementException
  14. from selenium.webdriver.common.by import By
  15. import multiprocessing
  16. sys.path.append(os.getcwd())
  17. from common import AliyunLogger, PiaoQuanPipeline, get_redirect_url
  18. from common.common import Common
  19. from common.mq import MQ
class PPQSiftRecommend:
    # Crawler for the "piaopiaoquan-sift" feed: drives the WeChat Android app
    # through Appium, opens the mini-program from a group chat, and scrapes
    # recommended videos into MQ.
    # NOTE(review): these class-level attributes are shadowed by the instance
    # attributes assigned in __init__; kept for compatibility.
    env = None
    driver = None
    log_type = None

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        """Start WeChat via Appium, navigate into the mini-program, crawl, quit.

        :param log_type: crawl strategy/mode name (e.g. "recommend"), used for logging
        :param crawler: crawler name; used as pipeline platform and trace-id prefix
        :param env: "dev" or other; selects MQ topic suffix and log environment
        :param rule_dict: crawl rule thresholds (videos_cnt, duration, ...)
        :param our_uid: uid written into each published video_dict as user_id
        """
        self.mq = None
        self.platform = "piaopiaoquan-sift"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        # NOTE(review): both branches resolve to the same chromedriver path;
        # presumably a prod-specific path was intended — confirm.
        if self.env == "dev":
            chromedriverExecutable = "/Users/tzld/Downloads/chromedriver_v111/chromedriver"
        else:
            chromedriverExecutable = "/Users/tzld/Downloads/chromedriver_v111/chromedriver"
        Common.logger(self.log_type, self.crawler).info("启动微信")
        # Appium desired capabilities for driving the WeChat app.
        caps = {
            "platformName": "Android",
            "devicesName": "Android",
            # "platformVersion": "11",
            # "udid": "emulator-5554",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            # NOTE(review): Appium spells these "resetKeyboard"/"unicodeKeyboard";
            # the lowercase keys below are likely ignored — confirm.
            "resetkeyboard": True,
            "unicodekeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
        try:
            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        except Exception as e:
            # Appium session failed to start: log and abort construction.
            print(e)
            AliyunLogger.logging(
                code="3002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)
        # Poll for the WeChat main-UI element before navigating; the loop
        # breaks as soon as the element appears.
        for i in range(120):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Common.logger(self.log_type, self.crawler).info("微信启动成功")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="启动微信成功"
                    )
                    break
                else:
                    pass
            except NoSuchElementException:
                AliyunLogger.logging(
                    code="3001",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message="打开微信异常"
                )
            time.sleep(1)
        # Navigate: group chat -> mini-program card -> back into the "more hot" page.
        Common.logger(self.log_type, self.crawler).info("点击漂漂圈精选视频群聊")
        self.driver.find_elements(By.XPATH, '//*[@text="漂漂圈精选视频"]')[-1].click()
        time.sleep(5)
        Common.logger(self.log_type, self.crawler).info('点击"漂漂圈丨福年"卡片')
        self.driver.find_elements(By.XPATH, '//*[@text="漂漂圈丨福年"]')[-1].click()
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            env=self.env,
            mode=self.log_type,
            message="打开漂漂圈丨福年卡片成功进入小程序"
        )
        time.sleep(5)
        self.driver.press_keycode(AndroidKey.BACK)
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            env=self.env,
            mode=self.log_type,
            message="进入漂漂圈丨福年 更多热门成功"
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()
  124. def search_elements(self, xpath):
  125. time.sleep(1)
  126. windowHandles = self.driver.window_handles
  127. for handle in windowHandles:
  128. self.driver.switch_to.window(handle)
  129. time.sleep(1)
  130. try:
  131. elements = self.driver.find_elements(By.XPATH, xpath)
  132. if elements:
  133. return elements
  134. except NoSuchElementException:
  135. pass
    def check_to_applet(self, xpath):
        """Switch Appium into the WebView context and find the target window.

        Switches to the last entry of ``driver.contexts`` (assumed to be the
        mini-program WebView — confirm), then walks the window handles until
        *xpath* is present; returns as soon as it is found.
        """
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Common.logger(self.log_type, self.crawler).info("切换到WebView成功\n")
                AliyunLogger.logging(
                    code="1000",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message="成功切换到 webview"
                )
                return
            except NoSuchElementException:
                # Not in this window: wait briefly and try the next handle.
                time.sleep(1)
  157. def swipe_up(self):
  158. self.search_elements('//*[@class="single--title"]')
  159. size = self.driver.get_window_size()
  160. self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
  161. int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
  162. self.swipe_count += 1
    def get_video_url(self, video_title_element):
        """Open the detail page behind *video_title_element* and return its video src.

        Retries the scroll-into-view + click + context-switch sequence up to
        3 times; returns the ``src`` attribute of the detail-page video element,
        or None when it never appears.
        """
        for i in range(3):
            self.search_elements('//*[@class="single--title"]')
            Common.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Common.logger(self.log_type, self.crawler).info("滑动标题至可见状态")
            # Scroll the title element into the viewport so the click lands on it.
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Common.logger(self.log_type, self.crawler).info("点击标题")
            video_title_element[0].click()
            self.check_to_applet(xpath=r'//wx-video[@class="infos--flex-box infos--title-wrap"]')
            Common.logger(self.log_type, self.crawler).info("点击标题完成")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//wx-video[@class="dynamic-index--video-item dynamic-index--video"]')
            if video_url_elements:
                return video_url_elements[0].get_attribute("src")
  181. def parse_detail(self, index):
  182. page_source = self.driver.page_source
  183. soup = BeautifulSoup(page_source, 'html.parser')
  184. soup.prettify()
  185. video_list = soup.findAll(name="wx-view", attrs={"class": "single--dynamic-item"})
  186. element_list = [i for i in video_list][index:]
  187. return element_list[0]
  188. def get_video_info_2(self, video_element):
  189. Common.logger(self.log_type, self.crawler).info(f"本轮已抓取{self.download_cnt}条视频\n")
  190. if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
  191. self.count = 0
  192. self.download_cnt = 0
  193. self.element_list = []
  194. return
  195. self.count += 1
  196. Common.logger(self.log_type, self.crawler).info(f"第{self.count}条视频")
  197. # 获取 trace_id, 并且把该 id 当做视频生命周期唯一索引
  198. trace_id = self.crawler + str(uuid.uuid1())
  199. AliyunLogger.logging(
  200. code="1001",
  201. platform=self.platform,
  202. mode=self.log_type,
  203. env=self.env,
  204. trace_id=trace_id,
  205. message="扫描到一条视频",
  206. )
  207. # 标题
  208. video_title = video_element.find("wx-view", class_="single--title").text
  209. # 播放量字符串
  210. play_str = video_element.find("wx-view", class_="single--favor-text").text
  211. user_name = video_element.find("wx-view", class_="single--nick").text
  212. # 头像 URL
  213. avatar_url = video_element.find("wx-image", class_="single--avatar-image")["src"]
  214. # 封面 URL
  215. cover_url = video_element.find("wx-image", class_="single--image")["src"]
  216. play_cnt = play_str.replace("+", "").replace("次播放", "")
  217. if "万" in play_cnt:
  218. play_cnt = int(play_cnt.split("万")[0]) * 10000
  219. out_video_id = md5(video_title.encode('utf8')).hexdigest()
  220. out_user_id = md5(user_name.encode('utf8')).hexdigest()
  221. video_dict = {
  222. "video_title": video_title,
  223. "video_id": out_video_id,
  224. 'out_video_id': out_video_id,
  225. "duration_str": '',
  226. "duration": 0,
  227. "play_str": play_str,
  228. "play_cnt": play_cnt,
  229. "like_str": "",
  230. "like_cnt": 0,
  231. "comment_cnt": 0,
  232. "share_cnt": 0,
  233. "user_name": user_name,
  234. "user_id": out_user_id,
  235. 'publish_time_stamp': int(time.time()),
  236. 'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
  237. 'update_time_stamp': int(time.time()),
  238. "avatar_url": avatar_url,
  239. "cover_url": cover_url,
  240. "session": f"piaopiaoquan_sift-{int(time.time())}"
  241. }
  242. pipeline = PiaoQuanPipeline(
  243. platform=self.crawler,
  244. mode=self.log_type,
  245. item=video_dict,
  246. rule_dict=self.rule_dict,
  247. env=self.env,
  248. trace_id=trace_id
  249. )
  250. flag = pipeline.process_item()
  251. if flag:
  252. video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
  253. if video_title_element is None:
  254. return
  255. Common.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
  256. AliyunLogger.logging(
  257. code="1000",
  258. platform=self.platform,
  259. mode=self.log_type,
  260. env=self.env,
  261. message="点击标题,进入视频详情页",
  262. )
  263. video_url = self.get_video_url(video_title_element)
  264. video_url = get_redirect_url(video_url)
  265. if video_url is None:
  266. self.driver.press_keycode(AndroidKey.BACK)
  267. time.sleep(5)
  268. return
  269. video_dict['video_url'] = video_url
  270. video_dict["platform"] = self.crawler
  271. video_dict["strategy"] = self.log_type
  272. video_dict["out_video_id"] = video_dict["video_id"]
  273. video_dict["crawler_rule"] = json.dumps(self.rule_dict)
  274. video_dict["user_id"] = self.our_uid
  275. video_dict["publish_time"] = video_dict["publish_time_str"]
  276. print(video_dict)
  277. self.driver.press_keycode(AndroidKey.BACK)
  278. self.mq.send_msg(video_dict)
  279. self.download_cnt += 1
  280. time.sleep(5)
  281. def get_video_info(self, video_element):
  282. try:
  283. self.get_video_info_2(video_element)
  284. except Exception as e:
  285. Common.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
  286. AliyunLogger.logging(
  287. code="3001",
  288. platform=self.platform,
  289. mode=self.log_type,
  290. env=self.env,
  291. message=f"抓取单条视频异常:{e}\n"
  292. )
    def get_videoList(self):
        """Main scrape loop: enter the feed WebView, then parse -> scrape -> swipe
        for up to 50 cards per round, publishing accepted items to MQ."""
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.driver.implicitly_wait(20)
        # Switch Appium into the WebView that renders the feed.
        self.check_to_applet(xpath='//*[@class="single--item-content"]')
        print("切换到 webview 成功")
        time.sleep(1)
        if self.search_elements('//*[@class="single--item-content"]') is None:
            # Feed window is gone: reset counters and abort this round.
            Common.logger(self.log_type, self.crawler).info("窗口已销毁\n")
            AliyunLogger.logging(
                code="3000",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message="窗口已销毁"
            )
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        print("开始获取视频信息")
        for i in range(50):
            print("下滑{}次".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            # Safety valve: stop after too many swipes in one session.
            if self.swipe_count > 100:
                return
        print("下滑完成")
        Common.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            message="已抓取完一组,休眠 5 秒\n",
        )
        time.sleep(5)
  332. def run():
  333. rule_dict1 = {"period": {"min": 365, "max": 365},
  334. "duration": {"min": 0, "max": 0},
  335. "favorite_cnt": {"min": 0, "max": 0},
  336. "videos_cnt": {"min": 5000, "max": 0},
  337. "share_cnt": {"min": 0, "max": 0}}
  338. PPQSiftRecommend("recommend", "piaopiaoquan", "dev", rule_dict1, [64120158])
if __name__ == "__main__":
    # Supervisor: run the crawler in a child process and restart it whenever
    # it dies, resetting adb port forwarding in between.
    process = multiprocessing.Process(
        target=run
    )
    process.start()
    while True:
        if not process.is_alive():
            print("正在重启")
            process.terminate()
            time.sleep(60)
            # Clear stale adb forwards left by the dead Appium session.
            os.system("adb forward --remove-all")
            process = multiprocessing.Process(target=run)
            process.start()
        # Poll interval between liveness checks.
        time.sleep(60)