# -*- coding: utf-8 -*-
# @Author: zhangyong
# @Time: 2023/11/24
import json
import multiprocessing
import os
import random
import sys
import time
import uuid
from hashlib import md5

from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from bs4 import BeautifulSoup
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())
from application.common.log import AliyunLogger, Local
from application.common.messageQueue import MQ
from application.functions import get_redirect_url
from application.pipeline import PiaoQuanPipeline
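

# Overall flow (for orientation): launch WeChat through Appium, open the
# "漂漂圈丨福年" mini-program, switch into its WebView, parse the recommend feed
# with BeautifulSoup, and push qualifying videos onto the crawler ETL queue.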
class PPQRecommend:
    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = MQ(topic_name="topic_crawler_etl_" + env)
        self.platform = "piaopiaoquan"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        chromedriverExecutable = "/usr/bin/chromedriver"
        self.aliyun_log = AliyunLogger(platform=crawler, mode=log_type, env=env)
        Local.logger(self.log_type, self.crawler).info("Launching WeChat")
        # Appium capabilities for driving WeChat
        caps = {
            "platformName": "Android",
            "deviceName": "Android",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            "resetKeyboard": True,
            "unicodeKeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
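        # "com.tencent.mm:appbrand0" is, as far as I can tell, the WeChat child
        # process that hosts mini-program WebViews; binding chromedriver to it
        # is what makes the WebView context switch below possible.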
        try:
            self.driver = webdriver.Remote("http://localhost:4750/wd/hub", caps)
        except Exception as e:
            print(e)
            self.aliyun_log.logging(
                code="3002",
                message=f"Appium failed to start: {e}"
            )
            return
        self.driver.implicitly_wait(30)
        for i in range(120):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Local.logger(self.log_type, self.crawler).info("WeChat launched successfully")
                    self.aliyun_log.logging(
                        code="1000",
                        message="WeChat launched successfully"
                    )
                    break
                # find_elements (plural) avoids raising, and sitting out the
                # 30 s implicit wait, when the dismiss view is absent
                elif self.driver.find_elements(By.ID, "com.android.systemui:id/dismiss_view"):
                    Local.logger(self.log_type, self.crawler).info("Found and dismissed the system pull-down menu")
                    self.aliyun_log.logging(
                        code="1000",
                        message="Found and dismissed the system pull-down menu"
                    )
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                                      int(size["width"] * 0.5), int(size["height"] * 0.2), 200)
            except NoSuchElementException:
                self.aliyun_log.logging(
                    code="3001",
                    message="Error while opening WeChat"
                )
            time.sleep(1)
        Local.logger(self.log_type, self.crawler).info("Swiping down to show the mini-program panel")
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.2),
                          int(size["width"] * 0.5), int(size["height"] * 0.8), 200)
        time.sleep(1)
        Local.logger(self.log_type, self.crawler).info('Opening the mini-program "漂漂圈丨福年"')
        self.driver.find_elements(By.XPATH, '//*[@text="漂漂圈丨福年"]')[-1].click()
        self.aliyun_log.logging(
            code="1000",
            message='Opened the mini-program "漂漂圈丨福年"'
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()

    def search_elements(self, xpath):
        """Scan every window handle and return the first non-empty match, or None."""
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass
        return None
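    # A mini-program can expose several window handles inside one WebView
    # context, so search_elements() probes each handle in turn. It returns
    # None when nothing matches, and every caller must handle that case.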

    def check_to_applet(self, xpath):
        """Switch into the newest WebView context and wait until `xpath` is present."""
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Local.logger(self.log_type, self.crawler).info("Switched to WebView successfully\n")
                self.aliyun_log.logging(
                    code="1000",
                    message="Switched to WebView successfully"
                )
                return
            except NoSuchElementException:
                time.sleep(1)
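    # driver.contexts typically lists NATIVE_APP plus one WEBVIEW_* entry per
    # web process; taking contexts[-1] assumes the mini-program owns the most
    # recently created WebView context.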

    def swipe_up(self):
        self.search_elements('//*[@class="dynamic--title-container"]')
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                          int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
        self.swipe_count += 1
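    # The swipe runs from 80% down to 44.2% of the screen height, presumably
    # tuned so that each swipe advances the feed by roughly one video card.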

    def get_video_url(self, video_title_element):
        for i in range(3):
            self.search_elements('//*[@class="dynamic--title-container"]')
            Local.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Local.logger(self.log_type, self.crawler).info("Scrolling the title into view")
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Local.logger(self.log_type, self.crawler).info("Clicking the title")
            video_title_element[0].click()
            self.check_to_applet(xpath=r'//wx-video[@class="infos--title infos--ellipsis"]')
            Local.logger(self.log_type, self.crawler).info("Title clicked")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//wx-video[@class="dynamic-index--video-item dynamic-index--video"]')
            if video_url_elements:
                return video_url_elements[0].get_attribute("src")
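    # The "src" attribute of the wx-video element holds the playable address;
    # it may be a redirect, so the caller resolves it with get_redirect_url()
    # before the item is queued.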

    def parse_detail(self, index):
        """Parse the current WebView DOM and return the `index`-th feed card."""
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, "html.parser")
        video_list = soup.findAll(name="wx-view", attrs={"class": "expose--adapt-parent"})
        return video_list[index]
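    # Cards are read from a BeautifulSoup snapshot of page_source rather than
    # from live Selenium elements, so the text survives even if the feed
    # re-renders while a card is being processed.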

    def get_video_info_2(self, video_element):
        Local.logger(self.log_type, self.crawler).info(f"Grabbed {self.download_cnt} videos this round\n")
        # Stop once the per-round quota (the "videos_cnt.min" rule) is reached
        if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        self.count += 1
        Local.logger(self.log_type, self.crawler).info(f"Video #{self.count}")
        # Build a trace_id that serves as the unique lifecycle index for this video
        trace_id = self.crawler + str(uuid.uuid1())
        self.aliyun_log.logging(
            code="1001",
            trace_id=trace_id,
            message="Found a video",
        )
        # Title
        video_title = video_element.find("wx-view", class_="dynamic--title").text
        # Play-count string, something like "123+次播放"
        play_str = video_element.find("wx-view", class_="dynamic--views").text
        # Duration string, "mm:ss"
        duration_str = video_element.find("wx-view", class_="dynamic--duration").text
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        # Avatar URL
        avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
        # Cover URL
        cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
        play_cnt = int(play_str.replace("+", "").replace("次播放", ""))
        duration = int(duration_str.split(":")[0].strip()) * 60 + int(duration_str.split(":")[-1].strip())
        out_video_id = md5(video_title.encode("utf8")).hexdigest()
        out_user_id = md5(user_name.encode("utf8")).hexdigest()
        video_dict = {
            "video_title": video_title,
            "video_id": out_video_id,
            "out_video_id": out_video_id,
            "duration_str": duration_str,
            "duration": duration,
            "play_str": play_str,
            "play_cnt": play_cnt,
            "like_str": "",
            "like_cnt": 0,
            "comment_cnt": 0,
            "share_cnt": 0,
            "user_name": user_name,
            "user_id": out_user_id,
            "publish_time_stamp": int(time.time()),
            "publish_time_str": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
            "update_time_stamp": int(time.time()),
            "avatar_url": avatar_url,
            "cover_url": cover_url,
            "session": f"piaopiaoquan-{int(time.time())}"
        }
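        # out_video_id above is just md5(title), so two items with identical
        # titles map to the same id; dedup downstream is effectively per-title
        # rather than per-upload.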
        pipeline = PiaoQuanPipeline(
            platform=self.crawler,
            mode=self.log_type,
            item=video_dict,
            rule_dict=self.rule_dict,
            env=self.env,
            trace_id=trace_id
        )
        flag = pipeline.process_item()
        if flag:
            video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
            if video_title_element is None:
                return
            Local.logger(self.log_type, self.crawler).info("Clicking the title to open the video detail page")
            self.aliyun_log.logging(
                code="1000",
                message="Clicking the title to open the video detail page",
            )
            video_url = self.get_video_url(video_title_element)
            video_url = get_redirect_url(video_url)
            if video_url is None:
                self.driver.press_keycode(AndroidKey.BACK)
                time.sleep(5)
                return
            video_dict["video_url"] = video_url
            video_dict["platform"] = self.crawler
            video_dict["strategy"] = self.log_type
            video_dict["out_video_id"] = video_dict["video_id"]
            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
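            # Note: the next line overwrites the md5-based author id with
            # our_uid from the caller (a list of account ids in run() below).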
            video_dict["user_id"] = self.our_uid
            video_dict["publish_time"] = video_dict["publish_time_str"]
            self.mq.send_msg(video_dict)
            self.download_cnt += 1
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(5)

    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            # Back out of whatever page the failure left us on
            self.driver.press_keycode(AndroidKey.BACK)
            Local.logger(self.log_type, self.crawler).error(f"Error while grabbing a single video: {e}\n")
            self.aliyun_log.logging(
                code="3001",
                message=f"Error while grabbing a single video: {e}\n"
            )

    def get_videoList(self):
        self.driver.implicitly_wait(20)
        # Switch into the mini-program's WebView
        self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
        print("Switched to the WebView")
        time.sleep(1)
        if self.search_elements('//*[@class="expose--adapt-parent"]') is None:
            Local.logger(self.log_type, self.crawler).info("Window has been destroyed\n")
            self.aliyun_log.logging(
                code="3000",
                message="Window has been destroyed"
            )
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        print("Start collecting video info")
        for i in range(50):
            print("Swipe pass {}".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            if self.swipe_count > 100:
                return
        print("Swiping finished")
        Local.logger(self.log_type, self.crawler).info("Finished one batch, sleeping for 5 seconds\n")
        self.aliyun_log.logging(
            code="1000",
            message="Finished one batch, sleeping for 5 seconds\n",
        )
        time.sleep(5)
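    # Each pass walks at most 50 feed cards; the swipe_count > 100 guard looks
    # like a safety valve, though as written a single 50-card pass can never
    # reach it.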


def run():
    rule_dict1 = {"period": {"min": 365, "max": 365},
                  "duration": {"min": 30, "max": 1800},
                  "favorite_cnt": {"min": 0, "max": 0},
                  "videos_cnt": {"min": 5000, "max": 0},
                  "share_cnt": {"min": 0, "max": 0}}
    PPQRecommend("recommend", "piaopiaoquan", "prod", rule_dict1, [64120158, 64120157, 63676778])


if __name__ == "__main__":
    process = multiprocessing.Process(target=run)
    process.start()
    while True:
        # Restart the crawler whenever the worker process dies
        if not process.is_alive():
            print("Restarting")
            process.terminate()
            time.sleep(60)
            os.system("adb forward --remove-all")
            process = multiprocessing.Process(target=run)
            process.start()
        time.sleep(60)
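        # "adb forward --remove-all" above drops stale adb port forwards left
        # behind by the dead Appium session, presumably so the restarted
        # session can rebind cleanly.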