# zhufuhaoyunbaofu.py
  1. # -*- coding: utf-8 -*-
  2. import json
  3. import multiprocessing
  4. import os
  5. import random
  6. import subprocess
  7. import sys
  8. import time
  9. import uuid
  10. from hashlib import md5
  11. from appium import webdriver
  12. from appium.webdriver.extensions.android.nativekey import AndroidKey
  13. from appium.webdriver.common.touch_action import TouchAction
  14. from bs4 import BeautifulSoup
  15. from selenium.common.exceptions import NoSuchElementException
  16. from selenium.webdriver.common.by import By
  17. sys.path.append(os.getcwd())
  18. from application.functions import get_redirect_url
  19. from application.pipeline import PiaoQuanPipelineTest
  20. from application.common.log import AliyunLogger, Local
  21. from application.common import MysqlHelper
  22. class ZFHYBFRecommend(object):
  23. """
  24. 北京银时光技术有限公司 - 祝福好运暴富线下爬虫
  25. """
  26. def __init__(self, log_type, crawler, env, rule_dict, our_uid):
  27. self.mq = None
  28. self.platform = "zhufuhaoyunbaofu"
  29. self.download_cnt = 0
  30. self.element_list = []
  31. self.count = 0
  32. self.swipe_count = 0
  33. self.log_type = log_type
  34. self.crawler = crawler
  35. self.env = env
  36. self.rule_dict = rule_dict
  37. self.our_uid = our_uid
  38. chromedriverExecutable = "/Users/tzld/Downloads/chromedriver_mac64/chromedriver"
  39. self.aliyun_log = AliyunLogger(platform=crawler, mode=log_type, env=env)
  40. print("启动微信")
  41. # 微信的配置文件
  42. caps = {
  43. "platformName": "Android",
  44. "devicesName": "Android",
  45. "appPackage": "com.tencent.mm",
  46. "appActivity": ".ui.LauncherUI",
  47. "autoGrantPermissions": True,
  48. "noReset": True,
  49. "resetkeyboard": True,
  50. "unicodekeyboard": True,
  51. "showChromedriverLog": True,
  52. "printPageSourceOnFailure": True,
  53. "recreateChromeDriverSessions": True,
  54. "enableWebviewDetailsCollection": True,
  55. "setWebContentsDebuggingEnabled": True,
  56. "newCommandTimeout": 6000,
  57. "automationName": "UiAutomator2",
  58. "chromedriverExecutable": chromedriverExecutable,
  59. "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
  60. }
  61. try:
  62. self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
  63. except Exception as e:
  64. print(e)
  65. self.aliyun_log.logging(
  66. code="3002",
  67. message=f'appium 启动异常: {e}'
  68. )
  69. return
  70. self.driver.implicitly_wait(30)
  71. for i in range(10):
  72. try:
  73. if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
  74. Local.logger(self.log_type, self.crawler).info("微信启动成功")
  75. self.aliyun_log.logging(
  76. code="1000",
  77. message="启动微信成功"
  78. )
  79. break
  80. elif self.driver.find_element(
  81. By.ID, "com.android.systemui:id/dismiss_view"
  82. ):
  83. Local.logger(self.log_type, self.crawler).info("发现并关闭系统下拉菜单")
  84. # Common.logging(self.log_type, self.crawler, self.env, '发现并关闭系统下拉菜单')
  85. self.aliyun_log.logging(
  86. code="1000",
  87. message="发现并关闭系统下拉菜单"
  88. )
  89. size = self.driver.get_window_size()
  90. self.driver.swipe(
  91. int(size["width"] * 0.5),
  92. int(size["height"] * 0.8),
  93. int(size["width"] * 0.5),
  94. int(size["height"] * 0.2),
  95. 200,
  96. )
  97. else:
  98. pass
  99. except Exception as e:
  100. self.aliyun_log.logging(
  101. code="3001",
  102. message="打开微信异常"
  103. )
  104. time.sleep(1)
  105. Local.logger(self.log_type, self.crawler).info("下滑,展示小程序选择面板")
  106. size = self.driver.get_window_size()
  107. self.driver.swipe(
  108. int(size["width"] * 0.5),
  109. int(size["height"] * 0.2),
  110. int(size["width"] * 0.5),
  111. int(size["height"] * 0.8),
  112. 200,
  113. )
  114. command = 'adb shell service call statusbar 2'
  115. process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
  116. process.communicate()
  117. time.sleep(1)
  118. self.driver.find_elements(By.XPATH, '//*[@text="祝福好运暴富"]')[-1].click()
  119. self.aliyun_log.logging(
  120. code="1000",
  121. message="打开小程序 祝福好运暴富 成功"
  122. )
  123. time.sleep(5)
  124. self.get_videoList()
  125. time.sleep(1)
  126. self.driver.quit()
  127. def repeat_video(self, out_video_id, video_title):
  128. sql = f""" select * from crawler_video where platform = "{self.platform}" and out_video_id="{out_video_id}" and video_title="{video_title}"; """
  129. repeat_video = MysqlHelper.select(sql=sql)
  130. if repeat_video:
  131. message = "重复的视频"
  132. print(message)
  133. return False
  134. return True
  135. def search_elements(self, xpath):
  136. time.sleep(1)
  137. windowHandles = self.driver.window_handles
  138. for handle in windowHandles:
  139. self.driver.switch_to.window(handle)
  140. time.sleep(1)
  141. try:
  142. elements = self.driver.find_elements(By.XPATH, xpath)
  143. if elements:
  144. return elements
  145. except NoSuchElementException:
  146. pass
  147. def check_to_applet(self, xpath):
  148. time.sleep(1)
  149. webViews = self.driver.contexts
  150. self.driver.switch_to.context(webViews[-1])
  151. windowHandles = self.driver.window_handles
  152. for handle in windowHandles:
  153. self.driver.switch_to.window(handle)
  154. time.sleep(1)
  155. try:
  156. self.driver.find_element(By.XPATH, xpath)
  157. print("切换到WebView成功\n")
  158. return
  159. except NoSuchElementException:
  160. time.sleep(1)
  161. def swipe_up(self):
  162. self.search_elements('//*[@class="dynamic--album"]')
  163. size = self.driver.get_window_size()
  164. action = TouchAction(self.driver)
  165. action.press(x=int(size["width"] * 0.2), y=int(size["height"] * 0.8))
  166. action.wait(ms=200) # 可以调整等待时间
  167. action.move_to(x=int(size["width"] * 0.2), y=int(size["height"] * 0.8))
  168. action.release()
  169. action.perform()
  170. self.swipe_count += 1
  171. def get_video_url(self, video_title_element):
  172. for i in range(3):
  173. self.search_elements('//*[@class="dynamic--title"]')
  174. time.sleep(1)
  175. self.driver.execute_script(
  176. "arguments[0].scrollIntoView({block:'center',inline:'center'});",
  177. video_title_element[0],
  178. )
  179. time.sleep(3)
  180. video_title_element[0].click()
  181. self.check_to_applet(
  182. xpath=r'//wx-video[@class="index--video-item index--video"]'
  183. )
  184. time.sleep(10)
  185. video_url_elements = self.search_elements(
  186. '//wx-video[@class="index--video-item index--video"]'
  187. )
  188. return video_url_elements[0].get_attribute("src")
  189. def parse_detail(self, index):
  190. self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
  191. page_source = self.driver.page_source
  192. soup = BeautifulSoup(page_source, "html.parser")
  193. soup.prettify()
  194. video_list = soup.findAll(
  195. name="wx-view", attrs={"class": "expose--adapt-parent"}
  196. )
  197. element_list = [i for i in video_list][index:]
  198. return element_list[0]
  199. def get_video_info_2(self, video_element):
  200. self.count += 1
  201. # 获取 trace_id, 并且把该 id 当做视频生命周期唯一索引
  202. trace_id = self.crawler + str(uuid.uuid1())
  203. self.aliyun_log.logging(
  204. code="1001",
  205. trace_id=trace_id,
  206. message="扫描到一条视频",
  207. )
  208. # 标题
  209. video_title = video_element.find("wx-view", class_="dynamic--title").text
  210. # 播放量字符串
  211. play_str = video_element.find("wx-view", class_="dynamic--views").text
  212. user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
  213. # 头像 URL
  214. avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
  215. # 封面 URL
  216. cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
  217. play_cnt = int(play_str.replace("\n", ""))
  218. if play_cnt < 10000:
  219. return
  220. out_video_id = md5(video_title.encode("utf8")).hexdigest()
  221. out_user_id = md5(user_name.encode("utf8")).hexdigest()
  222. repeat_video = self.repeat_video(out_video_id, video_title)
  223. if repeat_video == False:
  224. return
  225. video_dict = {
  226. "video_title": video_title,
  227. "video_id": out_video_id,
  228. "out_video_id": out_video_id,
  229. "duration_str": 0,
  230. "duration": 0,
  231. "play_str": play_str,
  232. "play_cnt": play_cnt,
  233. "like_str": 0,
  234. "like_cnt": 0,
  235. "comment_cnt": 0,
  236. "share_cnt": 0,
  237. "user_name": user_name,
  238. "user_id": out_user_id,
  239. "publish_time_stamp": int(time.time()),
  240. "publish_time_str": time.strftime(
  241. "%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))
  242. ),
  243. "update_time_stamp": int(time.time()),
  244. "avatar_url": avatar_url,
  245. "cover_url": cover_url,
  246. "session": f"zhufuhaoyunbaofu-{int(time.time())}",
  247. }
  248. pipeline = PiaoQuanPipelineTest(
  249. platform=self.crawler,
  250. mode=self.log_type,
  251. item=video_dict,
  252. rule_dict=self.rule_dict,
  253. env=self.env,
  254. trace_id=trace_id,
  255. )
  256. flag = pipeline.process_item()
  257. if flag:
  258. video_title_element = self.search_elements(
  259. f'//*[contains(text(), "{video_title}")]'
  260. )
  261. if video_title_element is None:
  262. return
  263. Local.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
  264. self.aliyun_log.logging(
  265. code="1000",
  266. message="点击标题,进入视频详情页",
  267. )
  268. video_url = self.get_video_url(video_title_element)
  269. video_url = get_redirect_url(video_url)
  270. if video_url is None:
  271. self.driver.press_keycode(AndroidKey.BACK)
  272. time.sleep(5)
  273. return
  274. video_dict["video_url"] = video_url
  275. video_dict["platform"] = self.crawler
  276. video_dict["strategy"] = self.log_type
  277. video_dict["out_video_id"] = video_dict["video_id"]
  278. video_dict["crawler_rule"] = json.dumps(self.rule_dict)
  279. video_dict["user_id"] = self.our_uid
  280. video_dict["publish_time"] = video_dict["publish_time_str"]
  281. self.download_cnt += 1
  282. self.mq.send_msg(video_dict)
  283. self.aliyun_log.logging(
  284. code="1002",
  285. message="成功发送至ETL",
  286. data=video_dict
  287. )
  288. self.download_cnt += 1
  289. self.driver.press_keycode(AndroidKey.BACK)
  290. time.sleep(5)
  291. def get_video_info(self, video_element):
  292. try:
  293. self.get_video_info_2(video_element)
  294. except Exception as e:
  295. self.driver.press_keycode(AndroidKey.BACK)
  296. Local.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
  297. self.aliyun_log.logging(
  298. code="3001",
  299. message=f"抓取单条视频异常:{e}\n"
  300. )
  301. def get_videoList(self):
  302. self.driver.press_keycode(AndroidKey.BACK)
  303. time.sleep(40)
  304. # 关闭广告
  305. x = 993
  306. y = 72
  307. self.driver.tap([(x, y)])
  308. """
  309. 获取视频列表
  310. :return:
  311. """
  312. self.driver.implicitly_wait(20)
  313. # 切换到 web_view
  314. self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
  315. print("切换到 webview 成功")
  316. time.sleep(1)
  317. if self.search_elements('//*[@class="expose--adapt-parent"]') is None:
  318. self.aliyun_log.logging(
  319. code="3000",
  320. message="窗口已销毁"
  321. )
  322. self.count = 0
  323. self.download_cnt = 0
  324. self.element_list = []
  325. return
  326. print("开始获取视频信息")
  327. for i in range(50):
  328. print("下滑{}次".format(i))
  329. element = self.parse_detail(i)
  330. self.get_video_info(element)
  331. if i >= 3 and (i - 3) % 4 == 0:
  332. self.swipe_up()
  333. time.sleep(random.randint(1, 5))
  334. Local.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
  335. self.aliyun_log.logging(
  336. code="1000",
  337. message="已抓取完一组,休眠 5 秒\n",
  338. )
  339. time.sleep(5)
  340. if __name__ == "__main__":
  341. rule_dict1 = {"period": {"min": 0, "max": 365},
  342. "duration": {"min": 0, "max": 1800},
  343. "favorite_cnt": {"min": 0, "max": 0},
  344. "videos_cnt": {"min": 0, "max": 0},
  345. "share_cnt": {"min": 0, "max": 0}}
  346. ZFHYBFRecommend("recommend", "zhufuhaoyunbaofu", "prod", rule_dict1, [64120158, 64120157, 63676778])