zhufuhaoyunbaofu.py

# -*- coding: utf-8 -*-
import json
import multiprocessing
import os
import random
import subprocess
import sys
import time
import uuid
from hashlib import md5

from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from appium.webdriver.common.touch_action import TouchAction
from bs4 import BeautifulSoup
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())
# Project-internal modules, resolved via the cwd appended to sys.path above
from application.functions import get_redirect_url
from application.pipeline import PiaoQuanPipelineTest, PiaoQuanPipeline
from application.common.log import AliyunLogger, Local
from application.common import MysqlHelper, Feishu

class ZFHYBFRecommend(object):
    """
    北京银时光技术有限公司 - offline crawler for the "祝福好运暴富" WeChat mini program
    """

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = None  # message queue client; must be assigned before send_msg() is called
        self.platform = "zhufuhaoyunbaofu"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        chromedriverExecutable = "/Users/tzld/Downloads/chromedriver_mac64/chromedriver"
        self.aliyun_log = AliyunLogger(platform=crawler, mode=log_type, env=env)
        print("启动微信")
        # Appium desired capabilities for driving WeChat
        caps = {
            "platformName": "Android",
            "deviceName": "Android",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": True,
            "noReset": True,
            "resetKeyboard": True,
            "unicodeKeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
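        # setWebContentsDebuggingEnabled together with the chromeOptions androidProcess
        # entry lets chromedriver attach to the com.tencent.mm:appbrand0 process, where
        # WeChat renders mini-program WebViews, so the page_source and XPath lookups
        # further down can see the mini program's DOM.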
        try:
            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        except Exception as e:
            print(e)
            self.aliyun_log.logging(
                code="3002",
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)
        for i in range(10):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Local.logger(self.log_type, self.crawler).info("微信启动成功")
                    self.aliyun_log.logging(
                        code="1000",
                        message="启动微信成功"
                    )
                    break
                elif self.driver.find_element(
                    By.ID, "com.android.systemui:id/dismiss_view"
                ):
                    Local.logger(self.log_type, self.crawler).info("发现并关闭系统下拉菜单")
                    # Common.logging(self.log_type, self.crawler, self.env, '发现并关闭系统下拉菜单')
                    self.aliyun_log.logging(
                        code="1000",
                        message="发现并关闭系统下拉菜单"
                    )
                    size = self.driver.get_window_size()
                    self.driver.swipe(
                        int(size["width"] * 0.5),
                        int(size["height"] * 0.8),
                        int(size["width"] * 0.5),
                        int(size["height"] * 0.2),
                        200,
                    )
                else:
                    pass
            except Exception as e:
                self.aliyun_log.logging(
                    code="3001",
                    message="打开微信异常"
                )
            time.sleep(1)
        Local.logger(self.log_type, self.crawler).info("下滑,展示小程序选择面板")
        size = self.driver.get_window_size()
        # Swipe down from the top of the home screen to open the mini-program panel
        self.driver.swipe(
            int(size["width"] * 0.5),
            int(size["height"] * 0.2),
            int(size["width"] * 0.5),
            int(size["height"] * 0.8),
            200,
        )
        # Collapse the system status bar panel via adb, in case the swipe pulled it down
        command = 'adb shell service call statusbar 2'
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        process.communicate()
        time.sleep(1)
        self.driver.find_elements(By.XPATH, '//*[@text="祝福好运暴富"]')[-1].click()
        self.aliyun_log.logging(
            code="1000",
            message="打开小程序 祝福好运暴富 成功"
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()
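
    # Note: the constructor above drives the whole crawl end to end. It launches
    # WeChat, opens the mini program, calls get_videoList(), and finally quits the
    # driver, so simply instantiating ZFHYBFRecommend triggers a crawl run.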

    def search_elements(self, xpath):
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass
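
    # search_elements() walks every window handle of the current WebView context
    # because each mini-program page can live in its own window. It stays on the
    # first window that matches the XPath and returns its elements, or returns
    # None implicitly when nothing matches in any window.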

    def check_to_applet(self, xpath):
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                print("切换到WebView成功\n")
                return
            except NoSuchElementException:
                time.sleep(1)
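
    # driver.contexts normally lists NATIVE_APP followed by one WEBVIEW_* entry per
    # debuggable process; the code above assumes the last entry is the mini-program
    # WebView, then probes each window for the given XPath to confirm the switch.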

    def swipe_up(self):
        self.search_elements('//*[@class="dynamic--album"]')
        size = self.driver.get_window_size()
        action = TouchAction(self.driver)
        action.press(x=int(size["width"] * 0.2), y=int(size["height"] * 0.8))
        action.wait(ms=200)  # hold time; adjust if the gesture is flaky
        # Move towards the top of the screen so the gesture actually scrolls the feed;
        # the original moved to the press point itself, which performs no scroll.
        action.move_to(x=int(size["width"] * 0.2), y=int(size["height"] * 0.2))
        action.release()
        action.perform()
        self.swipe_count += 1

    def get_video_url(self, video_title_element):
        # Note: the function returns inside the first pass, so the range(3) loop
        # never actually retries.
        for i in range(3):
            self.search_elements('//*[@class="dynamic--title"]')
            time.sleep(1)
            self.driver.execute_script(
                "arguments[0].scrollIntoView({block:'center',inline:'center'});",
                video_title_element[0],
            )
            time.sleep(3)
            video_title_element[0].click()
            self.check_to_applet(
                xpath=r'//wx-video[@class="index--video-item index--video"]'
            )
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//wx-video[@class="index--video-item index--video"]'
            )
            return video_url_elements[0].get_attribute("src")
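
    # The wx-video element's src attribute is used as the raw video URL. It is later
    # passed through get_redirect_url() in get_video_info_2(); that helper is
    # project-internal, and judging by its name it resolves any redirect to the
    # final address before the item is sent downstream.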

    def parse_detail(self, index):
        self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, "html.parser")
        soup.prettify()
        video_list = soup.findAll(
            name="wx-view", attrs={"class": "expose--adapt-parent"}
        )
        element_list = [i for i in video_list][index:]
        return element_list[0]
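
    # The WebView's page_source is ordinary HTML with wx-* custom tags, so
    # BeautifulSoup can parse it directly. Slicing with [index:] skips the feed
    # cards that earlier iterations of get_videoList() have already processed,
    # and element_list[0] raises IndexError once no new card is on screen.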

    def get_video_info_2(self, video_element):
        self.count += 1
        # Generate a trace_id and use it as the unique index for this video's lifecycle
        trace_id = self.crawler + str(uuid.uuid1())
        self.aliyun_log.logging(
            code="1001",
            trace_id=trace_id,
            message="扫描到一条视频",
        )
        # Title
        video_title = video_element.find("wx-view", class_="dynamic--title").text
        # Play-count string
        play_str = video_element.find("wx-view", class_="dynamic--views").text
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        # Avatar URL
        avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
        # Cover URL
        cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
        play_cnt = int(play_str.replace("\n", ""))
        # Skip videos with fewer than 10,000 plays
        if play_cnt < 10000:
            return
        out_video_id = md5(video_title.encode("utf8")).hexdigest()
        out_user_id = md5(user_name.encode("utf8")).hexdigest()
        video_dict = {
            "video_title": video_title,
            "video_id": out_video_id,
            "out_video_id": out_video_id,
            "duration_str": 0,
            "duration": 0,
            "play_str": play_str,
            "play_cnt": play_cnt,
            "like_str": 0,
            "like_cnt": 0,
            "comment_cnt": 0,
            "share_cnt": 0,
            "user_name": user_name,
            "user_id": out_user_id,
            "publish_time_stamp": int(time.time()),
            "publish_time_str": time.strftime(
                "%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))
            ),
            "update_time_stamp": int(time.time()),
            "avatar_url": avatar_url,
            "cover_url": cover_url,
            "session": f"zhufuhaoyunbaofu-{int(time.time())}",
        }
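        # PiaoQuanPipeline is a project-internal validator; based on its arguments it
        # presumably applies the rule_dict limits and duplicate checks and returns a
        # flag saying whether the video should be kept (exact behavior is not shown
        # in this file).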
        pipeline = PiaoQuanPipeline(
            platform=self.crawler,
            mode=self.log_type,
            item=video_dict,
            rule_dict=self.rule_dict,
            env=self.env,
            trace_id=trace_id,
        )
        flag = pipeline.process_item()
        if flag:
            video_title_element = self.search_elements(
                f'//*[contains(text(), "{video_title}")]'
            )
            if video_title_element is None:
                return
            Local.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
            self.aliyun_log.logging(
                code="1000",
                message="点击标题,进入视频详情页",
            )
            video_url = self.get_video_url(video_title_element)
            video_url = get_redirect_url(video_url)
            if video_url is None:
                self.driver.press_keycode(AndroidKey.BACK)
                time.sleep(5)
                return
            video_dict["video_url"] = video_url
            video_dict["platform"] = self.crawler
            video_dict["strategy"] = self.log_type
            video_dict["out_video_id"] = video_dict["video_id"]
            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
            video_dict["user_id"] = self.our_uid
            video_dict["publish_time"] = video_dict["publish_time_str"]
            values = [[
                video_dict["video_id"],
                time.strftime(
                    "%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))
                ),
                video_title,
                cover_url,
                video_url,
                play_cnt
            ]]
            # Write a tracking row to the Feishu sheet, then push the item to the ETL queue
            Feishu.insert_columns('xiaoniangao', 'xiaoniangao', "FPJe7M", "ROWS", 1, 2)
            time.sleep(0.5)
            Feishu.update_values('xiaoniangao', 'xiaoniangao', "FPJe7M", "A2:Z2", values)
            self.mq.send_msg(video_dict)
            self.aliyun_log.logging(
                code="1002",
                message="成功发送至ETL",
                data=video_dict
            )
            self.download_cnt += 1
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(5)

    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            # Back out of whatever page we are on so the next card can be processed
            self.driver.press_keycode(AndroidKey.BACK)
            Local.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
            self.aliyun_log.logging(
                code="3001",
                message=f"抓取单条视频异常:{e}\n"
            )

    def get_videoList(self):
        """
        Fetch the video feed and process each card in turn.
        :return:
        """
        self.driver.press_keycode(AndroidKey.BACK)
        time.sleep(40)
        # Close the ad by tapping its close button at fixed screen coordinates
        x = 993
        y = 72
        self.driver.tap([(x, y)])
        self.driver.implicitly_wait(20)
        # Switch into the mini program's WebView
        self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
        print("切换到 webview 成功")
        time.sleep(1)
        if self.search_elements('//*[@class="expose--adapt-parent"]') is None:
            self.aliyun_log.logging(
                code="3000",
                message="窗口已销毁"
            )
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        print("开始获取视频信息")
        for i in range(50):
            print("下滑{}次".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            # Scroll the feed on every fourth card, starting from the fourth one
            if i >= 3 and (i - 3) % 4 == 0:
                self.swipe_up()
                time.sleep(random.randint(1, 5))
        Local.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
        self.aliyun_log.logging(
            code="1000",
            message="已抓取完一组,休眠 5 秒\n",
        )
        time.sleep(5)
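
# Minimal local run. It assumes an Appium server is listening on localhost:4723,
# an Android device with WeChat installed is reachable via adb, and the
# project-internal application.* modules are importable from the working directory.
# The our_uid list is passed straight through to video_dict["user_id"].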
if __name__ == "__main__":
    rule_dict1 = {"period": {"min": 0, "max": 365},
                  "duration": {"min": 0, "max": 1800},
                  "favorite_cnt": {"min": 0, "max": 0},
                  "videos_cnt": {"min": 0, "max": 0},
                  "share_cnt": {"min": 0, "max": 0}}
    ZFHYBFRecommend("recommend", "zhufuhaoyunbaofu", "prod", rule_dict1, [64120158, 64120157, 63676778])