# zhufuquanzi_recommend_new2.py
# -*- coding: utf-8 -*-
# @Time: 2023/11/17
import json
import os
import random
import sys
import time
import uuid
from datetime import datetime, timedelta
from hashlib import md5

from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from appium.webdriver.webdriver import WebDriver
from bs4 import BeautifulSoup
from selenium.common import NoSuchElementException
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())
from common import AliyunLogger, PiaoQuanPipeline, get_redirect_url
from common.common import Common
from common.mq import MQ
from common.scheduling_db import MysqlHelper


class ZFQZRecommendNew:
    env = None
    driver = None
    log_type = None

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = None
        self.platform = "zhufuquanzi"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        # dev and prod currently point at the same local chromedriver binary
        if self.env == "dev":
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        else:
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        Common.logger(self.log_type, self.crawler).info("启动微信")
        # Common.logging(self.log_type, self.crawler, self.env, '启动微信')
        # Appium capabilities for driving WeChat
        caps = {
            "platformName": "Android",
            "deviceName": "Android",
            # "platformVersion": "11",
            # "udid": "emulator-5554",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            "resetKeyboard": True,
            "unicodeKeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
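        # Notes on the capability set above (a reading of this config, not an authoritative spec):
        # - "appPackage"/"appActivity" launch WeChat itself; the 祝福圈子 mini program is opened
        #   later through UI automation.
        # - "chromeOptions.androidProcess" points chromedriver at com.tencent.mm:appbrand0, the
        #   process that hosts mini-program WebViews, so the script can later switch into the
        #   mini program's webview context.
        # - "newCommandTimeout": 6000 (seconds) keeps the session from timing out across the
        #   long sleeps used further down.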
        try:
            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        except Exception as e:
            print(e)
            AliyunLogger.logging(
                code="3002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)
        for i in range(10):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Common.logger(self.log_type, self.crawler).info("微信启动成功")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="启动微信成功"
                    )
                    break
                elif self.driver.find_element(By.ID, "com.android.systemui:id/dismiss_view"):
                    Common.logger(self.log_type, self.crawler).info("发现并关闭系统下拉菜单")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="发现并关闭系统下拉菜单"
                    )
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.8),
                                      int(size['width'] * 0.5), int(size['height'] * 0.2), 200)
                else:
                    pass
            except Exception as e:
                AliyunLogger.logging(
                    code="3001",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message=f"打开微信异常:{e}"
                )
            time.sleep(1)
        Common.logger(self.log_type, self.crawler).info("下滑,展示小程序选择面板")
        size = self.driver.get_window_size()
        self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.2),
                          int(size['width'] * 0.5), int(size['height'] * 0.8), 200)
        time.sleep(1)
        time.sleep(1)
        Common.logger(log_type, crawler).info('打开小程序"祝福圈子"')
        self.driver.find_elements(By.XPATH, '//*[@text="祝福圈子"]')[-1].click()
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=log_type,
            env=env,
            message='打开小程序"祝福圈子"成功'
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()
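
    # check_to_applet: once the mini program has opened, the driver is typically still in the
    # native context. The method below switches to the last WebView context and walks the
    # window handles until the given xpath is present, which signals that the 祝福圈子 page
    # has finished rendering.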
    def check_to_applet(self, xpath):
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Common.logger(self.log_type, self.crawler).info("切换到WebView成功\n")
                AliyunLogger.logging(
                    code="1000",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message="成功切换到 webview"
                )
                return
            except NoSuchElementException:
                time.sleep(1)

    def swipe_up(self):
        self.search_elements('//*[@class="bless--list"]')
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                          int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
        self.swipe_count += 1

    def parse_detail(self, index):
        # Parse the current WebView DOM with BeautifulSoup and return the index-th feed card
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, 'html.parser')
        soup.prettify()
        video_list = soup.findAll(name="wx-view", attrs={"class": "expose--adapt-parent"})
        element_list = list(video_list)[index:]
        return element_list[0]
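
    # get_videoList drives one crawl round: switch into the webview, pick either the 推荐 or
    # 春节 tab at random, then loop up to 20 times: re-parse the DOM, take the i-th card via
    # parse_detail, process it with get_video_info, and swipe up to load more. The
    # swipe_count > 100 check is a hard stop on how far a single round can scroll.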
    def get_videoList(self):
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.driver.implicitly_wait(20)
        self.check_to_applet(xpath='//*[@class="tags--tag tags--tag-0 tags--checked"]')
        time.sleep(1)
        name = ["推荐", "春节"]
        selected_text = random.choice(name)
        try:
            self.driver.find_element(By.XPATH, f"//wx-button[contains(., '{selected_text}')]").click()
            time.sleep(2)
        except NoSuchElementException:
            Common.logger(self.log_type, self.crawler).info(f"没有该tab:{selected_text}\n")
        print("开始获取视频信息")
        for i in range(20):
            print("下滑{}次".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            if self.swipe_count > 100:
                return
        print("下滑完成")
        # time.sleep(100)
        Common.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            message="已抓取完一组,休眠 5 秒\n",
        )
        time.sleep(5)

    def search_elements(self, xpath):
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass

    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            self.driver.press_keycode(AndroidKey.BACK)
            Common.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
            AliyunLogger.logging(
                code="3001",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f"抓取单条视频异常:{e}\n"
            )
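
    # get_video_url opens a feed card's detail page and reads the playable URL: scroll the
    # matched title element into view, click it, switch to the detail-page webview, then
    # return the src attribute of the index--video-item element. The range(3) loop is written
    # as a retry, but the unconditional return means only the first attempt runs.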
    def get_video_url(self, video_title_element):
        for i in range(3):
            self.search_elements('//*[@class="bless--list"]')
            Common.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Common.logger(self.log_type, self.crawler).info("滑动标题至可见状态")
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Common.logger(self.log_type, self.crawler).info("点击标题")
            video_title_element[0].click()
            time.sleep(5)
            self.check_to_applet(xpath='//*[@class="index--video-item index--video"]')
            Common.logger(self.log_type, self.crawler).info("点击标题完成")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//*[@class="index--video-item index--video"]')
            Common.logger(self.log_type, self.crawler).info(f"{video_url_elements[0].get_attribute('src')}")
            return video_url_elements[0].get_attribute('src')

    def repeat_video(self, out_video_id):
        # Returns False if the same out_video_id was stored more than 7 days ago, True otherwise
        current_time = datetime.now()
        previous_day = current_time - timedelta(days=7)
        formatted_time = previous_day.strftime("%Y-%m-%d")
        sql = f""" select * from crawler_video where platform = "{self.platform}" and out_video_id="{out_video_id}" and create_time <= '{formatted_time}'; """
        repeat_video = MysqlHelper.get_values(
            log_type=self.log_type, crawler=self.platform, env=self.env, sql=sql, action=""
        )
        if repeat_video:
            return False
        return True
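
    # get_video_info_2 does the real per-card work: stop once download_cnt reaches
    # rule_dict["videos_cnt"]["min"] (default 10), scrape title/plays/duration/author/cover
    # from the BeautifulSoup element, build video_dict, run it through PiaoQuanPipeline, and
    # if it passes, click into the detail page, resolve the real video_url and push the item
    # to the ETL message queue. out_video_id is the md5 of the title; if repeat_video() says
    # it was already stored, a timestamp suffix is appended to keep the id unique.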
    def get_video_info_2(self, video_element):
        Common.logger(self.log_type, self.crawler).info(f"本轮已抓取{self.download_cnt}条视频\n")
        # Common.logging(self.log_type, self.crawler, self.env, f"本轮已抓取{self.download_cnt}条视频\n")
        if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        self.count += 1
        Common.logger(self.log_type, self.crawler).info(f"第{self.count}条视频")
        # Generate a trace_id and use it as the unique index for this video's lifecycle
        trace_id = self.crawler + str(uuid.uuid1())
        video_title = video_element.find("wx-view", class_="dynamic--title").text
        play_str = video_element.find("wx-view", class_="dynamic--views").text
        # like_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[0].text
        # comment_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[1].text
        duration_str = video_element.find("wx-view", class_="dynamic--duration").text
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
        cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
        play_cnt = int(play_str.replace("+", "").replace("次播放", ""))
        duration = int(duration_str.split(":")[0].strip()) * 60 + int(duration_str.split(":")[-1].strip())
        # if "点赞" in like_str:
        #     like_cnt = 0
        # elif "万" in like_str:
        #     like_cnt = int(like_str.split("万")[0]) * 10000
        # else:
        #     like_cnt = int(like_str)
        # if "评论" in comment_str:
        #     comment_cnt = 0
        # elif "万" in comment_str:
        #     comment_cnt = int(comment_str.split("万")[0]) * 10000
        # else:
        #     comment_cnt = int(comment_str)
        out_video_id = md5(video_title.encode('utf8')).hexdigest()
        out_user_id = md5(user_name.encode('utf8')).hexdigest()
        repeat_id = self.repeat_video(out_video_id)
        if not repeat_id:
            # Already stored: append a timestamp so the id stays unique downstream
            num = time.time()
            out_video_id = out_video_id + str(num)
        video_dict = {
            "video_title": video_title,
            "video_id": out_video_id,
            'out_video_id': out_video_id,
            "duration_str": duration_str,
            "duration": duration,
            "play_str": play_str,
            "play_cnt": play_cnt,
            "like_str": "",
            "like_cnt": 50,
            "comment_cnt": 0,
            "share_cnt": 50,
            "user_name": user_name,
            "user_id": out_user_id,
            'publish_time_stamp': int(time.time()),
            'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
            'update_time_stamp': int(time.time()),
            "avatar_url": avatar_url,
            "cover_url": cover_url,
            "session": f"zhufuquanzi-{int(time.time())}"
        }
        AliyunLogger.logging(
            code="1001",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            trace_id=trace_id,
            message="扫描到一条视频",
            data=video_dict
        )
        pipeline = PiaoQuanPipeline(
            platform=self.crawler,
            mode=self.log_type,
            item=video_dict,
            rule_dict=self.rule_dict,
            env=self.env,
            trace_id=trace_id
        )
        flag = pipeline.process_item()
        if flag:
            video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
            if video_title_element is None:
                return
            Common.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
            AliyunLogger.logging(
                code="1000",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message="点击标题,进入视频详情页",
            )
            video_url = self.get_video_url(video_title_element)
            video_url = get_redirect_url(video_url)
            if video_url is None:
                self.driver.press_keycode(AndroidKey.BACK)
                time.sleep(5)
                return
            video_dict['video_url'] = video_url
            video_dict['like_cnt'] = 0
            video_dict['share_cnt'] = 0
            video_dict["platform"] = self.crawler
            video_dict["strategy"] = self.log_type
            video_dict["out_video_id"] = video_dict["video_id"]
            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
            video_dict["user_id"] = self.our_uid
            video_dict["publish_time"] = video_dict["publish_time_str"]
            self.mq.send_msg(video_dict)
            AliyunLogger.logging(
                code="1002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                trace_id=trace_id,
                message="发送到ETL成功",
                data=video_dict
            )
            self.download_cnt += 1
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(5)


if __name__ == "__main__":
    rule_dict1 = {"period": {"min": 0, "max": 0},
                  "duration": {"min": 1, "max": 0},
                  "favorite_cnt": {"min": 0, "max": 0},
                  "videos_cnt": {"min": 0, "max": 0},
                  "share_cnt": {"min": 0, "max": 0}}
    ZFQZRecommendNew("recommend", "zhufuquanzi", "dev", rule_dict1, 6267141)
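
# Rough prerequisites for running this script directly (inferred from the code above, not
# guaranteed to be complete): an Appium server listening on http://localhost:4723/wd/hub,
# an Android device or emulator with WeChat installed and logged in, a chromedriver binary at
# the path given in chromedriverExecutable that matches the device's WebView version, and the
# project's common/ package importable from the working directory.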