# zhufuquanzi_recommend_new2.py

# -*- coding: utf-8 -*-
# @Time: 2023/11/17
import json
import os
import random
import sys
import time
import uuid
import subprocess
from datetime import datetime, timedelta
from hashlib import md5

from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from appium.webdriver.webdriver import WebDriver
from bs4 import BeautifulSoup
from selenium.common import NoSuchElementException
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())
from common import AliyunLogger, PiaoQuanPipeline, get_redirect_url
from common.common import Common
from common.mq import MQ
from common.scheduling_db import MysqlHelper


class ZFQZRecommendNew:
    env = None
    driver = None
    log_type = None

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = None
        self.platform = "zhufuquanzi"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        if self.env == "dev":
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        else:
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver-mac-x64/chromedriver"
        Common.logger(self.log_type, self.crawler).info("启动微信")
        # Common.logging(self.log_type, self.crawler, self.env, '启动微信')
        # WeChat / Appium capability configuration
        caps = {
            "platformName": "Android",
            "deviceName": "Android",
            # "platformVersion": "11",
            # "udid": "emulator-5554",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            "resetKeyboard": True,
            "unicodeKeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
        try:
            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        except Exception as e:
            print(e)
            AliyunLogger.logging(
                code="3002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)
        for i in range(10):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Common.logger(self.log_type, self.crawler).info("微信启动成功")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="启动微信成功"
                    )
                    break
                elif self.driver.find_element(By.ID, "com.android.systemui:id/dismiss_view"):
                    Common.logger(self.log_type, self.crawler).info("发现并关闭系统下拉菜单")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="发现并关闭系统下拉菜单"
                    )
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.8),
                                      int(size['width'] * 0.5), int(size['height'] * 0.2), 100)
                else:
                    pass
            except Exception as e:
                AliyunLogger.logging(
                    code="3001",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message=f"打开微信异常:{e}"
                )
            time.sleep(1)
        Common.logger(self.log_type, self.crawler).info("下滑,展示小程序选择面板")
        size = self.driver.get_window_size()
        self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.2),
                          int(size['width'] * 0.5), int(size['height'] * 0.8), 200)
        # Collapse any system status bar panel via adb
        command = 'adb shell service call statusbar 2'
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        process.communicate()
        time.sleep(1)
        Common.logger(log_type, crawler).info('打开小程序"祝福圈子"')
        self.driver.find_elements(By.XPATH, '//*[@text="祝福圈子"]')[-1].click()
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=log_type,
            env=env,
            message='打开小程序"祝福圈子"成功'
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()
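
    # check_to_applet(): the mini program's pages render inside a WebView, so the
    # crawler switches the Appium session to the most recently opened context and
    # then walks every window handle until the given XPath resolves in one of them.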
    def check_to_applet(self, xpath):
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Common.logger(self.log_type, self.crawler).info("切换到WebView成功\n")
                AliyunLogger.logging(
                    code="1000",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message="成功切换到 webview"
                )
                return
            except NoSuchElementException:
                time.sleep(1)

    def swipe_up(self):
        self.search_elements('//*[@class="bless--list"]')
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                          int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
        self.swipe_count += 1

    def parse_detail(self, index):
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, 'html.parser')
        soup.prettify()
        video_list = soup.findAll(name="wx-view", attrs={"class": "expose--adapt-parent"})
        element_list = [i for i in video_list][index:]
        return element_list[0]
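
    # get_videoList(): switches into the applet WebView, taps the "推荐" tab when it
    # is present, then loops over 20 feed cards, each round parsing the next card
    # from the page source (parse_detail), handing it to get_video_info, and
    # swiping up to load more content (bailing out once more than 100 swipes have
    # accumulated).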
    def get_videoList(self):
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.driver.implicitly_wait(20)
        self.check_to_applet(xpath='//*[@class="tags--tag tags--tag-0 tags--checked"]')
        time.sleep(1)
        name = ["推荐"]
        selected_text = random.choice(name)
        try:
            self.driver.find_element(By.XPATH, f"//wx-button[contains(., '{selected_text}')]").click()
            time.sleep(2)
        except NoSuchElementException:
            Common.logger(self.log_type, self.crawler).info(f"没有该tab:{selected_text}\n")
        print("开始获取视频信息")
        for i in range(20):
            print("下滑{}次".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            if self.swipe_count > 100:
                return
        print("下滑完成")
        # time.sleep(100)
        Common.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            message="已抓取完一组,休眠 5 秒\n",
        )
        time.sleep(5)

    def search_elements(self, xpath):
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass

    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            self.driver.press_keycode(AndroidKey.BACK)
            Common.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
            AliyunLogger.logging(
                code="3001",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f"抓取单条视频异常:{e}\n"
            )

    def get_video_url(self, video_title_element):
        for i in range(3):
            self.search_elements('//*[@class="bless--list"]')
            Common.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Common.logger(self.log_type, self.crawler).info("滑动标题至可见状态")
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Common.logger(self.log_type, self.crawler).info("点击标题")
            video_title_element[0].click()
            time.sleep(5)
            self.check_to_applet(xpath=r'//*[@class="dynamic-index--video-item dynamic-index--video"]')
            Common.logger(self.log_type, self.crawler).info("点击标题完成")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//*[@class="dynamic-index--video-item dynamic-index--video"]')
            Common.logger(self.log_type, self.crawler).info(f"{video_url_elements[0].get_attribute('src')}")
            return video_url_elements[0].get_attribute('src')

    def repeat_video(self, out_video_id):
        current_time = datetime.now()
        previous_day = current_time - timedelta(days=7)
        formatted_time = previous_day.strftime("%Y-%m-%d")
        sql = f""" select * from crawler_video where platform = "{self.platform}" and out_video_id="{out_video_id}" and create_time <= '{formatted_time}'; """
        repeat_video = MysqlHelper.get_values(
            log_type=self.log_type, crawler=self.platform, env=self.env, sql=sql, action=""
        )
        if repeat_video:
            return False
        return True
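
    # get_video_info_2(): parses one feed card with BeautifulSoup, builds
    # video_dict, and runs it through PiaoQuanPipeline before clicking into the
    # detail page for the playable URL. repeat_video() above returns False when
    # the same out_video_id already exists in crawler_video with a create_time at
    # least seven days old; in that case a timestamp is appended to out_video_id
    # so the item is stored as a new record.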
    def get_video_info_2(self, video_element):
        Common.logger(self.log_type, self.crawler).info(f"本轮已抓取{self.download_cnt}条视频\n")
        # Common.logging(self.log_type, self.crawler, self.env, f"本轮已抓取{self.download_cnt}条视频\n")
        if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        self.count += 1
        Common.logger(self.log_type, self.crawler).info(f"第{self.count}条视频")
        # Generate a trace_id and use it as the unique index for this video's lifecycle
        trace_id = self.crawler + str(uuid.uuid1())
        video_title = video_element.find("wx-view", class_="dynamic--title").text
        play_str = video_element.find("wx-view", class_="dynamic--views").text
        # like_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[0].text
        # comment_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[1].text
        duration_str = video_element.find("wx-view", class_="dynamic--duration").text
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
        cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
        play_cnt = int(play_str.replace("+", "").replace("次播放", ""))
        duration = int(duration_str.split(":")[0].strip()) * 60 + int(duration_str.split(":")[-1].strip())
        # if "点赞" in like_str:
        #     like_cnt = 0
        # elif "万" in like_str:
        #     like_cnt = int(like_str.split("万")[0]) * 10000
        # else:
        #     like_cnt = int(like_str)
        # if "评论" in comment_str:
        #     comment_cnt = 0
        # elif "万" in comment_str:
        #     comment_cnt = int(comment_str.split("万")[0]) * 10000
        # else:
        #     comment_cnt = int(comment_str)
        out_video_id = md5(video_title.encode('utf8')).hexdigest()
        out_user_id = md5(user_name.encode('utf8')).hexdigest()
        repeat_id = self.repeat_video(out_video_id)
        if not repeat_id:
            num = time.time()
            out_video_id = out_video_id + str(num)
        video_dict = {
            "video_title": video_title,
            "video_id": out_video_id,
            "out_video_id": out_video_id,
            "duration_str": duration_str,
            "duration": duration,
            "play_str": play_str,
            "play_cnt": play_cnt,
            "like_str": "",
            "like_cnt": 50,
            "comment_cnt": 0,
            "share_cnt": 50,
            "user_name": user_name,
            "user_id": out_user_id,
            "publish_time_stamp": int(time.time()),
            "publish_time_str": time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
            "update_time_stamp": int(time.time()),
            "avatar_url": avatar_url,
            "cover_url": cover_url,
            "session": f"zhufuquanzi-{int(time.time())}"
        }
        AliyunLogger.logging(
            code="1001",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            trace_id=trace_id,
            message="扫描到一条视频",
            data=video_dict
        )
        pipeline = PiaoQuanPipeline(
            platform=self.crawler,
            mode=self.log_type,
            item=video_dict,
            rule_dict=self.rule_dict,
            env=self.env,
            trace_id=trace_id
        )
        flag = pipeline.process_item()
        if flag:
            video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
            if video_title_element is None:
                return
            Common.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
            AliyunLogger.logging(
                code="1000",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message="点击标题,进入视频详情页",
            )
            video_url = self.get_video_url(video_title_element)
            video_url = get_redirect_url(video_url)
            if video_url is None:
                self.driver.press_keycode(AndroidKey.BACK)
                time.sleep(5)
                return
            video_dict['video_url'] = video_url
            video_dict['like_cnt'] = 0
            video_dict['share_cnt'] = 0
            video_dict["platform"] = self.crawler
            video_dict["strategy"] = self.log_type
            video_dict["out_video_id"] = video_dict["video_id"]
            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
            video_dict["user_id"] = self.our_uid
            video_dict["publish_time"] = video_dict["publish_time_str"]
            self.mq.send_msg(video_dict)
            AliyunLogger.logging(
                code="1002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                trace_id=trace_id,
                message="发送到ETL成功",
                data=video_dict
            )
            self.download_cnt += 1
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(5)


if __name__ == "__main__":
    rule_dict1 = {
        "period": {"min": 0, "max": 0},
        "duration": {"min": 1, "max": 0},
        "favorite_cnt": {"min": 0, "max": 0},
        "videos_cnt": {"min": 0, "max": 0},
        "share_cnt": {"min": 0, "max": 0},
    }
    ZFQZRecommendNew("recommend", "zhufuquanzi", "dev", rule_dict1, 6267141)
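
# Runtime assumptions, inferred from the code above rather than enforced by it:
# - an Appium server reachable at http://localhost:4723/wd/hub (the /wd/hub base
#   path suggests Appium 1.x) with the UiAutomator2 driver available;
# - an Android device or emulator with WeChat (com.tencent.mm) signed in and the
#   "祝福圈子" mini program reachable from the pull-down applet panel;
# - adb on PATH, since the status bar is collapsed via
#   "adb shell service call statusbar 2";
# - a chromedriver binary at the env-specific path that matches the device's
#   WebView version.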