# -*- coding: utf-8 -*-
# @Time: 2024/05/08
import json
import os
import random
import sys
import time
import uuid
from datetime import datetime, timedelta
from hashlib import md5

from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from appium.webdriver.webdriver import WebDriver
from bs4 import BeautifulSoup
from selenium.common import NoSuchElementException
from selenium.webdriver.common.by import By

sys.path.append(os.getcwd())
from common import AliyunLogger, PiaoQuanPipeline, get_redirect_url
from common.common import Common
from common.mq import MQ
from common.scheduling_db import MysqlHelper

"""
祝福年糕圈
"""

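# ZFQZRecommendNew drives the WeChat Android app through Appium, opens the
# "祝福年糕圈" mini program, scrolls its recommend feed, parses each exposed
# video card with BeautifulSoup, and pushes qualifying videos to the ETL
# message queue.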
class ZFQZRecommendNew:
    env = None
    driver = None
    log_type = None

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = None
        self.platform = "zhufuquanzi"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        if self.env == "dev":
            chromedriverExecutable = "/Users/tzld/Downloads/chromedriver_mac64/chromedriver"
        else:
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        Common.logger(self.log_type, self.crawler).info("启动微信")
        # Common.logging(self.log_type, self.crawler, self.env, '启动微信')
        # Appium desired capabilities for driving WeChat
        caps = {
            "platformName": "Android",
            "devicesName": "Android",
            # "platformVersion": "11",
            # "udid": "emulator-5554",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            "resetkeyboard": True,
            "unicodekeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
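        # Create the Appium session against the local server; on failure,
        # log code 3002 and abort this crawl run.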
        try:
            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        except Exception as e:
            print(e)
            AliyunLogger.logging(
                code="3002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)
        for i in range(10):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Common.logger(self.log_type, self.crawler).info("微信启动成功")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="启动微信成功"
                    )
                    break
                # use find_elements so a missing dismiss view does not raise
                # (and wait out the implicit timeout) on every iteration
                elif self.driver.find_elements(By.ID, "com.android.systemui:id/dismiss_view"):
                    Common.logger(self.log_type, self.crawler).info("发现并关闭系统下拉菜单")
                    AliyunLogger.logging(
                        code="1000",
                        platform=self.platform,
                        mode=self.log_type,
                        env=self.env,
                        message="发现并关闭系统下拉菜单"
                    )
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.8),
                                      int(size['width'] * 0.5), int(size['height'] * 0.2), 200)
                else:
                    pass
            except Exception as e:
                AliyunLogger.logging(
                    code="3001",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message=f"打开微信异常:{e}"
                )
            time.sleep(1)
        Common.logger(self.log_type, self.crawler).info("下滑,展示小程序选择面板")
        size = self.driver.get_window_size()
        self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.2),
                          int(size['width'] * 0.5), int(size['height'] * 0.8), 200)
        time.sleep(1)
        time.sleep(1)
        Common.logger(log_type, crawler).info('打开小程序"祝福年糕圈"')
        self.driver.find_elements(By.XPATH, '//*[@text="祝福年糕圈"]')[-1].click()
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=log_type,
            env=env,
            message='打开小程序"祝福年糕圈"成功'
        )
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()

    def check_to_applet(self, xpath):
        # Switch into the mini program's WebView context, then walk the window
        # handles until one contains the element identified by xpath.
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Common.logger(self.log_type, self.crawler).info("切换到WebView成功\n")
                AliyunLogger.logging(
                    code="1000",
                    platform=self.platform,
                    mode=self.log_type,
                    env=self.env,
                    message="成功切换到 webview"
                )
                return
            except NoSuchElementException:
                time.sleep(1)

    def swipe_up(self):
        self.search_elements('//*[@class="expose--adapt-parent"]')
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                          int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
        self.swipe_count += 1

    def parse_detail(self, index):
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, 'html.parser')
        soup.prettify()
        video_list = soup.findAll(name="wx-view", attrs={"class": "expose--adapt-parent"})
        element_list = [i for i in video_list][index:]
        return element_list[0]

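    # get_videoList: switch into the applet WebView, then swipe through the
    # recommend feed up to 20 times, handing each newly exposed card to
    # get_video_info().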
    def get_videoList(self):
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.driver.implicitly_wait(20)
        self.check_to_applet(xpath='//*[@class="expose--adapt-parent"]')
        time.sleep(1)
        # name = ["推荐", "春节"]
        # selected_text = random.choice(name)
        # try:
        #     self.driver.find_element(By.XPATH, f"//wx-button[contains(., '{selected_text}')]").click()
        #     time.sleep(2)
        # except NoSuchElementException:
        #     Common.logger(self.log_type, self.crawler).info(f"没有该tab:{selected_text}\n")
        #     pass
        print("开始获取视频信息")
        for i in range(20):
            print("下滑{}次".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            if self.swipe_count > 100:
                return
        print("下滑完成")
        Common.logger(self.log_type, self.crawler).info("已抓取完一组,休眠 5 秒\n")
        AliyunLogger.logging(
            code="1000",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            message="已抓取完一组,休眠 5 秒\n",
        )
        time.sleep(5)

    def search_elements(self, xpath):
        # Look for xpath in every window handle; return the matching elements,
        # or None (implicitly) if nothing is found.
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass

    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            self.driver.press_keycode(AndroidKey.BACK)
            Common.logger(self.log_type, self.crawler).error(f"抓取单条视频异常:{e}\n")
            AliyunLogger.logging(
                code="3001",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f"抓取单条视频异常:{e}\n"
            )

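    # get_video_url: scroll the card title into view, click it to open the
    # detail page, wait for the player to load, then return the src attribute
    # of the element whose class is "index--video-item index--video".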
    def get_video_url(self, video_title_element):
        for i in range(3):
            self.search_elements('//*[@class="bless--list"]')
            Common.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Common.logger(self.log_type, self.crawler).info("滑动标题至可见状态")
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Common.logger(self.log_type, self.crawler).info("点击标题")
            video_title_element[0].click()
            time.sleep(5)
            self.check_to_applet(xpath=r'//*[@class="index--video-item index--video"]')
            Common.logger(self.log_type, self.crawler).info("点击标题完成")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//*[@class="index--video-item index--video"]')
            Common.logger(self.log_type, self.crawler).info(f"{video_url_elements[0].get_attribute('src')}")
            return video_url_elements[0].get_attribute('src')

    def repeat_video(self, out_video_id):
        # Return False if this out_video_id already exists in crawler_video with
        # a create_time at least 7 days old; True means it can be treated as new.
        current_time = datetime.now()
        previous_day = current_time - timedelta(days=7)
        formatted_time = previous_day.strftime("%Y-%m-%d")
        sql = f""" select * from crawler_video where platform = "{self.platform}" and out_video_id="{out_video_id}" and create_time <= '{formatted_time}'; """
        repeat_video = MysqlHelper.get_values(
            log_type=self.log_type, crawler=self.platform, env=self.env, sql=sql, action=""
        )
        if repeat_video:
            return False
        return True

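    # get_video_info_2: parse one feed card (title, play/like/comment counts,
    # duration, author, cover) from the BeautifulSoup element, run it through
    # PiaoQuanPipeline, and, if it passes, fetch the playable video URL and
    # send the record to the ETL message queue.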
    def get_video_info_2(self, video_element):
        Common.logger(self.log_type, self.crawler).info(f"本轮已抓取{self.download_cnt}条视频\n")
        # Common.logging(self.log_type, self.crawler, self.env, f"本轮已抓取{self.download_cnt}条视频\n")
        if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        self.count += 1
        Common.logger(self.log_type, self.crawler).info(f"第{self.count}条视频")
        # Generate a trace_id and use it as the unique index for this video's lifecycle
        trace_id = self.crawler + str(uuid.uuid1())
        video_title = video_element.find("wx-view", class_="dynamic--title-container").text
        play_str = video_element.find("wx-view", class_="dynamic--views").text
        like_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[0].text
        comment_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[1].text
        duration_str = video_element.find("wx-view", class_="dynamic--duration").text
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
        cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
        play_cnt = int(play_str.replace("+", "").replace("次播放", ""))
        duration = int(duration_str.split(":")[0].strip()) * 60 + int(duration_str.split(":")[-1].strip())
        if "点赞" in like_str:
            like_cnt = 0
        elif "万" in like_str:
            # counts such as "1.2万" carry a decimal, so go through float() before scaling
            like_cnt = int(float(like_str.split("万")[0]) * 10000)
        else:
            like_cnt = int(like_str)
        if "评论" in comment_str:
            comment_cnt = 0
        elif "万" in comment_str:
            comment_cnt = int(float(comment_str.split("万")[0]) * 10000)
        else:
            comment_cnt = int(comment_str)
        out_video_id = md5(video_title.encode('utf8')).hexdigest()
        out_user_id = md5(user_name.encode('utf8')).hexdigest()
        repeat_id = self.repeat_video(out_video_id)
        if not repeat_id:
            # already stored: make the id unique again by appending a timestamp
            num = time.time()
            out_video_id = out_video_id + str(num)
        video_dict = {
            "video_title": video_title,
            "video_id": out_video_id,
            'out_video_id': out_video_id,
            "duration_str": duration_str,
            "duration": duration,
            "play_str": play_str,
            "play_cnt": play_cnt,
            "like_str": "",
            "like_cnt": like_cnt,
            "comment_cnt": comment_cnt,
            "share_cnt": 0,
            "user_name": user_name,
            "user_id": out_user_id,
            'publish_time_stamp': int(time.time()),
            'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
            'update_time_stamp': int(time.time()),
            "avatar_url": avatar_url,
            "cover_url": cover_url,
            "session": f"zhufuquanzi-{int(time.time())}"
        }
        AliyunLogger.logging(
            code="1001",
            platform=self.platform,
            mode=self.log_type,
            env=self.env,
            trace_id=trace_id,
            message="扫描到一条视频",
            data=video_dict
        )
        pipeline = PiaoQuanPipeline(
            platform=self.crawler,
            mode=self.log_type,
            item=video_dict,
            rule_dict=self.rule_dict,
            env=self.env,
            trace_id=trace_id
        )
        flag = pipeline.process_item()
        if flag:
            video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
            if video_title_element is None:
                return
            Common.logger(self.log_type, self.crawler).info("点击标题,进入视频详情页")
            AliyunLogger.logging(
                code="1000",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message="点击标题,进入视频详情页",
            )
            video_url = self.get_video_url(video_title_element)
            video_url = get_redirect_url(video_url)
            if video_url is None:
                self.driver.press_keycode(AndroidKey.BACK)
                time.sleep(5)
                return
            video_dict['video_url'] = video_url
            video_dict['like_cnt'] = 0
            video_dict['share_cnt'] = 0
            video_dict["platform"] = self.crawler
            video_dict["strategy"] = self.log_type
            video_dict["out_video_id"] = video_dict["video_id"]
            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
            video_dict["user_id"] = self.our_uid
            video_dict["publish_time"] = video_dict["publish_time_str"]
            self.mq.send_msg(video_dict)
            AliyunLogger.logging(
                code="1002",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                trace_id=trace_id,
                message="发送到ETL成功",
                data=video_dict
            )
            self.download_cnt += 1
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(5)

if __name__ == "__main__":
    rule_dict1 = {"period": {"min": 0, "max": 0},
                  "duration": {"min": 1, "max": 0},
                  "favorite_cnt": {"min": 0, "max": 0},
                  "videos_cnt": {"min": 1000, "max": 0},
                  "share_cnt": {"min": 0, "max": 0}}
    ZFQZRecommendNew("recommend", "zhufuquanzi", "dev", rule_dict1, 6267141)