# zhufuquanzi_recommend2.py
  1. # -*- coding: utf-8 -*-
  2. # @Author: wang
  3. # @Time: 2023/9/6
  4. import json
  5. import os
  6. import sys
  7. import time
  8. from hashlib import md5
  9. from appium import webdriver
  10. from appium.webdriver.extensions.android.nativekey import AndroidKey
  11. from appium.webdriver.webdriver import WebDriver
  12. from bs4 import BeautifulSoup
  13. from selenium.common import NoSuchElementException
  14. from selenium.webdriver.common.by import By
  15. sys.path.append(os.getcwd())
  16. from common.common import Common
  17. from common.mq import MQ
  18. from common.public import download_rule, get_config_from_mysql
  19. from common.scheduling_db import MysqlHelper
  20. class ZFQZRecommend:
  21. platform = "祝福圈子"
  22. download_cnt = 0
  23. @classmethod
  24. def start_wechat(cls, log_type, crawler, env, rule_dict, our_uid):
  25. if env == "dev":
  26. chromedriverExecutable = "/Users/wangkun/Downloads/chromedriver/chromedriver_v111/chromedriver"
  27. else:
  28. chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
  29. Common.logger(log_type, crawler).info("启动微信")
  30. Common.logging(log_type, crawler, env, '启动微信')
  31. caps = {
  32. "platformName": "Android",
  33. "devicesName": "Android",
  34. "platformVersion": "7",
  35. # "udid": "emulator-5554",
  36. "appPackage": "com.tencent.mm",
  37. "appActivity": ".ui.LauncherUI",
  38. "autoGrantPermissions": "true",
  39. "noReset": True,
  40. "resetkeyboard": True,
  41. "unicodekeyboard": True,
  42. "showChromedriverLog": True,
  43. "printPageSourceOnFailure": True,
  44. "recreateChromeDriverSessions": True,
  45. "enableWebviewDetailsCollection": True,
  46. "setWebContentsDebuggingEnabled": True,
  47. "newCommandTimeout": 6000,
  48. "automationName": "UiAutomator2",
  49. "chromedriverExecutable": chromedriverExecutable,
  50. "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
  51. }
  52. driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
  53. driver.implicitly_wait(30)
  54. for i in range(120):
  55. try:
  56. if driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
  57. Common.logger(log_type, crawler).info("微信启动成功")
  58. Common.logging(log_type, crawler, env, '微信启动成功')
  59. break
  60. elif driver.find_element(By.ID, "com.android.systemui:id/dismiss_view"):
  61. Common.logger(log_type, crawler).info("发现并关闭系统下拉菜单")
  62. Common.logging(log_type, crawler, env, '发现并关闭系统下拉菜单')
  63. driver.find_element(By.ID, "com.android.system:id/dismiss_view").click()
  64. else:
  65. pass
  66. except NoSuchElementException:
  67. time.sleep(1)
  68. Common.logger(log_type, crawler).info("下滑,展示小程序选择面板")
  69. Common.logging(log_type, crawler, env, '下滑,展示小程序选择面板')
  70. size = driver.get_window_size()
  71. driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.2),
  72. int(size['width'] * 0.5), int(size['height'] * 0.8), 200)
  73. time.sleep(5)
  74. Common.logger(log_type, crawler).info('打开小程序"祝福圈子"')
  75. Common.logging(log_type, crawler, env, '打开小程序"祝福圈子"')
  76. driver.find_elements(By.XPATH, '//*[@text="祝福圈子"]')[-1].click()
  77. time.sleep(10)
  78. cls.get_videoList(log_type, crawler, driver, env, rule_dict, our_uid)
  79. time.sleep(3)
  80. driver.quit()
  81. @classmethod
  82. def search_elements(cls, driver: WebDriver, xpath):
  83. time.sleep(1)
  84. windowHandles = driver.window_handles
  85. for handle in windowHandles:
  86. driver.switch_to.window(handle)
  87. time.sleep(1)
  88. try:
  89. elements = driver.find_elements(By.XPATH, xpath)
  90. if elements:
  91. return elements
  92. except NoSuchElementException:
  93. pass
  94. @classmethod
  95. def check_to_applet(cls, log_type, crawler, env, driver: WebDriver, xpath):
  96. time.sleep(1)
  97. webViews = driver.contexts
  98. Common.logger(log_type, crawler).info(f"webViews:{webViews}")
  99. Common.logging(log_type, crawler, env, f"webViews:{webViews}")
  100. driver.switch_to.context(webViews[1])
  101. windowHandles = driver.window_handles
  102. for handle in windowHandles:
  103. driver.switch_to.window(handle)
  104. time.sleep(1)
  105. try:
  106. driver.find_element(By.XPATH, xpath)
  107. Common.logger(log_type, crawler).info("切换到小程序成功\n")
  108. Common.logging(log_type, crawler, env, '切换到小程序成功\n')
  109. return
  110. except NoSuchElementException:
  111. time.sleep(1)
  112. @classmethod
  113. def repeat_video(cls, log_type, crawler, video_id, env):
  114. sql = f""" select * from crawler_video where platform in ("众妙音信", "刚刚都传", "吉祥幸福", "知青天天看", "zhufuquanzi", "祝福圈子", "haitunzhufu", "海豚祝福") and out_video_id="{video_id}"; """
  115. repeat_video = MysqlHelper.get_values(log_type, crawler, sql, env)
  116. return len(repeat_video)
  117. @classmethod
  118. def swipe_up(cls, driver: WebDriver):
  119. cls.search_elements(driver, '//*[@class="bless--list"]')
  120. size = driver.get_window_size()
  121. driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
  122. int(size["width"] * 0.5), int(size["height"] * 0.4), 200)
  123. @classmethod
  124. def get_video_url(cls, log_type, crawler, driver: WebDriver, video_title_element):
  125. for i in range(3):
  126. cls.search_elements(driver, '//*[@class="bless--list"]')
  127. Common.logger(log_type, crawler).info(f"video_title_element:{video_title_element[0]}")
  128. Common.logger(log_type, crawler).info("点击标题")
  129. video_title_element[0].click()
  130. # driver.execute_script("arguments[0].click()", video_title_element[0])
  131. Common.logger(log_type, crawler).info("点击标题完成")
  132. time.sleep(5)
  133. video_url_elements = cls.search_elements(driver, '//*[@class="index--video-item index--video"]')
  134. if video_url_elements:
  135. return video_url_elements[0].get_attribute("src")
  136. @classmethod
  137. def get_videoList(cls, log_type, crawler, driver: WebDriver, env, rule_dict, our_uid):
  138. mq = MQ(topic_name="topic_crawler_etl_" + env)
  139. driver.implicitly_wait(20)
  140. cls.check_to_applet(log_type=log_type, crawler=crawler, env=env, driver=driver,
  141. xpath='//*[@class="tags--tag tags--tag-0 tags--checked"]')
  142. time.sleep(3)
  143. page = 0
  144. while True:
  145. Common.logger(log_type, crawler).info(f"正在抓取第{page + 1}页")
  146. Common.logging(log_type, crawler, env, f"正在抓取第{page + 1}页")
  147. if cls.search_elements(driver, '//*[@class="bless--list"]') is None:
  148. Common.logger(log_type, crawler).info("窗口已销毁\n")
  149. Common.logging(log_type, crawler, env, '窗口已销毁\n')
  150. return
  151. cls.swipe_up(driver)
  152. page_source = driver.page_source
  153. soup = BeautifulSoup(page_source, 'html.parser')
  154. soup.prettify()
  155. video_list_elements = soup.findAll("wx-view", class_="expose--adapt-parent")
  156. Common.logger(log_type, crawler).info(f"第{page + 1}页共:{len(video_list_elements)}条视频\n")
  157. Common.logging(log_type, crawler, env, f"第{page + 1}页共:{len(video_list_elements)}条视频\n")
  158. for i, video_element in enumerate(video_list_elements):
  159. try:
  160. if cls.download_cnt >= int(rule_dict.get("videos_cnt", {}).get("min", 10)):
  161. Common.logger(log_type, crawler).info(f"本轮已抓取视频数:{cls.download_cnt}")
  162. Common.logging(log_type, crawler, env, f"本轮已抓取视频数:{cls.download_cnt}")
  163. cls.download_cnt = 0
  164. return
  165. Common.logger(log_type, crawler).info(f"第{i + 1}条视频")
  166. Common.logging(log_type, crawler, env, f"第{i + 1}条视频")
  167. video_title = video_element.find("wx-view", class_="dynamic--title").text
  168. play_str = video_element.find("wx-view", class_="dynamic--views").text
  169. like_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[0].text
  170. comment_str = video_element.findAll("wx-view", class_="dynamic--commerce-btn-text")[1].text
  171. duration_str = video_element.find("wx-view", class_="dynamic--duration").text
  172. user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
  173. avatar_url = video_element.find("wx-image", class_="avatar--avatar")["src"]
  174. cover_url = video_element.find("wx-image", class_="dynamic--bg-image")["src"]
  175. play_cnt = int(play_str.replace("+", "").replace("次播放", ""))
  176. duration = int(duration_str.split(":")[0].strip()) * 60 + int(duration_str.split(":")[-1].strip())
  177. if "点赞" in like_str:
  178. like_cnt = 0
  179. elif "万" in like_str:
  180. like_cnt = int(like_str.split("万")[0]) * 10000
  181. else:
  182. like_cnt = int(like_str)
  183. if "评论" in comment_str:
  184. comment_cnt = 0
  185. elif "万" in comment_str:
  186. comment_cnt = int(comment_str.split("万")[0]) * 10000
  187. else:
  188. comment_cnt = int(comment_str)
  189. out_video_id = md5(video_title.encode('utf8')).hexdigest()
  190. out_user_id = md5(user_name.encode('utf8')).hexdigest()
  191. video_dict = {
  192. "video_title": video_title,
  193. "video_id": out_video_id,
  194. "duration_str": duration_str,
  195. "duration": duration,
  196. "play_str": play_str,
  197. "play_cnt": play_cnt,
  198. "like_str": like_str,
  199. "like_cnt": like_cnt,
  200. "comment_cnt": comment_cnt,
  201. "share_cnt": 0,
  202. "user_name": user_name,
  203. "user_id": out_user_id,
  204. 'publish_time_stamp': int(time.time()),
  205. 'publish_time_str': time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time()))),
  206. "avatar_url": avatar_url,
  207. "cover_url": cover_url,
  208. "session": f"zhufuquanzi-{int(time.time())}"
  209. }
  210. for k, v in video_dict.items():
  211. Common.logger(log_type, crawler).info(f"{k}:{v}")
  212. Common.logging(log_type, crawler, env, f"video_dict:{video_dict}")
  213. # Common.logger(log_type, crawler).info(f"==========分割线==========\n")
  214. if video_title is None or cover_url is None:
  215. Common.logger(log_type, crawler).info("无效视频\n")
  216. Common.logging(log_type, crawler, env, '无效视频\n')
  217. elif download_rule(log_type=log_type, crawler=crawler, video_dict=video_dict,
  218. rule_dict=rule_dict) is False:
  219. Common.logger(log_type, crawler).info("不满足抓取规则\n")
  220. Common.logging(log_type, crawler, env, "不满足抓取规则\n")
  221. elif any(str(word) if str(word) in video_dict["video_title"] else False
  222. for word in get_config_from_mysql(log_type=log_type,
  223. source=crawler,
  224. env=env,
  225. text="filter",
  226. action="")) is True:
  227. Common.logger(log_type, crawler).info('已中过滤词\n')
  228. Common.logging(log_type, crawler, env, '已中过滤词\n')
  229. elif cls.repeat_video(log_type, crawler, out_video_id, env) != 0:
  230. Common.logger(log_type, crawler).info('视频已下载\n')
  231. Common.logging(log_type, crawler, env, '视频已下载\n')
  232. else:
  233. video_title_element = cls.search_elements(driver, f'//*[contains(text(), "{video_title}")]')
  234. if video_title_element is None:
  235. Common.logger(log_type, crawler).warning(f"未找到该视频标题的element:{video_title_element}")
  236. Common.logging(log_type, crawler, env, f"未找到该视频标题的element:{video_title_element}")
  237. continue
  238. Common.logger(log_type, crawler).info("点击标题,进入视频详情页")
  239. Common.logging(log_type, crawler, env, "点击标题,进入视频详情页")
  240. video_url = cls.get_video_url(log_type, crawler, driver, video_title_element)
  241. if video_url is None:
  242. Common.logger(log_type, crawler).info("未获取到视频播放地址\n")
  243. driver.press_keycode(AndroidKey.BACK)
  244. time.sleep(5)
  245. continue
  246. video_dict['video_url'] = video_url
  247. Common.logger(log_type, crawler).info(f"video_url:{video_url}\n")
  248. video_dict["platform"] = crawler
  249. video_dict["strategy"] = log_type
  250. video_dict["out_video_id"] = video_dict["video_id"]
  251. video_dict["crawler_rule"] = json.dumps(rule_dict)
  252. video_dict["user_id"] = our_uid
  253. video_dict["publish_time"] = video_dict["publish_time_str"]
  254. mq.send_msg(video_dict)
  255. cls.download_cnt += 1
  256. driver.press_keycode(AndroidKey.BACK)
  257. Common.logger(log_type, crawler).info("符合抓取条件,mq send msg 成功\n")
  258. Common.logging(log_type, crawler, env, "符合抓取条件,ACK MQ 成功\n")
  259. time.sleep(5)
  260. except Exception as e:
  261. Common.logger(log_type, crawler).error(f"抓取单条视频异常:{e}\n")
  262. Common.logging(log_type, crawler, env, f"抓取单条视频异常:{e}\n")
  263. Common.logger(log_type, crawler).info("已抓取完一组,休眠 5 秒\n")
  264. Common.logging(log_type, crawler, env, "已抓取完一组,休眠 5 秒\n")
  265. time.sleep(5)
  266. page += 1
  267. if __name__ == "__main__":
  268. rule_dict1 = {"period": {"min": 365, "max": 365},
  269. "duration": {"min": 30, "max": 1800},
  270. "favorite_cnt": {"min": 5000, "max": 0},
  271. "videos_cnt": {"min": 10, "max": 20},
  272. "share_cnt": {"min": 1000, "max": 0}}
  273. ZFQZRecommend.start_wechat("recommend", "zhufuquanzi", "dev", rule_dict1, 6267141)