xiaoniangao_plus_get_userid.py

# -*- coding: utf-8 -*-
# @Time: 2023/11/14
import json
import os
import random
import sys
import time
import subprocess
from datetime import date, timedelta
import requests
from appium import webdriver
from appium.webdriver.extensions.android.nativekey import AndroidKey
from bs4 import BeautifulSoup
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
import multiprocessing

from common import AliyunLogger
from common.feishu import Feishu
from common.public import clean_title, get_config_from_mysql

sys.path.append(os.getcwd())
from common.common import Common
from common.mq import MQ
from common.scheduling_db import MysqlHelper
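

# Resolve a single HTTP redirect hop: request the URL without following
# redirects and return the Location header on a 301/302; otherwise return
# the URL unchanged. Defined here but not called anywhere in this module.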
def get_redirect_url(url):
    res = requests.get(url, allow_redirects=False)
    if res.status_code == 302 or res.status_code == 301:
        return res.headers['Location']
    else:
        return url


class XiaoNianGaoPlusRecommend:
    env = None
    driver = None
    log_type = None

    def __init__(self, log_type, crawler, env, rule_dict, our_uid):
        self.mq = None
        self.platform = "小年糕+主页账号ID"
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.log_type = log_type
        self.crawler = crawler
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        # dev and prod currently point at the same chromedriver binary
        if self.env == "dev":
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        else:
            chromedriverExecutable = "/Users/piaoquan/Downloads/chromedriver"
        Common.logger(self.log_type, self.crawler).info("Starting WeChat")
        # Appium capabilities for driving WeChat
        caps = {
            "platformName": "Android",
            "deviceName": "Android",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": "true",
            "noReset": True,
            "resetKeyboard": True,
            "unicodeKeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
        self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
        self.driver.implicitly_wait(30)
        for i in range(120):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    Common.logger(self.log_type, self.crawler).info("WeChat started successfully")
                    break
                elif self.driver.find_element(By.ID, "com.android.systemui:id/dismiss_view"):
                    Common.logger(self.log_type, self.crawler).info("Found and dismissed the system pull-down menu")
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.8),
                                      int(size['width'] * 0.5), int(size['height'] * 0.2), 200)
                else:
                    pass
            except NoSuchElementException:
                time.sleep(1)
        Common.logger(self.log_type, self.crawler).info("Swiping down to open the mini-program panel")
        size = self.driver.get_window_size()
        self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.2),
                          int(size['width'] * 0.5), int(size['height'] * 0.8), 200)
        time.sleep(1)
        command = 'adb shell service call statusbar 2'
        process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        process.communicate()
        Common.logger(self.log_type, self.crawler).info('Opening the "小年糕+" mini program')
        self.driver.find_elements(By.XPATH, '//*[@text="小年糕+"]')[-1].click()
        time.sleep(5)
        self.get_videoList()
        time.sleep(1)
        self.driver.quit()
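
    # Search every window handle for elements matching `xpath` and return
    # the first non-empty match list; implicitly returns None when no window
    # contains a match.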
    def search_elements(self, xpath):
        time.sleep(1)
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                pass
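
    # Switch the driver into the last WebView context (the mini program) and
    # cycle through window handles until `xpath` is found, confirming the
    # applet page has loaded.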
    def check_to_applet(self, xpath):
        time.sleep(1)
        webViews = self.driver.contexts
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                Common.logger(self.log_type, self.crawler).info("Switched to WebView successfully\n")
                return
            except NoSuchElementException:
                time.sleep(1)
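
    # Scroll the feed up by roughly a third of the screen height and count
    # the swipe, so get_videoList() can stop after too many scrolls.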
    def swipe_up(self):
        self.search_elements('//*[@class="list-list--list"]')
        size = self.driver.get_window_size()
        self.driver.swipe(int(size["width"] * 0.5), int(size["height"] * 0.8),
                          int(size["width"] * 0.5), int(size["height"] * 0.442), 200)
        self.swipe_count += 1
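
    # Scroll a video title into view, tap it, wait for the detail page's
    # wx-video element, and return its `src` attribute; retries up to 3 times.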
    def get_video_url(self, video_title_element):
        for i in range(3):
            self.search_elements('//*[@class="list-list--list"]')
            Common.logger(self.log_type, self.crawler).info(f"video_title_element:{video_title_element[0]}")
            time.sleep(1)
            Common.logger(self.log_type, self.crawler).info("Scrolling the title into view")
            self.driver.execute_script("arguments[0].scrollIntoView({block:'center',inline:'center'});",
                                       video_title_element[0])
            time.sleep(3)
            Common.logger(self.log_type, self.crawler).info("Clicking the title")
            video_title_element[0].click()
            self.check_to_applet(xpath=r'//wx-video[@class="dynamic-index--video-item dynamic-index--video"]')
            Common.logger(self.log_type, self.crawler).info("Title clicked")
            time.sleep(10)
            video_url_elements = self.search_elements(
                '//wx-video[@class="dynamic-index--video-item dynamic-index--video"]')
            if video_url_elements:
                return video_url_elements[0].get_attribute("src")
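
    # Parse the current page source with BeautifulSoup and return the video
    # card that follows position `index` in the feed.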
    def parse_detail(self, index):
        page_source = self.driver.page_source
        soup = BeautifulSoup(page_source, 'html.parser')
        video_list = soup.findAll(name="wx-view", attrs={"class": "expose--adapt-parent"})
        index = index + 1
        element_list = video_list[index:]
        return element_list[0]
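
    # Core per-video flow: read title and nickname from the parsed card, open
    # the detail page, extract the author's mid, skip users already stored,
    # then fetch their recent works and write them to MySQL and Feishu.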
    def get_video_info_2(self, video_element):
        Common.logger(self.log_type, self.crawler).info(f"{self.download_cnt} videos crawled this round\n")
        if self.download_cnt >= int(self.rule_dict.get("videos_cnt", {}).get("min", 10)):
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        self.count += 1
        Common.logger(self.log_type, self.crawler).info(f"Video No. {self.count}")
        # Title
        video_title = video_element.find("wx-view", class_="dynamic--title").text
        # Author nickname
        user_name = video_element.find("wx-view", class_="dynamic--nick-top").text
        video_title_element = self.search_elements(f'//*[contains(text(), "{video_title}")]')
        if video_title_element is None:
            Common.logger(self.log_type, self.crawler).warning(
                f"No element found for this video title: {video_title_element}")
            return
        Common.logger(self.log_type, self.crawler).info("Clicking the title to open the video detail page")
        self.get_video_url(video_title_element)
        video_mid_elements = self.search_elements("//wx-view[@class='bar--navBar-content-capsule-wrap']")
        mid = int(video_mid_elements[0].get_attribute("data-mid"))
        repeat_video_id = self.repeat_video_id(mid)
        if repeat_video_id:
            Common.logger(self.log_type, self.crawler).info("This user already exists")
            # status = 0
            # self.insert_user(mid, user_name, data_list, status)
            self.driver.press_keycode(AndroidKey.BACK)
            return
        data_list = self.get_user_list(mid)
        if len(data_list) == 0:
            Common.logger(self.log_type, self.crawler).info("Crawling conditions not met")
            self.driver.press_keycode(AndroidKey.BACK)
            return
        else:
            status = 1
            localtime = time.localtime(time.time())
            formatted_time = time.strftime("%Y-%m-%d", localtime)
            print(formatted_time)
            self.insert_user(mid, user_name, data_list, status, formatted_time)
            values = [[
                mid, user_name, formatted_time, data_list
            ]]
            Feishu.insert_columns('xiaoniangao', 'xiaoniangao', "8zlceR", "ROWS", 1, 2)
            time.sleep(0.5)
            Feishu.update_values('xiaoniangao', 'xiaoniangao', "8zlceR", "A2:Z2", values)
            Common.logger(self.log_type, self.crawler).info(f"{mid}:{user_name} written successfully")
            AliyunLogger.logging(
                code="1010",
                platform=self.platform,
                mode=self.log_type,
                env=self.env,
                message=f"{mid}:{user_name} stored",
            )
            self.driver.press_keycode(AndroidKey.BACK)
            time.sleep(2)
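
    # Persist one user row. Values are interpolated directly into the SQL
    # string, so titles containing quotes can break the statement;
    # parameterized queries would be safer if MysqlHelper supports them.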
    def insert_user(self, mid, user_name, data_list, status, formatted_time):
        insert_sql = f"""insert into crawler_xng_userid( user_id , user_name , user_title_text , status, time) values ({mid},"{user_name}", "{data_list}",{status}, "{formatted_time}")"""
        print(insert_sql)
        MysqlHelper.update_values(self.log_type, self.crawler, insert_sql, self.env, action='')
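
    # Fetch a user's public works from the xiaoniangao app API. The headers
    # (authorization, content-md5, date, ...) look like replayed capture
    # values and may stop working once they expire server-side. Returns
    # "title:url" strings, or an empty list when the response is malformed
    # or any work falls outside the 7-day freshness window.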
    def get_user_list(self, mid):
        next_t = -1
        url = "https://kapi-xng-app.xiaoniangao.cn/v1/album/user_public"
        headers = {
            'Host': 'kapi-xng-app.xiaoniangao.cn',
            'content-type': 'application/json; charset=utf-8',
            'accept': '*/*',
            'authorization': 'hSNQ2s9pvPxvFn4LaQJxKQ6/7Is=',
            'verb': 'POST',
            'content-md5': 'c7b7f8663984e8800e3bcd9b44465083',
            'x-b3-traceid': '2f9da41f960ae077',
            'accept-language': 'zh-cn',
            'date': 'Mon, 19 Jun 2023 06:41:17 GMT',
            'x-token-id': '',
            'x-signaturemethod': 'hmac-sha1',
            'user-agent': 'xngapp/157 CFNetwork/1335.0.3.1 Darwin/21.6.0'
        }
        payload = {
            "token": "",
            "limit": 20,
            "start_t": next_t,
            "visited_mid": mid,
            "share_width": 300,
            "share_height": 240,
        }
        response = requests.request(
            "POST",
            url,
            headers=headers,
            data=json.dumps(payload),
        )
        data_list = []
        if "data" not in response.text or response.status_code != 200:
            return data_list
        elif "list" not in response.json()["data"]:
            return data_list
        elif len(response.json()["data"]["list"]) == 0:
            return data_list
        video_list = response.json()["data"]["list"]
        for video_obj in video_list:
            video_title = clean_title(video_obj.get("title", ""))
            # Publish time (API returns milliseconds)
            publish_time_stamp = int(int(video_obj.get("t", 0)) / 1000)
            publish_time_str = time.strftime(
                "%Y-%m-%d", time.localtime(publish_time_stamp)
            )
            # Only accept users whose works were all published within the last 7 days
            date_seven_days_ago_string = (date.today() + timedelta(days=-7)).strftime("%Y-%m-%d")
            rule = publish_time_str >= date_seven_days_ago_string
            if not rule:
                return []
            v_url = video_obj.get("v_url")
            data_list.append(video_title + ":" + v_url)
        return data_list
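
    # Count existing crawler_user_v3 rows for this mid; a non-zero result
    # means the user has already been collected.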
    def repeat_video_id(self, mid):
        sql = f"SELECT `link` FROM `crawler_user_v3` WHERE `source` = 'xiaoniangao' and `link` = {mid}"
        repeat_video_id = MysqlHelper.get_values(self.log_type, self.crawler, sql, self.env)
        return len(repeat_video_id)
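
    # Wrapper around get_video_info_2() that presses BACK and logs the error
    # on any exception, so one bad video doesn't abort the whole batch.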
    def get_video_info(self, video_element):
        try:
            self.get_video_info_2(video_element)
        except Exception as e:
            self.driver.press_keycode(AndroidKey.BACK)
            Common.logger(self.log_type, self.crawler).error(f"Exception while crawling a single video: {e}\n")
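
    # Main crawl loop: switch into the applet WebView, then alternate between
    # parsing the visible card and swiping up, for at most 50 cards or 100
    # swipes per round.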
    def get_videoList(self):
        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
        self.driver.implicitly_wait(20)
        # Switch to the web_view
        self.check_to_applet(xpath='//*[@class="tab-bar--tab tab-bar--tab-selected"]')
        print("Switched to webview successfully")
        time.sleep(1)
        if self.search_elements('//*[@class="list-list--list"]') is None:
            Common.logger(self.log_type, self.crawler).info("Window has been destroyed\n")
            self.count = 0
            self.download_cnt = 0
            self.element_list = []
            return
        print("Start fetching video info")
        for i in range(50):
            print("Swipe round {}".format(i))
            element = self.parse_detail(i)
            self.get_video_info(element)
            self.swipe_up()
            time.sleep(1)
            if self.swipe_count > 100:
                return
        print("Swiping finished")
        Common.logger(self.log_type, self.crawler).info("Finished one batch, sleeping for 5 seconds\n")
        time.sleep(5)
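

# Entry point for one crawler pass. rule_dict1 appears to follow the
# scheduler's rule format; in this module only videos_cnt["min"] is
# actually consulted.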
def run():
    rule_dict1 = {"period": {"min": 365, "max": 365},
                  "duration": {"min": 30, "max": 1800},
                  "favorite_cnt": {"min": 0, "max": 0},
                  "videos_cnt": {"min": 5000, "max": 0},
                  "share_cnt": {"min": 0, "max": 0}}
    XiaoNianGaoPlusRecommend("recommend", "xiaoniangao", "prod", rule_dict1, 6267141)
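
# Watchdog loop: run the crawler in a child process, poll every 60 seconds,
# and if it has died, clear adb port forwards and start a fresh process.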
if __name__ == "__main__":
    process = multiprocessing.Process(
        target=run
    )
    process.start()
    while True:
        if not process.is_alive():
            print("Restarting")
            process.terminate()
            time.sleep(60)
            os.system("adb forward --remove-all")
            process = multiprocessing.Process(target=run)
            process.start()
        time.sleep(60)