Przeglądaj źródła

小年糕+ 日志分析

罗俊辉 1 rok temu
rodzic
commit
7af4a65824

+ 18 - 2
app/off_line_controler.py

@@ -10,10 +10,18 @@ from scheduler import SpiderHome
 
 
 class SpiderScheduler(object):
+    """
+    线下爬虫调度器
+    """
     SH = SpiderHome()
 
     @classmethod
     def protect_spider_timeout(cls, function, hour):
+        """
+        守护进程,在程序启动后的某一个时段内守护爬虫进程
+        :param function: 被守护的函数
+        :param hour: 守护时长 / hour
+        """
         run_time_limit = hour * 3600
         start_time = time.time()
         process = multiprocessing.Process(target=function)
@@ -23,7 +31,6 @@ class SpiderScheduler(object):
                 process.terminate()
                 break
             if not process.is_alive():
-                print("正在重启")
                 process.terminate()
                 time.sleep(60)
                 os.system("adb forward --remove-all")
@@ -33,6 +40,10 @@ class SpiderScheduler(object):
 
    @classmethod
    def run_xng_plus(cls, hour):
        """
        Run the "小年糕+" recommend-feed crawler under the timeout guard.

        :param hour: guard duration, in hours
        """
        cls.protect_spider_timeout(function=cls.SH.run_xng_plus, hour=hour)
 
     @classmethod
@@ -41,6 +52,10 @@ class SpiderScheduler(object):
 
    @classmethod
    def run_spss(cls, hour):
        """
        Run the "视频刷刷" recommend-feed crawler under the timeout guard.

        :param hour: guard duration, in hours
        """
        cls.protect_spider_timeout(function=cls.SH.run_spss, hour=hour)
 
     @classmethod
@@ -50,7 +65,8 @@ class SpiderScheduler(object):
 
 if __name__ == "__main__":
     SC = SpiderScheduler()
-    SC.run_spss(hour=5)
+    SC.run_xng_plus(hour=100)
+    # SC.run_spss(hour=5)
     # # schedule.every().day.at("20:06").do(SC.run_xng_plus, hour=1)
     # schedule.every().day.at("20:30").do(SC.run_spss, hour=1)
     #

+ 26 - 0
application/functions/appium_tools.py

@@ -0,0 +1,26 @@
+"""
+Appium 的一些公共方法
+"""
+import time
+from selenium.webdriver.common.by import By
+from selenium.common.exceptions import NoSuchElementException
+
+
def search_elements(driver, xpath):
    """
    Search every window handle of *driver* for elements matching *xpath*.

    :param driver: an Appium/Selenium webdriver instance
    :param xpath: XPath expression to look up
    :return: the first non-empty list of matching elements, or None when
             no window handle contains a match
    """
    time.sleep(1)
    # The target elements may live in a different window/webview, so
    # probe each open handle in turn.
    for handle in driver.window_handles:
        driver.switch_to.window(handle)
        time.sleep(1)
        try:
            elements = driver.find_elements(By.XPATH, xpath)
            if elements:
                return elements
        except NoSuchElementException:
            # find_elements() normally returns [] rather than raising;
            # keep the guard defensively and scan the remaining handles.
            pass
    # Make the "not found" result explicit instead of falling off the end.
    return None

+ 10 - 8
scheduler/spider_scheduler.py

@@ -1,6 +1,6 @@
 import os
 import sys
-
+import time
 
 sys.path.append(os.getcwd())
 
@@ -17,13 +17,15 @@ class SpiderHome(object):
             "videos_cnt": {"min": 5000, "max": 0},
             "share_cnt": {"min": 0, "max": 0},
         }
-        XiaoNianGaoPlusRecommend(
-            "recommend",
-            "xiaoniangaoplus",
-            "prod",
-            rule_dict1,
-            [64120158, 64120157, 63676778],
-        )
+        while True:
+            XiaoNianGaoPlusRecommend(
+                "recommend",
+                "xiaoniangaoplus",
+                "prod",
+                rule_dict1,
+                [64120158, 64120157, 63676778],
+            )
+            time.sleep(600)
 
     @classmethod
     def run_spss(cls):

+ 172 - 0
spider/crawler_offline/shipinhao_search.py

@@ -0,0 +1,172 @@
+"""
+视频号搜索功能
+"""
+import json
+import os
+import random
+import sys
+import time
+import uuid
+from hashlib import md5
+
+from appium import webdriver
+from appium.webdriver.extensions.android.nativekey import AndroidKey
+from bs4 import BeautifulSoup
+from selenium.common.exceptions import NoSuchElementException
+from selenium.webdriver.common.by import By
+import multiprocessing
+
+sys.path.append(os.getcwd())
+
+from application.common.log import AliyunLogger, Local
+from application.common.messageQueue import MQ
+from application.functions import get_redirect_url
+from application.pipeline import PiaoQuanPipeline
+
+
class ShiPinHaoSearch(object):
    """
    WeChat Channels ("视频号") search crawler.

    Drives the WeChat Android app through Appium: starts WeChat, types a
    keyword into the in-app search box, switches into the results webview
    and scans the 视频号 result list.
    """

    def __init__(self, platform, mode, env, rule_dict, our_uid):
        """
        Connect to the local Appium server and wait for WeChat to start.

        :param platform: platform name, used for logging
        :param mode: crawler mode, used for logging
        :param env: environment name; selects the ETL message-queue topic
        :param rule_dict: video filtering rules
        :param our_uid: our-side publishing uid(s)
        """
        self.mq = MQ(topic_name="topic_crawler_etl_" + env)
        self.download_cnt = 0
        self.element_list = []
        self.count = 0
        self.swipe_count = 0
        self.platform = platform
        self.mode = mode
        self.env = env
        self.rule_dict = rule_dict
        self.our_uid = our_uid
        chromedriverExecutable = "/usr/bin/chromedriver"
        self.aliyun_log = AliyunLogger(platform=platform, mode=mode, env=env)
        Local.logger(platform=self.platform, mode=self.mode).info("启动微信")
        # Appium capabilities for driving the WeChat app.
        caps = {
            "platformName": "Android",
            "devicesName": "Android",
            "appPackage": "com.tencent.mm",
            "appActivity": ".ui.LauncherUI",
            "autoGrantPermissions": True,
            "noReset": True,
            "resetkeyboard": True,
            "unicodekeyboard": True,
            "showChromedriverLog": True,
            "printPageSourceOnFailure": True,
            "recreateChromeDriverSessions": True,
            "enableWebviewDetailsCollection": True,
            "setWebContentsDebuggingEnabled": True,
            "newCommandTimeout": 6000,
            "automationName": "UiAutomator2",
            "chromedriverExecutable": chromedriverExecutable,
            "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
        }
        try:
            self.driver = webdriver.Remote("http://localhost:4750/wd/hub", caps)
        except Exception as e:
            print(e)
            self.aliyun_log.logging(
                code="3002",
                message=f'appium 启动异常: {e}'
            )
            return
        self.driver.implicitly_wait(30)

        # Poll until the WeChat main UI appears (up to 120 rounds).
        for i in range(120):
            try:
                if self.driver.find_elements(By.ID, "com.tencent.mm:id/f2s"):
                    # Bug fix: the original logged via self.log_type /
                    # self.crawler -- attributes this class never defines.
                    Local.logger(platform=self.platform, mode=self.mode).info("微信启动成功")
                    self.aliyun_log.logging(
                        code="1000",
                        message="启动微信成功"
                    )
                    break
                elif self.driver.find_element(By.ID, "com.android.systemui:id/dismiss_view"):
                    Local.logger(platform=self.platform, mode=self.mode).info("发现并关闭系统下拉菜单")
                    self.aliyun_log.logging(
                        code="1000",
                        message="发现并关闭系统下拉菜单"
                    )
                    # Swipe up to dismiss the system pull-down menu.
                    size = self.driver.get_window_size()
                    self.driver.swipe(int(size['width'] * 0.5), int(size['height'] * 0.8),
                                      int(size['width'] * 0.5), int(size['height'] * 0.2), 200)
                else:
                    pass
            except NoSuchElementException:
                self.aliyun_log.logging(
                    code="3001",
                    message="打开微信异常"
                )
                time.sleep(1)

    def search(self, keyword):
        """
        Search *keyword* inside WeChat and open the 视频号 result tab.

        :param keyword: search term typed into the WeChat search box
        """
        # Bug fix: find_element() requires an explicit locator strategy;
        # the original passed the resource id as the *strategy* argument.
        self.driver.find_element(By.ID, 'com.tencent.mm:id/j5t').click()
        time.sleep(1)
        # Bug fix: WebElement.clear() returns None, so the original
        # ``clear().send_keys(...)`` chain raised AttributeError.
        search_box = self.driver.find_element(By.ID, 'com.tencent.mm:id/cd7')
        search_box.clear()
        search_box.send_keys(keyword)
        self.driver.press_keycode(AndroidKey.ENTER)
        time.sleep(5)
        # Switch into the search-results webview.
        self.check_to_webview(xpath='//div[@class="unit"]')
        time.sleep(1)
        # Click the second category tab ("视频号").
        shipinhao_tags = self.find_elements_by_xpath('//div[@class="unit"]/*[2]')
        Local.logger(platform=self.platform, mode=self.mode).info("点击视频号分类")
        shipinhao_tags[0].click()

        index = 0
        while True:
            if not self.find_elements_by_xpath('//*[@class="mixed-box__bd"]'):
                Local.logger(self.platform, self.mode).info("窗口已销毁")
                return
            Local.logger(self.platform, self.mode).info("开始获取视频列表")

            video_list = self.find_elements_by_xpath('//div[@class="rich-media active__absolute"]')
            if video_list:
                # TODO(review): result parsing looks unfinished -- the loop
                # only prints the raw elements and never advances or breaks.
                print(video_list)

    def check_to_webview(self, xpath):
        """
        Switch the driver into the webview window that contains *xpath*.

        :param xpath: XPath used to probe each window handle
        :return: None; returns as soon as a matching window is found
        """
        webViews = self.driver.contexts
        # The webview context is listed after the native app context.
        self.driver.switch_to.context(webViews[-1])
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                self.driver.find_element(By.XPATH, xpath)
                # Bug fix: original used undefined self.log_type/self.crawler.
                Local.logger(platform=self.platform, mode=self.mode).info("切换到WebView成功\n")
                self.aliyun_log.logging(
                    code="1000",
                    message="成功切换到 webview"
                )
                return
            except NoSuchElementException:
                time.sleep(1)

    def find_elements_by_xpath(self, xpath):
        """
        Look for *xpath* in every window handle.

        :param xpath: XPath expression to look up
        :return: first non-empty list of matching elements, or None
        """
        windowHandles = self.driver.window_handles
        for handle in windowHandles:
            self.driver.switch_to.window(handle)
            time.sleep(1)
            try:
                elements = self.driver.find_elements(By.XPATH, xpath)
                if elements:
                    return elements
            except NoSuchElementException:
                Local.logger(platform=self.platform, mode=self.mode).info("未找到元素{}".format(xpath))
                return None
        # No handle matched: make the implicit None explicit.
        return None

+ 63 - 45
spider/crawler_offline/xiaoniangao_plus.py

@@ -3,6 +3,7 @@
 # @Time: 2023/12/18
 import json
 import os
+import random
 import sys
 import time
 import uuid
@@ -19,15 +20,19 @@ sys.path.append(os.getcwd())
 from application.functions import get_redirect_url
 from application.pipeline import PiaoQuanPipelineTest
 from application.common.messageQueue import MQ
-from application.common.log import AliyunLogger
+from application.common.log import Local, AliyunLogger
 
 
 class XiaoNianGaoPlusRecommend(object):
+    """
+    小年糕+线下爬虫
+    """
+
     def __init__(self, log_type, crawler, env, rule_dict, our_uid):
         self.mq = None
         self.platform = "xiaoniangaoplus"
         self.download_cnt = 0
-        self.element_list = [ ]
+        self.element_list = []
         self.count = 0
         self.swipe_count = 0
         self.log_type = log_type
@@ -35,7 +40,7 @@ class XiaoNianGaoPlusRecommend(object):
         self.env = env
         self.rule_dict = rule_dict
         self.our_uid = our_uid
-        chromedriverExecutable = "/usr/bin/chromedriver"
+        chromedriverExecutable = "/Users/luojunhui/Downloads/chromedriver_mac_116/chromedriver"
         print("启动微信")
         # 微信的配置文件
         caps = {
@@ -43,7 +48,7 @@ class XiaoNianGaoPlusRecommend(object):
             "devicesName": "Android",
             "appPackage": "com.tencent.mm",
             "appActivity": ".ui.LauncherUI",
-            "autoGrantPermissions": "true",
+            "autoGrantPermissions": True,
             "noReset": True,
             "resetkeyboard": True,
             "unicodekeyboard": True,
@@ -58,7 +63,7 @@ class XiaoNianGaoPlusRecommend(object):
             "chromeOptions": {"androidProcess": "com.tencent.mm:appbrand0"},
         }
         try:
-            self.driver = webdriver.Remote("http://localhost:4750/wd/hub", caps)
+            self.driver = webdriver.Remote("http://localhost:4723/wd/hub", caps)
         except Exception as e:
             print(e)
             return
@@ -245,42 +250,51 @@ class XiaoNianGaoPlusRecommend(object):
             "cover_url": cover_url,
             "session": f"xiaoniangao-{int(time.time())}",
         }
-        pipeline = PiaoQuanPipelineTest(
-            platform=self.crawler,
-            mode=self.log_type,
-            item=video_dict,
-            rule_dict=self.rule_dict,
-            env=self.env,
-            trace_id=trace_id,
+        print(json.dumps(video_dict, ensure_ascii=False, indent=4))
+        Local.logger(platform=self.platform, mode=self.log_type).info(
+            "scan_data_" + json.dumps(video_dict, ensure_ascii=False))
+        AliyunLogger(platform=self.platform, mode=self.log_type).logging(
+            code="7000",
+            message="监控到一条视频",
+            data=video_dict
         )
-        flag = pipeline.process_item()
-        if flag:
-            video_title_element = self.search_elements(
-                f'//*[contains(text(), "{video_title}")]'
-            )
-            if video_title_element is None:
-                return
-            print("点击标题,进入视频详情页")
-            video_url = self.get_video_url(video_title_element)
-            print(video_url)
-            video_url = get_redirect_url(video_url)
-            print(video_url)
-            if video_url is None:
-                self.driver.press_keycode(AndroidKey.BACK)
-                time.sleep(5)
-                return
-            video_dict["video_url"] = video_url
-            video_dict["platform"] = self.crawler
-            video_dict["strategy"] = self.log_type
-            video_dict["out_video_id"] = video_dict["video_id"]
-            video_dict["crawler_rule"] = json.dumps(self.rule_dict)
-            video_dict["user_id"] = self.our_uid
-            video_dict["publish_time"] = video_dict["publish_time_str"]
-            print(json.dumps(video_dict, ensure_ascii=False, indent=4))
-            self.download_cnt += 1
-            self.driver.press_keycode(AndroidKey.BACK)
-            time.sleep(5)
 
+    #     pipeline = PiaoQuanPipelineTest(
+    #         platform=self.crawler,
+    #         mode=self.log_type,
+    #         item=video_dict,
+    #         rule_dict=self.rule_dict,
+    #         env=self.env,
+    #         trace_id=trace_id,
+    #     )
+    #     flag = pipeline.process_item()
+    #     if flag:
+    #         video_title_element = self.search_elements(
+    #             f'//*[contains(text(), "{video_title}")]'
+    #         )
+    #         if video_title_element is None:
+    #             return
+    #         print("点击标题,进入视频详情页")
+    #         video_url = self.get_video_url(video_title_element)
+    #         print(video_url)
+    #         video_url = get_redirect_url(video_url)
+    #         print(video_url)
+    #         if video_url is None:
+    #             self.driver.press_keycode(AndroidKey.BACK)
+    #             time.sleep(5)
+    #             return
+    #         video_dict["video_url"] = video_url
+    #         video_dict["platform"] = self.crawler
+    #         video_dict["strategy"] = self.log_type
+    #         video_dict["out_video_id"] = video_dict["video_id"]
+    #         video_dict["crawler_rule"] = json.dumps(self.rule_dict)
+    #         video_dict["user_id"] = self.our_uid
+    #         video_dict["publish_time"] = video_dict["publish_time_str"]
+    #         print(json.dumps(video_dict, ensure_ascii=False, indent=4))
+    #         self.download_cnt += 1
+    #         self.driver.press_keycode(AndroidKey.BACK)
+    #         time.sleep(5)
+    #
     def get_video_info(self, video_element):
         try:
             self.get_video_info_2(video_element)
@@ -289,12 +303,16 @@ class XiaoNianGaoPlusRecommend(object):
             print(f"抓取单条视频异常:{e}\n")
 
     def get_videoList(self):
+        """
+        获取视频列表
+        :return:
+        """
+        # while True:
         self.driver.implicitly_wait(20)
         # 切换到 web_view
         self.check_to_applet(xpath='//*[@class="tab-bar--tab tab-bar--tab-selected"]')
         print("切换到 webview 成功")
         time.sleep(1)
-        page = 0
         if self.search_elements('//*[@class="list-list--list"]') is None:
             print("窗口已销毁")
             self.count = 0
@@ -308,8 +326,8 @@ class XiaoNianGaoPlusRecommend(object):
             element = self.parse_detail(i)
             self.get_video_info(element)
             self.swipe_up()
-            time.sleep(1)
-            if self.swipe_count > 100:
-                return
-        print("已抓取完一组,休眠 5 秒\n")
-        time.sleep(5)
+            time.sleep(random.randint(1, 5))
+            # if self.swipe_count > 100:
+            #     return
+        print("已抓取完一组,休眠 600 秒\n")
+        # time.sleep(600)

+ 22 - 5
spider/crawler_online/xiaoniangao.py

@@ -36,6 +36,13 @@ class XiaoNianGaoAuthor(object):
         self.expire_flag = False
         self.aliyun_log = AliyunLogger(platform=self.platform, mode=self.mode)
 
    def split_accounts(self):
        """
        Pick the important accounts out of ``self.user_list``.

        NOTE(review): placeholder implementation -- it currently returns
        the whole user list unchanged; no filtering is done yet.

        :return: the (unfiltered) list of user dicts
        """
        return self.user_list
+
     async def get_user_videos(self, user_dict):
         """
         小年糕执行代码
@@ -67,11 +74,21 @@ class XiaoNianGaoAuthor(object):
                     "share_width": 300,
                     "share_height": 240,
                 }
-                async with session.post(
-                    url,
-                    headers=headers,
-                    data=json.dumps(payload)
-                ) as response:
+                async with session.post(url, headers=headers, data=json.dumps(payload)) as response:
                     data = await response.json()
+                print(data)
 
+    async def scan_important_accounts(self, accounts):
+        """
+        批量扫描重要账号
+        :param accounts:重要账号
+        """
+        tasks = [self.get_user_videos(account) for account in accounts]
+        await asyncio.gather(*tasks)
 
+    async def run(self):
+        """
+        控制函数代码
+        :return:
+        """
+        self.split_acoounts()