Forráskód Böngészése

Merge branch 'master' of https://git.yishihui.com/Server/automatic_crawler

zhangyong 1 éve
szülő
commit
67def8d139

+ 27 - 3
application/common/redis/pyredis.py

@@ -2,6 +2,7 @@
 Redis client Python
 @author luojunhui
 """
+import time
 
 import redis
 
@@ -9,14 +10,28 @@ import redis
 class RedisClient(object):
     """
     Redis client by python
+    Todo 如果 Redis 服务挂了,怎么做能够不影响业务
+    思路, 每次使用 redis 接口前先判断是否连接成功,如果连接失败则跳过 redis ,不影响全局
     """
 
     def __init__(self):
+        self.pool = None
         self.host = 'r-bp1mb0v08fqi4hjffu.redis.rds.aliyuncs.com'
         self.port = 6379
         self.db = 2
         self.password = 'Wqsd@2019'
-        self.pool = redis.Redis(host=self.host, port=self.port, db=self.db, password=self.password)
+
+    def connect(self):
+        """
+        Build the Redis client object and report whether construction succeeded.
+
+        :return: bool — True when the client was created, False on error.
+        """
+        # NOTE(review): redis.Redis() is lazy — it does not open a TCP
+        # connection until the first command runs, so this try/except will
+        # almost never fail here; a ping() would be needed to truly verify
+        # the server is reachable. TODO confirm intended semantics.
+        try:
+            self.pool = redis.Redis(host=self.host, port=self.port, db=self.db, password=self.password)
+            return True
+        except Exception as e:
+            print("connect to redis fail, the reason is {}".format(e))
+            return False
 
     def select(self, key):
         """
@@ -25,9 +40,18 @@ class RedisClient(object):
         """
         return self.pool.get(key)
 
-    def insert(self, key, value):
+    def insert(self, key, value, expire_time):
         """
         insert info from redis
         :return:
         """
-        self.pool.set(key, value)
+        self.pool.set(key, value, expire_time)
+
+    def delete(self, key):
+        """
+        Delete a key from Redis.
+
+        :param key: key to remove
+        :return: None
+        """
+        # NOTE(review): assumes connect() ran first — self.pool is None
+        # until then, so calling this earlier raises AttributeError.
+        self.pool.delete(key)
+

+ 2 - 1
application/config/topic_group_queue.py

@@ -14,7 +14,8 @@ class TopicGroup(object):
             ("xsdd", 'recommend', 'xishiduoduo'),
             ("jxxf", 'recommend', 'jixiangxingfu'),
             ("xnght", 'recommend', 'xiaoniangaohuati'),
-            ('ynfqcz', 'recommend', 'yuannifuqichangzai')
+            ('ynfqcz', 'recommend', 'yuannifuqichangzai'),
+            ('zhdsn', 'recommend', 'zuihaodesongni')
         ]
 
     def produce(self):

+ 27 - 10
application/pipeline/pipeline.py

@@ -1,3 +1,4 @@
+import hashlib
 import re
 import sys
 import os
@@ -6,6 +7,7 @@ import time
 sys.path.append(os.getcwd())
 
 from application.common import MysqlHelper, AliyunLogger
+from application.common.redis.pyredis import RedisClient
 
 
 class PiaoQuanPipeline(object):
@@ -23,6 +25,7 @@ class PiaoQuanPipeline(object):
         self.mysql = MysqlHelper(env=env, mode=mode, platform=platform)
         self.aliyun_log = AliyunLogger(platform=platform, mode=mode, env=env)
         self.account = account
+        self.red = RedisClient()
 
     def publish_time_flag(self):
         """
@@ -159,11 +162,35 @@ class PiaoQuanPipeline(object):
             return False
         return True
 
+    def mq_exists(self):
+        """
+        Check whether an MQ message for this video was already sent.
+
+        Dedup key is md5("{platform}-{video_id}") held in Redis for 12 hours.
+        :return: False when a message was already sent (skip this video),
+                 True when it is new or Redis is unavailable (fail-open).
+        """
+        if self.red.connect():
+            index_txt = "{}-{}".format(self.platform, self.item['video_id'])
+            index_md5 = hashlib.md5(index_txt.encode()).hexdigest()
+            if self.red.select(index_md5):
+                # Key present: this video was pushed to MQ within the TTL.
+                self.aliyun_log.logging(
+                    code="2007",
+                    trace_id=self.trace_id,
+                    message="该视频 mq 已经发送"
+                )
+                return False
+            else:
+                # Mark as sent; 43200 s = 12 h expiry on the dedup key.
+                self.red.insert(index_md5, int(time.time()), 43200)
+                return True
+        else:
+            # Redis down: skip dedup rather than block the whole pipeline.
+            return True
+
     def process_item(self):
         """
         全规则判断,符合规则的数据则return True
         :return:
         """
+        # 判断该 mq 是否已经发了
+        if not self.mq_exists():
+            return False
         if not self.publish_time_flag():
             # 记录相关日志
             return False
@@ -178,13 +205,3 @@ class PiaoQuanPipeline(object):
             return False
         return True
 
-
-# if __name__ == '__main__':
-#     sql_2 = f"""select create_time from crawler_video where video_id='18940470';"""
-#     Mysql = MysqlHelper(platform="xishiduoduo", mode="recommend")
-#     video_time = Mysql.select(sql=sql_2)
-#     print(video_time)
-#     print(video_time[0])
-#     print(video_time[0][0])
-#     print(type(video_time[0][0]))
-#     print(video_time[0][0].timestamp())

+ 9 - 8
application/pipeline/pipeline_dev.py

@@ -18,11 +18,11 @@ class PiaoQuanPipelineTest:
         update_time_stamp = self.item["update_time_stamp"]
         if self.platform == "gongzhonghao":
             if (
-                int(time.time()) - publish_time_stamp
-                > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
+                    int(time.time()) - publish_time_stamp
+                    > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
             ) and (
-                int(time.time()) - update_time_stamp
-                > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
+                    int(time.time()) - update_time_stamp
+                    > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
             ):
                 message = "发布时间超过{}天".format(
                     int(self.rule_dict.get("period", {}).get("max", 1000))
@@ -31,8 +31,8 @@ class PiaoQuanPipelineTest:
                 return False
         else:
             if (
-                int(time.time()) - publish_time_stamp
-                > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
+                    int(time.time()) - publish_time_stamp
+                    > 3600 * 24 * int(self.rule_dict.get("period", {}).get("max", 1000))
             ):
                 message = "发布时间超过{}天".format(
                     int(self.rule_dict.get("period", {}).get("max", 1000))
@@ -63,7 +63,7 @@ class PiaoQuanPipelineTest:
                     if int(self.rule_dict[key]["max"]) > 0
                     else 999999999999999
                 )
-                if key == "peroid": # peroid是抓取周期天数
+                if key == "peroid":  # peroid是抓取周期天数
                     continue
                 else:
                     flag = int(self.rule_dict[key]["min"]) <= int(self.item[key]) <= max_value
@@ -108,4 +108,5 @@ class PiaoQuanPipelineTest:
         if not self.download_rule_flag():
             # 记录相关日志
             return False
-        return True
+        return True
+

+ 2 - 1
spider/crawler_online/__init__.py

@@ -8,4 +8,5 @@ from .shayuzhufu import SharkZhuFuRecommend
 from .jiajiezhufuxishiduoduo import XiShiDuoDuoRecommend
 from .jixiangxingfu import JXXFRecommend
 from .xiaoniangaohuati import XNGHTecommend
-from .yuannifuqichangzai import YuanNiFuQiChangZai
+from .yuannifuqichangzai import YuanNiFuQiChangZai
+from .zuihaodesongni import ZuiHaoDeSongNi

+ 179 - 0
spider/crawler_online/zuihaodesongni.py

@@ -0,0 +1,179 @@
+"""
+@author: luojunhui
+"""
+import os
+import sys
+import json
+import time
+import uuid
+import random
+import datetime
+import requests
+
+sys.path.append(os.getcwd())
+
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.proxies import tunnel_proxies
+from application.common.log import AliyunLogger
+
+
+class ZuiHaoDeSongNi(object):
+    """
+    "Zuihaodesongni" (最好的送你) recommend-feed crawler.
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        # Crawl identity / configuration supplied by the scheduler.
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        # Count of videos successfully sent to ETL during this run.
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        # Flipped to True once download_cnt reaches the configured quota.
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(platform=self.platform, mode=self.mode)
+
+    def process_video_obj(self, video_obj):
+        """
+        Validate one video object and, if it passes the pipeline rules,
+        publish it to the ETL message queue.
+
+        :param video_obj: raw video dict from the recommend API
+        :return: None
+        """
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        # NOTE(review): "update_time" is used as the publish timestamp —
+        # confirm the upstream field semantics.
+        publish_time_stamp = int(video_obj["update_time"])
+        publish_time_str = datetime.datetime.fromtimestamp(publish_time_stamp).strftime(
+            "%Y-%m-%d %H:%M:%S"
+        )
+        item = VideoItem()
+        item.add_video_info("user_id", our_user["uid"])
+        item.add_video_info("user_name", our_user["nick_name"])
+        item.add_video_info("video_id", video_obj["nid"])
+        item.add_video_info("video_title", video_obj["title"])
+        item.add_video_info("publish_time_str", publish_time_str)
+        item.add_video_info("publish_time_stamp", int(publish_time_stamp))
+        item.add_video_info("video_url", video_obj["video_url"])
+        item.add_video_info("cover_url", video_obj["video_cover"])
+        item.add_video_info("out_video_id", video_obj["nid"])
+        item.add_video_info("platform", self.platform)
+        item.add_video_info("strategy", self.mode)
+        item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+        mq_obj = item.produce_item()
+        pipeline = PiaoQuanPipeline(
+            platform=self.platform,
+            mode=self.mode,
+            rule_dict=self.rule_dict,
+            env=self.env,
+            item=mq_obj,
+            trace_id=trace_id,
+        )
+        if pipeline.process_item():
+            self.download_cnt += 1
+            self.mq.send_msg(mq_obj)
+            # Throttle: sleep a random 1–5 minutes between sends.
+            time.sleep(60 * random.randint(1, 5))
+            self.aliyun_log.logging(
+                code="1002",
+                message="成功发送至 ETL",
+                data=mq_obj,
+            )
+            # Stop the whole run once the per-run quota is reached
+            # (videos_cnt.min, defaulting to 200).
+            if self.download_cnt >= int(
+                self.rule_dict.get("videos_cnt", {}).get("min", 200)
+            ):
+                self.expire_flag = True
+
+    def get_recommend_list(self, page_index):
+        """
+        Fetch one page of the recommend feed and process every video on it.
+
+        :param page_index: 1-based page number
+        :return: None
+        """
+        if self.expire_flag:
+            self.aliyun_log.logging(
+                code="2000",
+                message="本轮已经抓取到足够的数据,自动退出\t{}".format(self.download_cnt),
+            )
+            return
+        headers = {
+            'Host': 'zhdsn.wentingyou.cn',
+            "content-time": str(int(time.time() * 1000)),
+            "cache-time": str(int(time.time() * 1000)),
+            'chatkey': 'wx00da988283a73cdf',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/6.8.0(0x16080000) NetType/WIFI MiniProgramEnv/Mac MacWechat/WMPF MacWechat/3.8.6(0x13080610) XWEB/1156',
+            'content-type': 'application/x-www-form-urlencoded',
+            'visitorkey': '17096941221026589978',
+            'xweb_xhr': '1',
+            'vision': '1.1.0',
+            'token': '',
+            'accept': '*/*',
+            'sec-fetch-site': 'cross-site',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-dest': 'empty',
+            'referer': 'https://servicewechat.com/wx00da988283a73cdf/7/page-frame.html',
+            'accept-language': 'en-US,en;q=0.9'
+        }
+        # Request payload; "model" is randomized to look like varied devices.
+        po = {
+            "cid": "",
+            "page": page_index,
+            "is_ads": 1,
+            "model": random.choice(
+                [
+                    "Windows",
+                    "Mac",
+                    "HuaWei",
+                    "Xiaomi",
+                    "Xiaomi2",
+                    "Yandex",
+                    "Google",
+                    "iphone",
+                    "oppo",
+                ]
+            ),
+            "mini_version": "3.8.6",
+            "ini_id": "17096941221026589978"
+        }
+        params = {"parameter": json.dumps(po)}
+        url = "https://zhdsn.wentingyou.cn/index.php/v111/index/index"
+        # Fixed 5 s pause between page requests to stay polite.
+        time.sleep(5)
+        response = requests.request(
+            "GET", url=url, headers=headers, params=params, proxies=tunnel_proxies()
+        )
+        data = response.json()
+        # Per-item try/except: one bad video must not abort the page.
+        for index, video_obj in enumerate(data["data"]["list"], 1):
+            try:
+                self.aliyun_log.logging(
+                    code="1001",
+                    message="扫描到一条视频",
+                    data=video_obj,
+                )
+                self.process_video_obj(video_obj)
+            except Exception as e:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取第{}条的时候出现问题, 报错信息是{}".format(index, e),
+                )
+
+    def run(self):
+        """
+        Crawl up to 39 pages of the feed, stopping early once the
+        per-run quota has been reached.
+
+        :return: None
+        """
+        for page in range(1, 40):
+            if self.expire_flag:
+                self.aliyun_log.logging(
+                    code="2000",
+                    message="本轮已经抓取到足够的数据,自动退出\t{}".format(self.download_cnt),
+                )
+                return
+            else:
+                # Per-page try/except: one failing page must not end the run.
+                try:
+                    self.get_recommend_list(page_index=page)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取第{}页时候出现错误, 报错信息是{}".format(page, e),
+                    )
+
+

+ 5 - 1
spider/spider_map.py

@@ -47,9 +47,13 @@ spider_map = {
     "xiaoniangaohuati": {
         "recommend": XNGHTecommend
     },
-    # 福气旺系列
+    # 福气旺系列, 愿你福气常在
     "yuannifuqichangzai": {
         "recommend": YuanNiFuQiChangZai
+    },
+    # 福气旺系列, 最好的送你
+    "zuihaodesongni": {
+        "recommend": ZuiHaoDeSongNi
     }
 
 }