zhangyong · 3 months ago
parent
commit
776897abff

+ 4 - 0
application/config/topic_group_queue.py

@@ -32,6 +32,10 @@ class TopicGroup(object):
             ('jrzfhkfg', 'recommend', 'jierizhufuhuakaifugui'),
             ('ynfqmm', 'recommend', 'yuannifuqimanman'),
             ('hyzfd', 'recommend', 'haoyunzhufuduo'),
+            ('jrzfxfjx', 'recommend', 'jierizhufuxingfujixiang'),
+            ('hysp', 'recommend', 'haoyoushipin'),
+            ('qz', 'recommend', 'quzhuan'),
+            ('zfdwh', 'recommend', 'zhufudewenhou'),
         ]
 
     def produce(self):

+ 1 - 1
application/pipeline/pipeline.py

@@ -184,7 +184,7 @@ class PiaoQuanPipeline(object):
 
         if self.platform == "zhufuniannianshunxinjixiang" or  self.platform == "weiquanshipin" or  self.platform == "piaoquangushi" or  self.platform == "lepaoledong" or  self.platform == "zhufukuaizhuan" or self.platform == "linglingkuailezhufu" or self.platform == "lepaoledongdijie":
             return True
-        if self.platform == "jierizhufuhuakaifugui" or self.platform == "yuannifuqimanman" or self.platform == "haoyunzhufuduo":
+        if self.platform == "jierizhufuhuakaifugui" or self.platform == "yuannifuqimanman" or self.platform == "haoyunzhufuduo" or self.platform == "quzhuan" or self.platform == "zhufudewenhou" or self.platform == "jierizhufuxingfujixiang" or self.platform == "haoyoushipin":
             return True
         if self.platform == "zhuwanwufusunew" and self.mode == "recommend":
             return True

+ 197 - 0
spider/crawler_online/haoyoushipin.py

@@ -0,0 +1,197 @@
+import os
+import random
+import sys
+import time
+import uuid
+import json
+from datetime import datetime
+
+import cv2
+import requests
+
+from application.common.feishu import FsData
+from application.common.feishu.feishu_utils import FeishuUtils
+from application.common.gpt import GPT4oMini
+from application.common.mysql.sql import Sql
+from application.common.redis.xng_redis import xng_in_video_data
+
+sys.path.append(os.getcwd())
+
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.log import AliyunLogger
+from application.common.mysql import MysqlHelper
+
+
+
+class HYSPRecommend(object):
+
+    """
+    好友视频
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        self.limit_flag = False
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
+        self.mysql = MysqlHelper(mode=self.mode, platform=self.platform)
+
+
+    def get_recommend_list(self):
+        """
+        获取推荐页视频
+        """
+        print("好友视频")
+
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        cursor  = ""
+        url = "http://8.217.192.46:8889/crawler/hao_you_shi_pin/recommend"
+        data_rule = FsData()
+        title_rule = data_rule.get_title_rule()
+        while True:
+            payload = json.dumps({
+                "cursor": cursor
+            })
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取单条视频失败,请求失败"
+                )
+                return
+            cursor = response['data']["next_cursor"]
+            data = response['data']['data']
+            if len(data) == 0:
+                return
+            for index, video_obj in enumerate(data, 1):
+                try:
+                    self.aliyun_log.logging(
+                        code="1001", message="扫描到一条视频", data=video_obj
+                    )
+                    self.process_video_obj(video_obj,title_rule)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取单条视频失败, 该视频位于第{}页第{}条报错原因是{}".format(
+                            1, index, e
+                        ),
+                    )
+                if self.limit_flag:
+                    return
+                time.sleep(random.randint(1, 5))
+
+    def process_video_obj(self, video_obj,title_rule):
+        """
+        处理视频
+        :param video_obj:
+        """
+        time.sleep(random.randint(3, 8))
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        video_url = self.get_video_url(video_obj["vid"])
+        if video_url:
+            item = VideoItem()
+            item.add_video_info("video_id", video_obj["id"])
+            item.add_video_info("video_title", video_obj["vtitle"])
+            item.add_video_info("play_cnt", 0)
+            item.add_video_info("publish_time_stamp", int(time.time()))
+            item.add_video_info("out_user_id", video_obj["id"])
+            item.add_video_info("cover_url", "https://qiniu.818ao.com/"+video_obj["poster"])
+            item.add_video_info("like_cnt", 0)
+            item.add_video_info("share_cnt", 0)
+            item.add_video_info("comment_cnt", 0)
+            item.add_video_info("video_url", video_url)
+            item.add_video_info("out_video_id", video_obj["id"])
+            item.add_video_info("platform", self.platform)
+            item.add_video_info("strategy", self.mode)
+            item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+            item.add_video_info("user_id", our_user["uid"])
+            item.add_video_info("user_name", our_user["nick_name"])
+            mq_obj = item.produce_item()
+            pipeline = PiaoQuanPipeline(
+                platform=self.platform,
+                mode=self.mode,
+                rule_dict=self.rule_dict,
+                env=self.env,
+                item=mq_obj,
+                trace_id=trace_id,
+            )
+            if pipeline.process_item():
+                title_list = title_rule.split(",")
+                title = video_obj["vtitle"]
+                contains_keyword = any(keyword in title for keyword in title_list)
+                if contains_keyword:
+                    new_title = GPT4oMini.get_ai_mini_title(title)
+                    if new_title:
+                        item.add_video_info("video_title", new_title)
+                        current_time = datetime.now()
+                        formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
+                        values = [
+                            [
+                                video_url,
+                                "https://qiniu.818ao.com/" + video_obj["poster"],
+                                title,
+                                new_title,
+                                formatted_time,
+                            ]
+                        ]
+                        FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "8c7191", "ROWS", 1, 2)
+                        time.sleep(0.5)
+                        FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "8c7191", "A2:Z2", values)
+                self.download_cnt += 1
+                self.mq.send_msg(mq_obj)
+                self.aliyun_log.logging(code="1002", message="成功发送至 ETL", data=mq_obj)
+                if self.download_cnt >= int(
+                        self.rule_dict.get("videos_cnt", {}).get("min", 200)
+                ):
+                    self.limit_flag = True
+
+    """获取视频链接"""
+    def get_video_url(self, vid):
+        url = "http://8.217.192.46:8889/crawler/hao_you_shi_pin/detail"
+
+        payload = json.dumps({
+            "content_id": f"{vid}"
+        })
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        try:
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="获取视频链接失败"
+                )
+                return None
+            video_url = response['data']['data']['video_url_list'][0]['video_url']
+            return video_url
+        except Exception as e:
+            return None
+
+    def run(self):
+        self.get_recommend_list()
+
+
+if __name__ == '__main__':
+    J = HYSPRecommend(
+        platform="haoyoushipin",
+        mode="recommend",
+        rule_dict={},
+        user_list=[{'uid': "123456", 'nick_name': "xiaoxiao"}],
+
+    )
+    J.get_recommend_list()
+    # J.logic()

+ 170 - 0
spider/crawler_online/jierizhufuxingfujixiang.py

@@ -0,0 +1,170 @@
+import os
+import random
+import sys
+import time
+import uuid
+import json
+from datetime import datetime
+
+import cv2
+import requests
+
+from application.common.feishu import FsData
+from application.common.feishu.feishu_utils import FeishuUtils
+from application.common.gpt import GPT4oMini
+from application.common.mysql.sql import Sql
+from application.common.redis.xng_redis import xng_in_video_data
+
+sys.path.append(os.getcwd())
+
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.log import AliyunLogger
+from application.common.mysql import MysqlHelper
+
+
+
+class JRZFXFJXRecommend(object):
+
+    """
+    节日祝福幸福吉祥
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        self.limit_flag = False
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
+        self.mysql = MysqlHelper(mode=self.mode, platform=self.platform)
+
+
+    def get_recommend_list(self):
+        """
+        获取推荐页视频
+        """
+        print("喜鹊波-节日祝福幸福吉祥")
+
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        url = "http://8.217.192.46:8889/crawler/jie_ri_zhu_fu_xing_fu_ji_xiang/recommend"
+        cursor  = ""
+        data_rule = FsData()
+        title_rule = data_rule.get_title_rule()
+        while True:
+            payload = json.dumps({
+                "cursor": cursor
+            })
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取单条视频失败,请求失败"
+                )
+                return
+            data = response['data']['data']
+            if len(data) == 0:
+                return
+            for index, video_obj in enumerate(data, 1):
+                try:
+                    self.aliyun_log.logging(
+                        code="1001", message="扫描到一条视频", data=video_obj
+                    )
+                    self.process_video_obj(video_obj, title_rule)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取单条视频失败, 该视频位于第{}页第{}条报错原因是{}".format(
+                            1, index, e
+                        ),
+                    )
+                if self.limit_flag:
+                    return
+                time.sleep(random.randint(1, 5))
+
+    def process_video_obj(self, video_obj, title_rule):
+        """
+        处理视频
+        :param video_obj:
+        """
+        time.sleep(random.randint(3, 8))
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        item = VideoItem()
+        item.add_video_info("video_id", video_obj["id"])
+        item.add_video_info("video_title", video_obj["title"])
+        item.add_video_info("play_cnt", 0)
+        item.add_video_info("publish_time_stamp", int(time.time()))
+        item.add_video_info("out_user_id", video_obj["id"])
+        item.add_video_info("cover_url", video_obj["images"])
+        item.add_video_info("like_cnt", 0)
+        item.add_video_info("share_cnt", 0)
+        item.add_video_info("comment_cnt", 0)
+        item.add_video_info("video_url", video_obj["video_url"])
+        item.add_video_info("out_video_id", video_obj["id"])
+        item.add_video_info("platform", self.platform)
+        item.add_video_info("strategy", self.mode)
+        item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+        item.add_video_info("user_id", our_user["uid"])
+        item.add_video_info("user_name", our_user["nick_name"])
+        mq_obj = item.produce_item()
+        pipeline = PiaoQuanPipeline(
+            platform=self.platform,
+            mode=self.mode,
+            rule_dict=self.rule_dict,
+            env=self.env,
+            item=mq_obj,
+            trace_id=trace_id,
+        )
+        if pipeline.process_item():
+            title_list = title_rule.split(",")
+            title = video_obj["title"]
+            contains_keyword = any(keyword in title for keyword in title_list)
+            if contains_keyword:
+                new_title = GPT4oMini.get_ai_mini_title(title)
+                if new_title:
+                    item.add_video_info("video_title", new_title)
+                    current_time = datetime.now()
+                    formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
+                    values = [
+                        [
+                            video_obj["video_url"],
+                            video_obj["images"],
+                            title,
+                            new_title,
+                            formatted_time,
+                        ]
+                    ]
+                    FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "rcQv7r", "ROWS", 1, 2)
+                    time.sleep(0.5)
+                    FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "rcQv7r", "A2:Z2", values)
+            self.download_cnt += 1
+            self.mq.send_msg(mq_obj)
+            self.aliyun_log.logging(code="1002", message="成功发送至 ETL", data=mq_obj)
+            if self.download_cnt >= int(
+                    self.rule_dict.get("videos_cnt", {}).get("min", 200)
+            ):
+                self.limit_flag = True
+
+    def run(self):
+        self.get_recommend_list()
+
+
+if __name__ == '__main__':
+    J = JRZFXFJXRecommend(
+        platform="jierizhufuxingfujixiang",
+        mode="recommend",
+        rule_dict={},
+        user_list=[{'uid': "123456", 'nick_name': "xiaoxiao"}],
+
+    )
+    J.get_recommend_list()
+    # J.logic()

+ 170 - 0
spider/crawler_online/quzhuan.py

@@ -0,0 +1,170 @@
+import os
+import random
+import sys
+import time
+import uuid
+import json
+from datetime import datetime
+
+import cv2
+import requests
+
+from application.common.feishu import FsData
+from application.common.feishu.feishu_utils import FeishuUtils
+from application.common.gpt import GPT4oMini
+from application.common.mysql.sql import Sql
+from application.common.redis.xng_redis import xng_in_video_data
+
+sys.path.append(os.getcwd())
+
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.log import AliyunLogger
+from application.common.mysql import MysqlHelper
+
+
+
+class QZRecommend(object):
+
+    """
+    趣转
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        self.limit_flag = False
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
+        self.mysql = MysqlHelper(mode=self.mode, platform=self.platform)
+
+
+    def get_recommend_list(self):
+        """
+        获取推荐页视频
+        """
+        print("趣转开始")
+
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        url = "http://8.217.192.46:8889/crawler/qu_zhuan/recommend"
+        # url = "http://8.217.192.46:8889/crawler/le_pao_le_dong/recommend"
+        data_rule = FsData()
+        title_rule = data_rule.get_title_rule()
+        while True:
+            payload = json.dumps({
+                "cursor": ""
+            })
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取单条视频失败,请求失败"
+                )
+                return
+            data = response['data']['data']
+            if len(data) == 0:
+                return
+            for index, video_obj in enumerate(data, 1):
+                try:
+                    self.aliyun_log.logging(
+                        code="1001", message="扫描到一条视频", data=video_obj
+                    )
+                    self.process_video_obj(video_obj, title_rule)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取单条视频失败, 该视频位于第{}页第{}条报错原因是{}".format(
+                            1, index, e
+                        ),
+                    )
+                if self.limit_flag:
+                    return
+                time.sleep(random.randint(1, 5))
+
+    def process_video_obj(self, video_obj, title_rule):
+        """
+        处理视频
+        :param video_obj:
+        """
+        time.sleep(random.randint(3, 8))
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        item = VideoItem()
+        item.add_video_info("video_id", video_obj["id"])
+        item.add_video_info("video_title", video_obj["title"])
+        item.add_video_info("play_cnt", video_obj["play_cnt"])
+        item.add_video_info("publish_time_stamp", int(time.time()))
+        item.add_video_info("out_user_id", video_obj["id"])
+        item.add_video_info("cover_url", video_obj["cover_path_oss"])
+        item.add_video_info("like_cnt", 0)
+        item.add_video_info("share_cnt", video_obj["share_cnt"])
+        item.add_video_info("comment_cnt", 0)
+        item.add_video_info("video_url", video_obj["video_path"])
+        item.add_video_info("out_video_id", video_obj["id"])
+        item.add_video_info("platform", self.platform)
+        item.add_video_info("strategy", self.mode)
+        item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+        item.add_video_info("user_id", our_user["uid"])
+        item.add_video_info("user_name", our_user["nick_name"])
+        mq_obj = item.produce_item()
+        pipeline = PiaoQuanPipeline(
+            platform=self.platform,
+            mode=self.mode,
+            rule_dict=self.rule_dict,
+            env=self.env,
+            item=mq_obj,
+            trace_id=trace_id,
+        )
+        if pipeline.process_item():
+            title_list = title_rule.split(",")
+            title = video_obj["title"]
+            contains_keyword = any(keyword in title for keyword in title_list)
+            if contains_keyword:
+                new_title = GPT4oMini.get_ai_mini_title(title)
+                if new_title:
+                    item.add_video_info("video_title", new_title)
+                    current_time = datetime.now()
+                    formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
+                    values = [
+                        [
+                            video_obj["video_path"],
+                            video_obj["cover_path_oss"],
+                            title,
+                            new_title,
+                            formatted_time,
+                        ]
+                    ]
+                    FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "rcQv7r", "ROWS", 1, 2)
+                    time.sleep(0.5)
+                    FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "rcQv7r", "A2:Z2", values)
+            self.download_cnt += 1
+            self.mq.send_msg(mq_obj)
+            self.aliyun_log.logging(code="1002", message="成功发送至 ETL", data=mq_obj)
+            if self.download_cnt >= int(
+                    self.rule_dict.get("videos_cnt", {}).get("min", 200)
+            ):
+                self.limit_flag = True
+
+    def run(self):
+        self.get_recommend_list()
+
+
+if __name__ == '__main__':
+    J = QZRecommend(
+        platform="quzhuan",
+        mode="recommend",
+        rule_dict={},
+        user_list=[{'uid': "123456", 'nick_name': "xiaoxiao"}],
+
+    )
+    J.get_recommend_list()
+    # J.logic()

+ 197 - 0
spider/crawler_online/zhufudewenhou.py

@@ -0,0 +1,197 @@
+import os
+import random
+import sys
+import time
+import uuid
+import json
+from datetime import datetime
+
+import cv2
+import requests
+
+from application.common.feishu import FsData
+from application.common.feishu.feishu_utils import FeishuUtils
+from application.common.gpt import GPT4oMini
+from application.common.mysql.sql import Sql
+from application.common.redis.xng_redis import xng_in_video_data
+
+sys.path.append(os.getcwd())
+
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.log import AliyunLogger
+from application.common.mysql import MysqlHelper
+
+
+
+class ZFDWHRecommend(object):
+
+    """
+    祝福的问候
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        self.limit_flag = False
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
+        self.mysql = MysqlHelper(mode=self.mode, platform=self.platform)
+
+
+    def get_recommend_list(self):
+        """
+        获取推荐页视频
+        """
+        print("祝福的问候")
+
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        cursor  = ""
+        url = "http://8.217.192.46:8889/crawler/zhu_fu_de_wen_hou/recommend"
+        data_rule = FsData()
+        title_rule = data_rule.get_title_rule()
+        while True:
+            payload = json.dumps({
+                "cursor": cursor
+            })
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取单条视频失败,请求失败"
+                )
+                return
+            cursor = response['data']["next_cursor"]
+            data = response['data']['data']
+            if len(data) == 0:
+                return
+            for index, video_obj in enumerate(data, 1):
+                try:
+                    self.aliyun_log.logging(
+                        code="1001", message="扫描到一条视频", data=video_obj
+                    )
+                    self.process_video_obj(video_obj,title_rule)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取单条视频失败, 该视频位于第{}页第{}条报错原因是{}".format(
+                            1, index, e
+                        ),
+                    )
+                if self.limit_flag:
+                    return
+                time.sleep(random.randint(1, 5))
+
+    def process_video_obj(self, video_obj,title_rule):
+        """
+        处理视频
+        :param video_obj:
+        """
+        time.sleep(random.randint(3, 8))
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        video_url = self.get_video_url(video_obj["id"])
+        if video_url:
+            item = VideoItem()
+            item.add_video_info("video_id", video_obj["id"])
+            item.add_video_info("video_title", video_obj["title"])
+            item.add_video_info("play_cnt", 0)
+            item.add_video_info("publish_time_stamp", int(time.time()))
+            item.add_video_info("out_user_id", video_obj["id"])
+            item.add_video_info("cover_url", video_obj["cover"])
+            item.add_video_info("like_cnt", 0)
+            item.add_video_info("share_cnt", 0)
+            item.add_video_info("comment_cnt", 0)
+            item.add_video_info("video_url", video_url)
+            item.add_video_info("out_video_id", video_obj["id"])
+            item.add_video_info("platform", self.platform)
+            item.add_video_info("strategy", self.mode)
+            item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+            item.add_video_info("user_id", our_user["uid"])
+            item.add_video_info("user_name", our_user["nick_name"])
+            mq_obj = item.produce_item()
+            pipeline = PiaoQuanPipeline(
+                platform=self.platform,
+                mode=self.mode,
+                rule_dict=self.rule_dict,
+                env=self.env,
+                item=mq_obj,
+                trace_id=trace_id,
+            )
+            if pipeline.process_item():
+                title_list = title_rule.split(",")
+                title = video_obj["title"]
+                contains_keyword = any(keyword in title for keyword in title_list)
+                if contains_keyword:
+                    new_title = GPT4oMini.get_ai_mini_title(title)
+                    if new_title:
+                        item.add_video_info("video_title", new_title)
+                        current_time = datetime.now()
+                        formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
+                        values = [
+                            [
+                                video_url,
+                                video_obj["cover"],
+                                title,
+                                new_title,
+                                formatted_time,
+                            ]
+                        ]
+                        FeishuUtils.insert_columns("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "8c7191", "ROWS", 1, 2)
+                        time.sleep(0.5)
+                        FeishuUtils.update_values("U5dXsSlPOhiNNCtEfgqcm1iYnpf", "8c7191", "A2:Z2", values)
+                self.download_cnt += 1
+                self.mq.send_msg(mq_obj)
+                self.aliyun_log.logging(code="1002", message="成功发送至 ETL", data=mq_obj)
+                if self.download_cnt >= int(
+                        self.rule_dict.get("videos_cnt", {}).get("min", 200)
+                ):
+                    self.limit_flag = True
+
+    """获取视频链接"""
+    def get_video_url(self, vid):
+        url = "http://8.217.192.46:8889/crawler/zhu_fu_de_wen_hou/detail"
+
+        payload = json.dumps({
+            "content_id": f"{vid}"
+        })
+        headers = {
+            'Content-Type': 'application/json'
+        }
+        try:
+            response = requests.request("POST", url, headers=headers, data=payload)
+            response = response.json()
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="获取视频链接失败"
+                )
+                return None
+            video_url = response['data']['data']['video_url_list'][0]['video_url']
+            return video_url
+        except Exception as e:
+            return None
+
+    def run(self):
+        self.get_recommend_list()
+
+
+if __name__ == '__main__':
+    J = ZFDWHRecommend(
+        platform="zhufudewenhou",
+        mode="recommend",
+        rule_dict={},
+        user_list=[{'uid': "123456", 'nick_name': "xiaoxiao"}],
+
+    )
+    J.get_recommend_list()
+    # J.logic()

+ 21 - 1
spider/spider_map.py

@@ -7,15 +7,19 @@ value是爬虫封装好的类
 from spider.crawler_online import *
 from spider.crawler_online.benshanzhufu import BSZHRecommend
 from spider.crawler_online.dakaiyinghaoyun import DKYHYRecommend
+from spider.crawler_online.haoyoushipin import HYSPRecommend
 from spider.crawler_online.haoyunzhufuduo import HYZFDfRecommend
 from spider.crawler_online.jierizhufuhuakaifugui import JRZFHKFGRecommend
+from spider.crawler_online.jierizhufuxingfujixiang import JRZFXFJXRecommend
 from spider.crawler_online.lepaoledong import LPLDRecommend
 from spider.crawler_online.lepaoledongdijie import LPLDDJRecommend
 from spider.crawler_online.linglingkuailezhufu import LlklzfRecommend
 from spider.crawler_online.piaoquangushi import PQGSRecommend
+from spider.crawler_online.quzhuan import QZRecommend
 from spider.crawler_online.weiquanshipin import WQSPRecommend
 from spider.crawler_online.xiaoniangaotuijianliu import XNGTJLRecommend
 from spider.crawler_online.yuannifuqimanman import YNFQMMRecommend
+from spider.crawler_online.zhufudewenhou import ZFDWHRecommend
 from spider.crawler_online.zhufukuaizhuan import ZFKZRecommend
 from spider.crawler_online.zhufuniannianshunxinjixiang import ZFNNSXJXRecommend
 from spider.crawler_online.zhufuquanzituijianliu import ZFQZTJLRecommend
@@ -132,6 +136,22 @@ spider_map = {
     # 祝福年年顺心吉祥
     "zhufuniannianshunxinjixiang": {
         "recommend": ZFNNSXJXRecommend
-    }
+    },
+    # 节日祝福幸福吉祥
+    "jierizhufuxingfujixiang": {
+        "recommend": JRZFXFJXRecommend
+    },
+    # 好友视频
+    "haoyoushipin": {
+        "recommend": HYSPRecommend
+    },
+    # 趣转
+    "quzhuan": {
+        "recommend": QZRecommend
+    },
+    # 祝福的问候
+    "zhufudewenhou": {
+        "recommend": ZFDWHRecommend
+    },
 
 }
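
Note: the four new spider_map entries follow the existing pattern of platform key → mode → crawler class. The sketch below shows how such an entry is presumably resolved and executed; `run_spider` is a hypothetical helper for illustration (the real scheduler/entry point lives elsewhere in the repository), while the constructor signature and `run()` method match the crawler classes added in this commit:

# Hypothetical dispatch sketch, not the project's actual entry point.
from spider.spider_map import spider_map


def run_spider(platform, mode, rule_dict, user_list, env="prod"):
    # Resolve the crawler class registered for this platform/mode pair.
    try:
        spider_class = spider_map[platform][mode]
    except KeyError:
        raise ValueError("no spider registered for {}/{}".format(platform, mode))
    # The crawler classes added in this commit all share this constructor and run().
    crawler = spider_class(
        platform=platform,
        mode=mode,
        rule_dict=rule_dict,
        user_list=user_list,
        env=env,
    )
    crawler.run()


if __name__ == "__main__":
    run_spider(
        platform="haoyoushipin",
        mode="recommend",
        rule_dict={},
        user_list=[{"uid": "123456", "nick_name": "xiaoxiao"}],
    )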