
Add benshanzhufu (本山祝福) recommend channel

zhangyong 5 months ago
parent
commit
d7ee2c8116

+ 1 - 0
application/config/topic_group_queue.py

@@ -20,6 +20,7 @@ class TopicGroup(object):
             ('dkyhy', 'recommend', 'dakaiyinghaoyun'),
             ('xngtjl', 'recommend', 'xiaoniangaotuijianliu'),
             ('zfqztjl', 'recommend', 'zhufuquanzituijianliu'),
+            ('bszf', 'recommend', 'benshanzhufu'),
             ('ttjfq', 'recommend', 'tiantianjufuqi')
         ]
 

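The new ('bszf', 'recommend', 'benshanzhufu') tuple follows the (abbreviation, mode, platform) pattern of the existing entries. A minimal lookup sketch, assuming the tuples stay a plain Python list as shown above (the groups variable here is illustrative, not the real attribute name):

    groups = [
        ('zfqztjl', 'recommend', 'zhufuquanzituijianliu'),
        ('bszf', 'recommend', 'benshanzhufu'),
        ('ttjfq', 'recommend', 'tiantianjufuqi'),
    ]
    # Resolve the queue abbreviation and mode registered for the new platform.
    abbr, mode, platform = next(g for g in groups if g[2] == 'benshanzhufu')
    assert (abbr, mode) == ('bszf', 'recommend')
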
+ 157 - 0
spider/crawler_online/benshanzhufu.py

@@ -0,0 +1,157 @@
+import os
+import random
+import sys
+import time
+import uuid
+import json
+
+from datetime import datetime
+
+import cv2
+import requests
+
+sys.path.append(os.getcwd())
+
+from application.common import Feishu
+from application.items import VideoItem
+from application.pipeline import PiaoQuanPipeline
+from application.common.messageQueue import MQ
+from application.common.log import AliyunLogger
+from application.common.mysql import MysqlHelper
+
+
+class BSZHRecommend(object):
+    """
+    本山祝福推荐流 (benshanzhufu recommend-feed crawler)
+    """
+
+    def __init__(self, platform, mode, rule_dict, user_list, env="prod"):
+        self.limit_flag = False
+        self.platform = platform
+        self.mode = mode
+        self.rule_dict = rule_dict
+        self.user_list = user_list
+        self.env = env
+        self.download_cnt = 0
+        self.mq = MQ(topic_name="topic_crawler_etl_" + self.env)
+        self.expire_flag = False
+        self.aliyun_log = AliyunLogger(mode=self.mode, platform=self.platform)
+        self.mysql = MysqlHelper(mode=self.mode, platform=self.platform)
+
+    def get_video_duration(self, video_link: str) -> int:
+        """Return the video duration in seconds, or 0 if it cannot be read."""
+        cap = cv2.VideoCapture(video_link)
+        duration = 0
+        if cap.isOpened():
+            rate = cap.get(cv2.CAP_PROP_FPS)
+            frame_num = cap.get(cv2.CAP_PROP_FRAME_COUNT)
+            if rate > 0:
+                duration = int(frame_num / rate)
+        cap.release()
+        return duration
+
+    def get_recommend_list(self):
+        """
+        获取推荐页视频 (fetch the recommend-feed videos, page by page)
+        """
+        print("本山祝福开始")
+        url = "http://47.236.68.175:8889/crawler/ben_shan_zhu_fu/recommend"
+        next_cursor = 1
+        for page in range(5):
+            payload = json.dumps({
+                "cursor": f"{next_cursor}"
+            })
+            headers = {
+                'Content-Type': 'application/json'
+            }
+            # 最多重试 3 次 (retry the request up to 3 times)
+            for _ in range(3):
+                response = requests.request("POST", url, headers=headers, data=payload)
+                response = response.json()
+                if response['code'] != 0:
+                    time.sleep(2)
+                    continue
+                else:
+                    break
+            if response['code'] != 0:
+                self.aliyun_log.logging(
+                    code="3000",
+                    message="抓取单条视频失败,请求失败"
+                )
+                return
+            next_cursor = response['data']['next_cursor']
+            for index, video_obj in enumerate(response['data']['data'], 1):
+                try:
+                    self.aliyun_log.logging(
+                        code="1001", message="扫描到一条视频", data=video_obj
+                    )
+                    self.process_video_obj(video_obj)
+                except Exception as e:
+                    self.aliyun_log.logging(
+                        code="3000",
+                        message="抓取单条视频失败, 该视频位于第{}页第{}条报错原因是{}".format(
+                            page + 1, index, e
+                        ),
+                    )
+                if self.limit_flag:
+                    return
+                time.sleep(random.randint(5, 10))
+
+    def process_video_obj(self, video_obj):
+        """
+        处理视频
+        :param video_obj:
+        """
+        time.sleep(random.randint(3, 8))
+        trace_id = self.platform + str(uuid.uuid1())
+        our_user = random.choice(self.user_list)
+        item = VideoItem()
+        item.add_video_info("video_id", video_obj["nid"])
+        item.add_video_info("video_title", video_obj["title"])
+        item.add_video_info("play_cnt", 0)
+        item.add_video_info("publish_time_stamp", int(video_obj["update_time"]))
+        item.add_video_info("out_user_id", video_obj["nid"])
+        item.add_video_info("cover_url", video_obj["video_cover"])
+        item.add_video_info("like_cnt", 0)
+        item.add_video_info("video_url", video_obj["video_url"])
+        item.add_video_info("out_video_id", video_obj["nid"])
+        item.add_video_info("platform", self.platform)
+        item.add_video_info("strategy", self.mode)
+        item.add_video_info("session", "{}-{}".format(self.platform, int(time.time())))
+        item.add_video_info("user_id", our_user["uid"])
+        item.add_video_info("user_name", our_user["nick_name"])
+        mq_obj = item.produce_item()
+        pipeline = PiaoQuanPipeline(
+            platform=self.platform,
+            mode=self.mode,
+            rule_dict=self.rule_dict,
+            env=self.env,
+            item=mq_obj,
+            trace_id=trace_id,
+        )
+        if pipeline.process_item():
+            self.download_cnt += 1
+            self.mq.send_msg(mq_obj)
+            self.aliyun_log.logging(code="1002", message="成功发送至 ETL", data=mq_obj)
+            # 达到 rule_dict 中 videos_cnt.min 后停止本轮抓取 (stop once the configured minimum count is reached)
+            if self.download_cnt >= int(
+                    self.rule_dict.get("videos_cnt", {}).get("min", 200)
+            ):
+                self.limit_flag = True
+
+    def run(self):
+        self.get_recommend_list()
+
+
+if __name__ == '__main__':
+    J = BSZHRecommend(
+        platform="benshanzhufu",
+        mode="recommend",
+        rule_dict={},
+        user_list=[{'uid': "123456", 'nick_name': "xiaoxiao"}],
+    )
+    J.run()
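
For reference, the crawler above drives a cursor-paged POST endpoint. The request and response shape in this sketch is inferred from get_recommend_list and process_video_obj (field names such as nid, title, video_url and next_cursor come from that code, not from separate API documentation), so treat it as an assumption rather than a spec:

    import json
    import requests

    URL = "http://47.236.68.175:8889/crawler/ben_shan_zhu_fu/recommend"

    def fetch_page(cursor: int) -> dict:
        # POST the cursor as a JSON string body, exactly as the crawler does.
        headers = {"Content-Type": "application/json"}
        resp = requests.post(URL, headers=headers, data=json.dumps({"cursor": str(cursor)}))
        return resp.json()

    page = fetch_page(1)
    if page["code"] == 0:
        for video in page["data"]["data"]:
            print(video["nid"], video["title"], video["video_url"])
        next_cursor = page["data"]["next_cursor"]  # cursor for the next request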

+ 5 - 0
spider/spider_map.py

@@ -5,6 +5,7 @@ sub_key是recommend,author, value;
 value是爬虫封装好的类
 """
 from spider.crawler_online import *
+from spider.crawler_online.benshanzhufu import BSZHRecommend
 from spider.crawler_online.dakaiyinghaoyun import DKYHYRecommend
 from spider.crawler_online.xiaoniangaotuijianliu import XNGTJLRecommend
 from spider.crawler_online.zhufuquanzituijianliu import ZFQZTJLRecommend
@@ -74,6 +75,10 @@ spider_map = {
     "xiaoniangaotuijianliu": {
         "recommend": XNGTJLRecommend
     },
+    # 本山祝福 (benshanzhufu)
+    "benshanzhufu": {
+        "recommend": BSZHRecommend
+    },
     # 祝福圈子推荐流
     "zhufuquanzituijianliu": {
         "recommend": ZFQZTJLRecommend