zhangyong 5 months ago
parent
commit
73dbcea107
6 changed files with 99 additions and 33 deletions
  1. common/mysql_db_aigc.py (+1 -1)
  2. common/sql_help.py (+10 -1)
  3. data_channel/ks_feed.py (+34 -0)
  4. data_channel/shipinhaodandian.py (+29 -31)
  5. job_ks_feed.py (+20 -0)
  6. video_rewriting/video_processor.py (+5 -0)

+ 1 - 1
common/mysql_db_aigc.py

@@ -19,7 +19,7 @@ class AigcMysqlHelper:
         return connection
 
     @classmethod
-    def get_values(cls, sql, params):
+    def get_values(cls, sql, params=None):
         try:
             # 连接数据库
             connect = cls.connect_mysql()

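The only change here is the default value: with params=None, get_values can also be called without bind parameters (the new feed query below still passes a tuple). A minimal sketch of how such a helper typically reads against a pymysql-style connection; the body is illustrative, not the repository's exact implementation:

    @classmethod
    def get_values(cls, sql, params=None):
        # Sketch only: assumes connect_mysql() returns a pymysql-compatible connection.
        connect = cls.connect_mysql()
        try:
            with connect.cursor() as cursor:
                # pymysql's execute() accepts args=None, so callers may omit params entirely
                cursor.execute(sql, params)
                return cursor.fetchall()
        finally:
            connect.close()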
+ 10 - 1
common/sql_help.py

@@ -183,7 +183,16 @@ class sqlCollect():
         获取视频号单点内容
         """
         sql = f"""select video_id,title,author_id,author_name,cover_url,video_url,video_duration,from_user_id,from_user_name,from_group_id,from_group_name,source,wx_msg, is_encrypted, decode_key  from dandian_content where from_user_name = %s and has_used = 0 ORDER BY create_timestamp DESC  limit 2"""
-        data = AigcMysqlHelper.get_values(sql, (url))
+        data = AigcMysqlHelper.get_values(sql, (url,))
+        return data
+
+    @classmethod
+    def get_feed_data(cls, channel):
+        """
+        获取feed流视频
+        """
+        sql = f"""select video_id,channel,video_url,cover_url,title  from automator_feed_video where channel = %s and has_used = 0 and if_50_like = 1 limit 1"""
+        data = AigcMysqlHelper.get_values(sql, (channel,))
         return data
 
     @classmethod

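The (url) -> (url,) fix matters more than it looks: without the trailing comma the parentheses do not create a tuple, so the single bind value is handed to the driver as a bare string. A quick illustration with a hypothetical value:

    url = "some_user"      # hypothetical value
    print(type((url)))     # <class 'str'>   -- parentheses alone do not make a tuple
    print(type((url,)))    # <class 'tuple'> -- the one-element tuple execute() expects

The new get_feed_data query uses the same (channel,) convention from the start.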
+ 34 - 0
data_channel/ks_feed.py

@@ -0,0 +1,34 @@
+from common import AliyunLogger
+from common.sql_help import sqlCollect
+
+
+class KSFeed:
+    @classmethod
+    def get_feed_date(cls):
+        try:
+            data_list = sqlCollect.get_feed_data("快手")
+            list = []
+            if data_list:
+                for data in data_list:
+                    cover_url = data[3]
+                    video_url = data[2]
+                    if video_url and cover_url:
+                        video_id = data[0]
+                        channel = data[1]
+                        title = data[4]
+                        log_data = f"user:{channel},,video_id:{video_id},,video_url:{video_url},,original_title:{title}"
+                        AliyunLogger.logging(channel, channel, video_url, video_id, "扫描到一条视频",
+                                             "2001", log_data)
+                        AliyunLogger.logging(channel, channel, video_url, video_id, "符合规则等待改造",
+                                             "2004", log_data)
+                        all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "old_title": title, "rule":''}
+                        list.append(all_data)
+                return list
+            else:
+                return list
+        except Exception:
+            return list
+
+
+if __name__ == '__main__':
+    KSFeed.get_feed_date()

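One caveat in the new module: list is first assigned inside the try, so if sqlCollect.get_feed_data itself raises, the except branch returns a name that was never bound (an UnboundLocalError), and the name also shadows the built-in list. A defensive sketch of the same routine, assuming the same imports and the column order video_id, channel, video_url, cover_url, title from the SELECT above:

    from common import AliyunLogger
    from common.sql_help import sqlCollect


    class KSFeed:
        @classmethod
        def get_feed_date(cls):
            video_list = []  # created before the try, so the except path always has it
            try:
                data_list = sqlCollect.get_feed_data("快手")
                for data in data_list or []:
                    video_id, channel, video_url, cover_url, title = data
                    if not (video_url and cover_url):
                        continue
                    log_data = (f"user:{channel},,video_id:{video_id},,"
                                f"video_url:{video_url},,original_title:{title}")
                    AliyunLogger.logging(channel, channel, video_url, video_id, "扫描到一条视频", "2001", log_data)
                    AliyunLogger.logging(channel, channel, video_url, video_id, "符合规则等待改造", "2004", log_data)
                    video_list.append({"video_id": video_id, "cover": cover_url, "video_url": video_url,
                                       "old_title": title, "rule": ""})
            except Exception:
                pass
            return video_list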
+ 29 - 31
data_channel/shipinhaodandian.py

@@ -6,41 +6,39 @@ class SPHDD:
 
     @classmethod
     def get_sphdd_data(cls, url, channel_id, name):
-        data_list = sqlCollect.get_shp_dd_data(url)
         list = []
-        if data_list:
-            try:
+        try:
+            data_list = sqlCollect.get_shp_dd_data(url)
+
+            if data_list:
                 for data in data_list:
-                    try:
-                        cover_url = data[4]
-                        video_url = data[5]
-                        if video_url and cover_url:
-                            video_id = data[0]
-                            old_title = data[1]
-                            # author_id = data[2]
-                            author_name = data[3]
-                            video_duration = data[6]
-                            from_user_id = data[7]
-                            from_user_name = data[8]
-                            from_group_id = data[9]
-                            from_group_name = data[10]
-                            source = data[11]
-                            wx_msg = data[12]
-                            is_encrypted = data[13]
-                            decode_key = data[14]
-                            log_data = f"user:{url},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title}"
-                            AliyunLogger.logging(f"{channel_id}-{source}", name, url, video_id, "扫描到一条视频", "2001", log_data)
-                            AliyunLogger.logging(f"{channel_id}-{source}", name, url, video_id, "符合规则等待改造", "2004", log_data)
-                            all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": author_name,
-                                        "old_title": old_title, "from_user_name": from_user_name, "from_group_name": from_group_name, "source": source, "wx_msg": wx_msg, "is_encrypted": is_encrypted, "decode_key": decode_key}
-                            list.append(all_data)
-                    except Exception as e:
-                        print(e)
-                        continue
+                    cover_url = data[4]
+                    video_url = data[5]
+                    if video_url and cover_url:
+                        video_id = data[0]
+                        old_title = data[1]
+                        # author_id = data[2]
+                        author_name = data[3]
+                        video_duration = data[6]
+                        from_user_id = data[7]
+                        from_user_name = data[8]
+                        from_group_id = data[9]
+                        from_group_name = data[10]
+                        source = data[11]
+                        wx_msg = data[12]
+                        is_encrypted = data[13]
+                        decode_key = data[14]
+                        log_data = f"user:{url},,video_id:{video_id},,video_url:{video_url},,original_title:{old_title}"
+                        AliyunLogger.logging(f"{channel_id}-{source}", name, url, video_id, "扫描到一条视频", "2001", log_data)
+                        AliyunLogger.logging(f"{channel_id}-{source}", name, url, video_id, "符合规则等待改造", "2004", log_data)
+                        all_data = {"video_id": video_id, "cover": cover_url, "video_url": video_url, "rule": author_name,
+                                    "old_title": old_title, "from_user_name": from_user_name, "from_group_name": from_group_name, "source": source, "wx_msg": wx_msg, "is_encrypted": is_encrypted, "decode_key": decode_key}
+                        list.append(all_data)
                 return list
-            except Exception:
+            else:
                 return list
-        return list
+        except Exception:
+            return list
 
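The refactor here mainly flattens the nesting: the query moves inside a single outer try/except, and the per-row try/except that used to swallow and skip bad rows is gone, so one malformed row now ends the whole batch. Independently of that, the long run of positional indices could be condensed into one unpacking statement, e.g. (a sketch; the order follows the SELECT in sqlCollect.get_shp_dd_data):

    (video_id, old_title, _author_id, author_name, cover_url, video_url,
     video_duration, from_user_id, from_user_name, from_group_id,
     from_group_name, source, wx_msg, is_encrypted, decode_key) = data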
 
 

+ 20 - 0
job_ks_feed.py

@@ -0,0 +1,20 @@
+
+import time
+from common import Material
+
+from video_rewriting.video_processor import VideoProcessor
+def video_task_start():
+    """处理视频任务,返回用户名并根据结果决定延迟时间"""
+    data = Material.feishu_list()[17]
+    while True:
+        try:
+            print("开始执行任务")
+            mark = VideoProcessor.main(data)
+            print(f"返回用户名: {mark}")
+            time.sleep(120 if mark else 120)  # 根据 mark 是否为空设置延迟
+        except Exception as e:
+            print("处理任务时出现异常:", e)
+            time.sleep(10)
+            continue
+if __name__ == '__main__':
+    video_task_start()

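Note that time.sleep(120 if mark else 120) waits 120 seconds on both branches, so the conditional is currently a no-op despite the comment. If the delay really should depend on whether main() returned a username, the two values need to differ; the 60 below is an illustrative assumption, not the project's actual setting:

            mark = VideoProcessor.main(data)
            print(f"返回用户名: {mark}")
            # Illustrative values only: shorter wait when no video was processed
            time.sleep(120 if mark else 60)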
+ 5 - 0
video_rewriting/video_processor.py

@@ -17,6 +17,7 @@ from common.gpt4o_help import GPT4o
 from data_channel.douyin import DY
 from data_channel.dy_keyword import DyKeyword
 from data_channel.dy_ls import DYLS
+from data_channel.ks_feed import KSFeed
 from data_channel.ks_keyword import KsKeyword
 from data_channel.ks_ls import KSLS
 from data_channel.ks_pc_keyword import KsPcKeyword
@@ -404,6 +405,8 @@ class VideoProcessor:
                                 sheet = "ibjoMx"
                             elif name == "品类关键词搜索":
                                 sheet = "Tgpikc"
+                            elif name == "快手推荐流":
+                                sheet = "9Ii8lw"
                             Feishu.insert_columns("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "ROWS", 1, 2)
                             time.sleep(0.5)
                             Feishu.update_values("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "A2:Z2", values)
@@ -444,6 +447,8 @@ class VideoProcessor:
             return KsKeyword.get_key_word(url, task_mark, mark, channel_id, name, task)
         elif channel_id == '视频号搜索':
             return SphKeyword.get_key_word(url, task_mark, mark, channel_id, name)
+        elif channel_id == '快手推荐流':
+            return KSFeed.get_feed_date()
 
 
     @classmethod
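The new channel threads through two if/elif chains: the Feishu sheet lookup and the channel dispatch that calls KSFeed.get_feed_date(). Both grow by one branch per channel; a dict lookup is one way to keep them flat. The sketch below only contains the mappings visible in this diff and is an illustration, not a proposed change:

    # Sketch: sheet lookup for the names visible in this hunk
    SHEET_BY_NAME = {
        "品类关键词搜索": "Tgpikc",
        "快手推荐流": "9Ii8lw",
    }
    sheet = SHEET_BY_NAME.get(name, sheet)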