@@ -122,16 +122,19 @@ class getVideo:
v_id = video["video_id"]
cover = video["cover"]
video_url = video["video_url"]
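+ # capture the task's rule so it can be recorded in the Feishu row below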
+ rule = video["rule"]
time.sleep(1)
pw_random_id = cls.random_id()
if channel_id == "票圈":
new_video_path = PQ.download_video(video_url, video_path_url, v_id) # download the video to a local path
else:
new_video_path = Oss.download_video_oss(video_url, video_path_url, v_id) # download the video from OSS to a local path
+ Common.logger("log").info(f"{task_mark}下的视频{url},{new_video_path}视频下载成功")
if not os.path.isfile(new_video_path):
Common.logger("log").info(f"{task_mark}下的视频{url},{new_video_path}视频下载失败")
cls.remove_files(video_path_url)
continue
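+ # log success only after the downloaded file has been verified on disk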
+ Common.logger("log").info(f"{task_mark}下的视频{url},{new_video_path}视频下载成功")
if crop_total and crop_total != 'None': # check whether the video needs cropping
new_video_path = FFmpeg.video_crop(new_video_path, video_path_url, pw_random_id)
if gg_duration_total and gg_duration_total != 'None': # check whether a target duration is specified for the video
@@ -195,7 +198,7 @@ class getVideo:
sqlCollect.insert_task(task_mark, v_id, mark, channel_id) # record the task in the database
current_time = datetime.now()
formatted_time = current_time.strftime("%Y-%m-%d %H:%M:%S")
- values = [[name, task_mark, channel_id, url, v_id, piaoquan_id, new_title, str(code), formatted_time]]
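+ # append the stringified rule as the row's final column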
+ values = [[name, task_mark, channel_id, url, v_id, piaoquan_id, new_title, str(code), formatted_time, str(rule)]]
# guard the Feishu sheet insert with a lock
with lock:
if name == "王雪珂":
@@ -214,6 +217,8 @@ class getVideo:
sheet = "bBHFwC"
elif name == "刘诗雨":
sheet = "fBdxIQ"
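+ # route 信欣's rows to a dedicated sheet in the same spreadsheet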
+ elif name == "信欣":
+ sheet = "lPe1eT"
Feishu.insert_columns("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "ROWS", 1, 2)
time.sleep(0.5)
Feishu.update_values("ILb4sa0LahddRktnRipcu2vQnLb", sheet, "A2:Z2", values)
@@ -226,7 +231,7 @@ class getVideo:
cls.remove_files(video_path_url)
Common.logger("warning").warning(f"{name}的{task_mark}任务处理失败:{e}\n")
- batch_size = 2
+ batch_size = 1 # cap the thread pool at a single worker
with concurrent.futures.ThreadPoolExecutor(max_workers=batch_size) as executor:
index = 0
while index < len(task_data):