@@ -423,10 +423,10 @@ class Follow:
                         }
                         our_uid = Users.create_user(log_type, crawler, create_user_dict, env)
                         Common.logger(log_type, crawler).info(f'新创建的站内UID:{our_uid}')
-                        if env == 'dev':
-                            our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
-                        else:
+                        if env == 'prod':
                             our_user_link = f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
+                        else:
+                            our_user_link = f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'
                         Common.logger(log_type, crawler).info(f'站内用户主页链接:{our_user_link}')
                         Feishu.update_values(log_type, crawler, sheetid, f'G{i + 1}:H{i + 1}', [[our_uid, our_user_link]])
                         Common.logger(log_type, crawler).info(f'站内用户信息写入飞书成功!')
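
The hunk above flips the environment check: only env == 'prod' now yields the production admin link, and any other value (including 'dev') falls back to the test console. A minimal sketch of that fallback pattern, using a hypothetical helper name (build_user_link is not part of this repo):

# Hypothetical helper illustrating the env check added above: 'prod' is the only
# value that maps to the production console; everything else defaults to testadmin.
def build_user_link(our_uid: str, env: str) -> str:
    if env == 'prod':
        return f'https://admin.piaoquantv.com/ums/user/{our_uid}/post'
    return f'https://testadmin.piaoquantv.com/ums/user/{our_uid}/post'

print(build_user_link('123456', 'prod'))  # production link
print(build_user_link('123456', 'dev'))   # testadmin link
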
@@ -915,12 +915,13 @@ class Follow:
                 if 'title' not in  videoDetails:
                     video_title = ''
                 else:
-                    video_title = videoDetails['title'].replace("&", "").strip().replace("\n", "") \
-                            .replace("/", "").replace("\r", "").replace("#", "") \
-                            .replace(".", "。").replace("\\", "").replace("&NBSP", "")
+                    video_title = videoDetails['title']
                 video_title = cls.filter_emoji(video_title)
                 # if Translate.is_contains_chinese(video_title) is False:
-                video_title = Translate.google_translate(video_title, machine)  # 自动翻译标题为中文
+                video_title = Translate.google_translate(video_title, machine)\
+                    .strip().replace("\\", "").replace(" ", "").replace("\n", "")\
+                    .replace("/", "").replace("\r", "").replace("&NBSP", "").replace("&", "")\
+                    .replace(";", "").replace("amp;", "")# 自动翻译标题为中文
 
                 if 'lengthSeconds' not in videoDetails:
                     duration = 0
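
With this change the raw title is no longer cleaned when it is read from videoDetails; a single replace chain runs after Translate.google_translate, so it is the translated title that gets stripped of spaces, slashes, line breaks and HTML-entity fragments. A rough standalone sketch of that same chain, under the assumption that it is factored into a helper (clean_title is a hypothetical name, not a function in this codebase):

# Hypothetical helper mirroring the replace chain added in the hunk above.
def clean_title(title: str) -> str:
    return (title.strip()
            .replace("\\", "").replace(" ", "").replace("\n", "")
            .replace("/", "").replace("\r", "").replace("&NBSP", "")
            .replace("&", "").replace(";", "").replace("amp;", ""))

print(clean_title("Shanghai Yu Garden / Lantern Festival\n 4K"))  # -> ShanghaiYuGardenLanternFestival4K
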
@@ -1027,11 +1028,11 @@ class Follow:
                 Common.download_method(log_type, crawler, 'youtube_video', video_dict['video_title'], video_dict['video_url'])
                 # ffmpeg_dict = Common.ffmpeg(log_type, crawler, f"./{crawler}/videos/{video_dict['video_title']}/video.mp4")
                 # video_width = int(ffmpeg_dict['width'])
-                video_width = 1280
                 # video_height = int(ffmpeg_dict['height'])
+                # video_size = int(ffmpeg_dict['size'])
+                video_width = 1280
                 video_height = 720
                 duration = int(video_dict['duration'])
-                # video_size = int(ffmpeg_dict['size'])
 
                 Common.logger(log_type, crawler).info(f'video_width:{video_width}')
                 Common.logger(log_type, crawler).info(f'video_height:{video_height}')
@@ -1052,81 +1053,86 @@ class Follow:
                 #     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
                 #     Common.logger(log_type, crawler).info(f"时长:{video_dict['duration']}不满足抓取规则,删除成功\n")
                 #     return
-                if duration == 0  or duration is None:
+                # if duration == 0  or duration is None:
+                #     # 删除视频文件夹
+                #     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
+                #     Common.logger(log_type, crawler).info(f"视频下载出错,删除成功\n")
+                #     return
+                # else:
+                # 下载封面
+                Common.download_method(log_type, crawler, 'cover', video_dict['video_title'], video_dict['cover_url'])
+                # 保存视频文本信息
+                Common.save_video_info(log_type, crawler, video_dict)
+
+                # 上传视频
+                Common.logger(log_type, crawler).info(f"开始上传视频")
+                if env == 'dev':
+                    our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
+                    our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
+                else:
+                    our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
+                    our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
+                Common.logger(log_type, crawler).info("视频上传完成")
+
+                if our_video_id is None:
                     # 删除视频文件夹
                     shutil.rmtree(f"./{crawler}/videos/{video_dict['video_title']}/")
-                    Common.logger(log_type, crawler).info(f"视频下载出错,删除成功\n")
                     return
-                else:
-                    # 下载封面
-                    Common.download_method(log_type, crawler, 'cover', video_dict['video_title'], video_dict['cover_url'])
-                    # 保存视频文本信息
-                    Common.save_video_info(log_type, crawler, video_dict)
-
-                    # 上传视频
-                    Common.logger(log_type, crawler).info(f"开始上传视频")
-                    if env == 'dev':
-                        our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
-                        our_video_link = f"https://testadmin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
-                    else:
-                        our_video_id = Publish.upload_and_publish(log_type, crawler, strategy, our_uid, env, oss_endpoint)
-                        our_video_link = f"https://admin.piaoquantv.com/cms/post-detail/{our_video_id}/info"
-                    Common.logger(log_type, crawler).info("视频上传完成")
 
-                    # 视频信息保存至飞书
-                    Feishu.insert_columns(log_type, crawler, "GVxlYk", "ROWS", 1, 2)
-                    # 视频ID工作表,首行写入数据
-                    upload_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
-                    values = [[upload_time,
-                               "定向榜",
-                               video_dict['video_id'],
-                               video_dict['video_title'],
-                               our_video_link,
-                               video_dict['play_cnt'],
-                               video_dict['duration'],
-                               f'{video_width}*{video_height}',
-                               video_dict['publish_time'],
-                               video_dict['user_name'],
-                               video_dict['cover_url'],
-                               video_dict['video_url']
-                               ]]
-                    time.sleep(1)
-                    Feishu.update_values(log_type, crawler, "GVxlYk", "F2:Z2", values)
-                    Common.logger(log_type, crawler).info('视频信息写入定向_已下载表成功\n')
+                # 视频信息保存至飞书
+                Feishu.insert_columns(log_type, crawler, "GVxlYk", "ROWS", 1, 2)
+                # 视频ID工作表,首行写入数据
+                upload_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
+                values = [[upload_time,
+                           "定向榜",
+                           video_dict['video_id'],
+                           video_dict['video_title'],
+                           our_video_link,
+                           video_dict['play_cnt'],
+                           video_dict['duration'],
+                           f'{video_width}*{video_height}',
+                           video_dict['publish_time'],
+                           video_dict['user_name'],
+                           video_dict['cover_url'],
+                           video_dict['video_url']
+                           ]]
+                time.sleep(1)
+                Feishu.update_values(log_type, crawler, "GVxlYk", "F2:Z2", values)
+                Common.logger(log_type, crawler).info('视频信息写入定向_已下载表成功\n')
 
-                    # 视频信息保存数据库
-                    sql = f""" insert into crawler_video(video_id,
-                    user_id,
-                    out_user_id,
-                    platform,
-                    strategy,
-                    out_video_id,
-                    video_title,
-                    cover_url,
-                    video_url,
-                    duration,
-                    publish_time,
-                    play_cnt,
-                    crawler_rule,
-                    width,
-                    height)
-                    values({our_video_id},
-                    "{our_uid}",
-                    "{video_dict['out_uid']}",
-                    "{cls.platform}",
-                    "定向爬虫策略",
-                    "{video_dict['video_id']}",
-                    "{video_dict['video_title']}",
-                    "{video_dict['cover_url']}",
-                    "{video_dict['video_url']}",
-                    {int(duration)},
-                    "{video_dict['publish_time']}",
-                    {int(video_dict['play_cnt'])},
-                    "{rule}",
-                    {int(video_width)},
-                    {int(video_height)}) """
-                    MysqlHelper.update_values(log_type, crawler, sql, env, machine)
-                    Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
+                # 视频信息保存数据库
+                sql = f""" insert into crawler_video(video_id,
+                user_id,
+                out_user_id,
+                platform,
+                strategy,
+                out_video_id,
+                video_title,
+                cover_url,
+                video_url,
+                duration,
+                publish_time,
+                play_cnt,
+                crawler_rule,
+                width,
+                height)
+                values({our_video_id},
+                "{our_uid}",
+                "{video_dict['out_uid']}",
+                "{cls.platform}",
+                "定向爬虫策略",
+                "{video_dict['video_id']}",
+                "{video_dict['video_title']}",
+                "{video_dict['cover_url']}",
+                "{video_dict['video_url']}",
+                {int(duration)},
+                "{video_dict['publish_time']}",
+                {int(video_dict['play_cnt'])},
+                "{rule}",
+                {int(video_width)},
+                {int(video_height)}) """
+                MysqlHelper.update_values(log_type, crawler, sql, env, machine)
+                Common.logger(log_type, crawler).info('视频信息插入数据库成功!\n')
         except Exception as e:
             Common.logger(log_type, crawler).info(f"download_publish异常:{e}\n")
 
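
The insert above builds its SQL by interpolating video_dict values straight into an f-string, so a title or URL containing a double quote would break the statement. A hedged alternative sketch using standard DB-API parameter binding (pymysql-style %s placeholders); this is not how MysqlHelper.update_values is invoked in the diff, and insert_crawler_video / conn are hypothetical names:

import pymysql

# Hypothetical parameterized variant of the crawler_video insert shown above.
# The driver escapes every value, so quotes or backslashes in titles are handled safely.
def insert_crawler_video(conn, our_video_id, our_uid, video_dict, rule, platform,
                         duration, video_width, video_height):
    sql = """insert into crawler_video(video_id, user_id, out_user_id, platform, strategy,
             out_video_id, video_title, cover_url, video_url, duration, publish_time,
             play_cnt, crawler_rule, width, height)
             values(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
    params = (our_video_id, our_uid, video_dict['out_uid'], platform, "定向爬虫策略",
              video_dict['video_id'], video_dict['video_title'], video_dict['cover_url'],
              video_dict['video_url'], int(duration), video_dict['publish_time'],
              int(video_dict['play_cnt']), rule, int(video_width), int(video_height))
    with conn.cursor() as cursor:
        cursor.execute(sql, params)
    conn.commit()

# Usage (connection parameters are placeholders):
# conn = pymysql.connect(host="127.0.0.1", user="crawler", password="...", database="piaoquan_crawler")
# insert_crawler_video(conn, our_video_id, our_uid, video_dict, rule, "youtube", duration, 1280, 720)
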
@@ -1159,4 +1165,7 @@ if __name__ == "__main__":
     # Follow.get_follow_videos('follow', 'youtube', 'youtube_follow', 'out', 'dev', 'local')
     # print(Follow.filter_emoji("姐妹倆一唱一和,完美配合,終於把大慶降服了😅😅#萌娃搞笑日常"))
     # Follow.repeat_video('follow', 'youtube', 4, "dev", "local")
+    # title = "'西部巡游220丨两人一车环游中国半年,需要花费多少钱? 2万公里吃住行费用总结'"
+    title = "'Insanely Crowded Shanghai Yu Garden Lantern Festival Walk Tour 2023 人气爆棚的上海豫园元宵节漫步之行 4K'"
+    print(title.strip().replace("\\", "").replace(" ", "").replace("\n", "").replace("/", "").replace("\r", "").replace("&NBSP", "").replace("&", ""))
     pass