//
//  PQGPUImagePlayer.swift
//  GPUImage_iOS
//
//  Created by ak on 2020/8/27.
//  Copyright © 2020 Sunset Lake Software LLC. All rights reserved.
//
//  Purpose: a filter-capable player (GPUImage pipeline) with audio support.
//  Reference (useful writeup): https://juejin.im/post/6844904024760664078

import AVFoundation
import AVKit
import UIKit
import ObjectMapper
// import GPUImage

/// Keys passed to `AVAsset.loadValuesAsynchronously(forKeys:)` /
/// `statusOfValue(forKey:)` when preparing an asset for playback.
struct AVAssetKey {
    static let tracks = "tracks"
    static let duration = "duration"
    static let metadata = "commonMetadata"
}

// Player states.
public enum PQGPUImagePlayerViewStatus: Int {
    case playing = 10
    case pause = 20
    case stop = 30
    case error = 0
    case unknow = -1000
}

public class PQGPUImagePlayerView: UIView {
    // Current playback position (seconds); every change is forwarded to
    // `playbackTimeChangeClosure` before the new value is stored.
    public private(set) var playbackTime: TimeInterval = 0 {
        willSet {
            playbackTimeChangeClosure?(newValue)
        }
    }

    public var mCanverSize: CGSize = .zero

    // Automatically hide the border (a fade animation plays when it is shown).
    public var isAutoHiden: Bool = false

    // Whether the border line is drawn at all.
    public var isShowLine: Bool = true

    // Playback-progress callback (seconds).
    public var playbackTimeChangeClosure: ((_ time: TimeInterval) -> Void)?

    // Parameters: 1. current time  2. total duration  3. progress (0–1).
    public var progress: ((Double, Double, Double) -> Void)?

    /// Tap callback for the preview (render) area.
    public var renderViewOnClickHandle: (() -> Void)?

    public private(set) var asset: AVAsset?

    // Total duration (seconds) of the loaded asset, 0 when none is loaded.
    public var duration: TimeInterval {
        return asset?.duration.seconds ?? 0
    }

    // Current player state; every change is forwarded to `statusChangeClosure`.
    public private(set) var status: PQGPUImagePlayerViewStatus = .unknow {
        willSet {
            statusChangeClosure?(newValue)
        }
    }

    public var statusChangeClosure: ((_ status: PQGPUImagePlayerViewStatus) -> Void)?

    // Set once the asset's "tracks" and "duration" keys finish loading;
    // forwarded to `assetLoadClosure`.
    public private(set) var isReadyToPlay = false {
        willSet {
            assetLoadClosure?(newValue)
        }
    }

    public var assetLoadClosure: ((_ isReadyToPlay: Bool) -> Void)?

    /// Called when video finished.
    /// This closure will not called if isLoop is true.
    public var finishedClosure: (() -> Void)?

    /// Set this attribute to true will print debug info.
    public var enableDebug = false {
        willSet {
            movie?.runBenchmark = newValue
        }
    }

    /// Setting this attribute before the end of the video works.
    public var isLoop = false {
        willSet {
            movie?.loop = newValue
        }
    }

    /// The player will control the animationLayer of animation with the property `timeOffset`.
    /// You can set up some animations in this layer like caption.
    public var animationLayer: CALayer? {
        willSet {
            // Set speed to 0, use timeOffset to control the animation
            newValue?.speed = 0
            newValue?.timeOffset = playbackTime
        }
        didSet {
            oldValue?.removeFromSuperlayer()
        }
    }

    /// Add filters to this array and call updateAsset(_:) method.
    public var filters: [ImageProcessingOperation] = []

    public var movie: PQMovieInput?
    public var speaker: SpeakerOutput?

    /// Volumn of original sounds in AVAsset.
    public var originVolumn: Float = 1.0 {
        didSet {}
    }

    public var playerLayer: AVPlayerLayer?
    public var player: AVPlayer?
    public var playerEmptyView: UIImageView!
    public var borderLayer: CAShapeLayer?
    public var mPlayeTimeRange: CMTimeRange?

    // Sticker materials on the timeline; assigning them rebuilds the filter
    // cache starting at the first sticker's timeline-in.
    var mStickers: [PQEditVisionTrackMaterialsModel]? {
        didSet {
            FilterLog(message: "设置线程为: \(Thread.current) \(OperationQueue.current?.underlyingQueue?.label as Any)")
            configCache(beginTime: mStickers?.first?.timelineIn ?? 0)
        }
    }

    // Whether the time/progress label is shown.
    var showProgressLab: Bool = true

    // Cache of pre-built filters; avoids the ~100 ms cost of creating a
    // filter during a seek.
    @Atomic var cacheFilters: Array = Array()

    // Maximum number of cached filters.
    var cacheFiltersMaxCount: Int = 8

    /// Use serial queue to ensure that the picture is smooth.
    var createFiltersQueue: DispatchQueue!
    // Whether to show the Gaussian-blur backdrop behind stickers whose aspect
    // ratio differs from the canvas.
    public var showGaussianBlur:Bool = false

    // Render area view (GPUImage output surface); a tap toggles play/pause.
    private lazy var renderView: RenderView = {
        let view = RenderView()
        view.backgroundColor = PQBFConfig.shared.styleBackGroundColor
        view.frame = self.bounds
        view.delegate = self
        let tap = UITapGestureRecognizer(target: self, action: #selector(RenderViewOnclick))
        view.addGestureRecognizer(tap)
        return view
    }()

    // Play-button overlay shown while paused.
    lazy var playView: UIImageView = {
        let view = UIImageView(frame: CGRect(x: (self.frame.size.width - self.frame.size.height / 3.6) / 2, y: (self.frame.size.height - self.frame.size.height / 3.6) / 2, width: self.frame.size.height / 3.6, height: self.frame.size.height / 3.6))
        view.image = UIImage().BF_Image(named: "gpuplayBtn").withRenderingMode(.alwaysTemplate)
        view.tintColor = UIColor.white
        view.isHidden = true
        return view
    }()

    // Dimming mask shown together with the play button while paused.
    lazy var playMaskView: UIView = {
        let playMaskView = UIView.init()
        playMaskView.backgroundColor = UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.5)
        playMaskView.isUserInteractionEnabled = false
        playMaskView.isHidden = true
        return playMaskView
    }()

    // "current position / total duration" label.
    lazy var progressLab: UILabel = {
        let titleLab = UILabel(frame: CGRect(x: (self.frame.size.width - 140) / 2, y: 0, width: 140, height: 12))
        titleLab.font = UIFont.systemFont(ofSize: 12, weight: .medium)
        titleLab.textColor = UIColor.white
        titleLab.textAlignment = .center
        titleLab.text = ""
        titleLab.layer.shadowColor = UIColor.black.cgColor
        titleLab.layer.shadowOpacity = 0.3
        titleLab.layer.shadowOffset = .zero
        titleLab.layer.shadowRadius = 1
        // titleLab.backgroundColor = UIColor.hexColor(hexadecimal: "#FFFFFF",alpha: 0.3)
        // titleLab.addCorner(corner:7)
        return titleLab
    }()

    // "Loading…" hint label (hidden by default).
    lazy var tipLab: UILabel = {
        let tipLab = UILabel(frame: CGRect(x: (self.frame.size.width - 100) / 2, y: (self.frame.size.height - 14) / 2, width: 100, height: 14))
        tipLab.font = UIFont.systemFont(ofSize: 14, weight: .medium)
        tipLab.textColor = UIColor.white
        tipLab.textAlignment = .center
        tipLab.text = "资源加载中..."
        tipLab.layer.shadowColor = UIColor.white.cgColor
        tipLab.layer.shadowOpacity = 0.5
        tipLab.layer.shadowOffset = .zero
        tipLab.layer.shadowRadius = 1
        tipLab.isHidden = true
        return tipLab
    }()

    // Timeline position (seconds) the progress display starts counting from.
    var showProgressStartTime:Float = 0.0

    required public init?(coder _: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override public init(frame: CGRect) {
        super.init(frame: frame)
        addSubview(renderView)
        addSubview(progressLab)
        addSubview(playMaskView)
        addSubview(playView)
        backgroundColor = PQBFConfig.shared.styleBackGroundColor
        playerEmptyView = UIImageView(frame: bounds)
        playerEmptyView.backgroundColor = .black
        playerEmptyView.image = UIImage().BF_Image(named: "playEmpty")
        playerEmptyView.contentMode = .center
        addSubview(playerEmptyView)
        addSubview(tipLab)
        // On iOS 10+ the queue is created inactive and activated after setup.
        if #available(iOS 10.0, *) {
            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
        } else {
            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
        }
        if #available(iOS 10.0, *) {
            createFiltersQueue.activate()
        }
    }

    // (Re)creates the white border layer around the render view.
    func showBorderLayer() {
        if borderLayer != nil {
            borderLayer?.removeFromSuperlayer()
        }
        // Line color.
        borderLayer = CAShapeLayer()
        borderLayer?.strokeColor = UIColor.hexColor(hexadecimal: "#FFFFFF").cgColor
        borderLayer?.fillColor = nil
        borderLayer?.path = UIBezierPath(rect: CGRect(x: 1, y: 1, width: bounds.width - 2, height: bounds.height - 2)).cgPath
        borderLayer?.frame = bounds
        borderLayer?.lineWidth = 2.0
        borderLayer?.lineCap = .round
        // (For a dash pattern: first value is segment length, second is the gap; nil means a solid line.)
        if borderLayer != nil {
            renderView.layer.addSublayer(borderLayer!)
        }
        if isAutoHiden {
            // Blink the border (fade in, then out) three times, ending hidden.
            borderLayer?.opacity = 0
            let groupAnimation = CAAnimationGroup()
            groupAnimation.beginTime = CACurrentMediaTime()
            groupAnimation.duration = 1
            groupAnimation.fillMode = .forwards
            groupAnimation.isRemovedOnCompletion = true
            groupAnimation.repeatCount = 3
            let opacity = CABasicAnimation(keyPath: "opacity")
            opacity.fromValue = 0
            opacity.toValue = 1
            opacity.isRemovedOnCompletion = true
            let opacity2 = CABasicAnimation(keyPath: "opacity")
            opacity2.fromValue = 1
            opacity2.toValue = 0
            opacity2.isRemovedOnCompletion = false
            groupAnimation.animations = [opacity, opacity2]
            borderLayer?.add(groupAnimation, forKey: nil)
        }
    }

    // Resets the canvas (view) frame and re-lays-out every subview.
    public func resetCanvasFrame(frame: CGRect) {
        if self.frame.equalTo(frame) {
            FilterLog(message: "新老值一样,不重置")
            return
        }
        self.frame = frame
        mCanverSize = frame.size
        if isShowLine {
            showBorderLayer()
        }
        FilterLog(message: "new frame is \(frame)")
        renderView.isHidden = true
        renderView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
        renderView.resatSize()
        playerEmptyView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
        playMaskView.frame = CGRect.init(x: 0, y: 0, width: self.frame.width, height: self.frame.height)
        tipLab.frame = CGRect(x: (self.frame.size.width - 100) / 2, y: (self.frame.size.height - 14) / 2, width: 100, height: 14)
        progressLab.frame = CGRect(x: (self.frame.size.width - 140) / 2, y: 8, width: 140, height: 14)
        // Play-button side length (currently identical for both orientations).
        let bord = frame.size.width > frame.size.height ?
            CGFloat(60) : CGFloat(60)
        playView.frame = CGRect(x: (CGFloat(frame.size.width) - bord) / 2 , y: (CGFloat(frame.size.height) - bord) / 2, width: bord, height: bord)
        // playView.frame = CGRect(x: (self.frame.size.width - self.frame.size.height / 3.6) / 2, y: (self.frame.size.height - self.frame.size.height / 3.6) / 2, width: self.frame.size.height / 3.6, height: self.frame.size.height / 3.6)
    }

    override public func layoutSubviews() {
        super.layoutSubviews()
    }

    // Tap handler for the render area: toggles play/pause and fires
    // `renderViewOnClickHandle`.
    @objc func RenderViewOnclick() {
        if status == .playing {
            playView.isHidden = false
            playMaskView.isHidden = false
            pause()
        } else if status == .stop || status == .pause {
            playView.isHidden = true
            playMaskView.isHidden = true
            movie?.resume()
            speaker?.start()
            status = .playing
        }
        if renderViewOnClickHandle != nil {
            renderViewOnClickHandle!()
        }
    }

    // Shows/hides the play button and its dimming mask together.
    func showPlayBtn(isHidden: Bool) {
        playView.isHidden = isHidden
        playMaskView.isHidden = isHidden
    }

    deinit {
        stop()
        movie = nil
        speaker = nil
        BFLog(1, message: "play view release")
    }

    /// NOTE: the URL here must be a full (absolute) path — a non-absolute one
    /// will crash. Keeping path resolution out of this view makes it reusable
    /// (callers decide where business files live).
    public func updateAsset(_ url: URL, videoComposition: AVVideoComposition? = nil, audioMixModel: PQVoiceModel? = nil, videoStickers: [PQEditVisionTrackMaterialsModel]? = nil,originMusicDuration:Float = 0,lastPoint:Float = 0,clipAudioRange: CMTimeRange = CMTimeRange.zero ) {
        // Reset to nil on every (re)initialisation.
        var audioMix: AVMutableAudioMix?
        var composition: AVMutableComposition?
        let asset = AVURLAsset(url: url, options: nil)
        BFLog(1, message: "播放器初始化的音频时长\(asset.duration.seconds) url is \(url),最终使用时长\(originMusicDuration),裁剪范围\(CMTimeGetSeconds(clipAudioRange.start)) 到 \(CMTimeGetSeconds(clipAudioRange.end))")
        self.asset = asset
        if (audioMixModel != nil && audioMixModel?.localPath != nil) || (videoStickers != nil && (videoStickers?.count ??
0) > 0 || originMusicDuration != 0) {
            FilterLog(message: "有参加混音的数据。")
            (audioMix, composition) = PQPlayerViewModel.setupAudioMix(originAsset: asset, bgmData: audioMixModel, videoStickers: videoStickers,originMusicDuration:originMusicDuration,clipAudioRange: clipAudioRange)
        } else {
            audioMix = nil
        }
        isReadyToPlay = false
        // Preload the keys used by `isReadyToPlay`.
        asset.loadValuesAsynchronously(forKeys: ["tracks", "duration", "commonMetadata"]) { [weak self] in
            guard let strongSelf = self else { return }
            let tracksStatus = strongSelf.asset?.statusOfValue(forKey: AVAssetKey.tracks, error: nil) ?? .unknown
            let durationStatus = strongSelf.asset?.statusOfValue(forKey: AVAssetKey.duration, error: nil) ?? .unknown
            strongSelf.isReadyToPlay = tracksStatus == .loaded && durationStatus == .loaded
        }
        var audioSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatLinearPCM,
        ]
        // if #available(iOS 14.0, *) {
        audioSettings[AVLinearPCMIsFloatKey] = false
        audioSettings[AVLinearPCMBitDepthKey] = 16
        // }
        do {
            if composition != nil {
                FilterLog(message: "composition 方式初始化")
                movie = try PQMovieInput(asset: composition!, videoComposition: videoComposition, audioMix: audioMix, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
                // movie?.exportAudioUrl = url
                // clipAudioRange
                movie?.configAVPlayer(assetUrl: url, ranges: [clipAudioRange])
            } else {
                movie = try PQMovieInput(url: url, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
                /* 测试代码
                 let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM]
                 let bundleURL = Bundle.main.resourceURL!
                 let movieURL = URL(string:"11111.mp4", relativeTo:bundleURL)!
                 movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings)
                 */
            }
            movie!.runBenchmark = false
            movie!.synchronizedEncodingDebug = false
        } catch {
            status = .error
            if enableDebug {
                debugPrint(error)
            }
        }
        guard let movie = movie else { return }
        // Per-frame progress: switch filters on the decode thread, then mirror
        // time/labels on the main thread.
        movie.progress = { [weak self] currTime, duration, prgressValue in
            guard let strongSelf = self else { return }
            // BFLog(1, message: " movie 进度\(currTime)")
            strongSelf.changeFilter(currTime: currTime)
            strongSelf.progress?(currTime, duration, prgressValue)
            DispatchQueue.main.async {
                strongSelf.playbackTime = currTime
                // Non-main thread change this property is not valid
                strongSelf.animationLayer?.timeOffset = strongSelf.playbackTime
                if strongSelf.showProgressLab {
                    if(strongSelf.showProgressStartTime == 0 ){
                        strongSelf.showProgressStartTime = Float(CMTimeGetSeconds(strongSelf.movie?.startTime ?? .zero))
                    }
                    if duration < 1 {
                        strongSelf.progressLab.text = "\((currTime - Double(CMTimeGetSeconds(strongSelf.movie?.startTime ?? .zero))).formatDurationToHMS()) / 00:01"
                    } else {
                        var showTime = currTime - Double(strongSelf.showProgressStartTime)
                        if (showTime < 0){
                            showTime = 0
                        }
                        strongSelf.progressLab.text = "\(showTime.formatDurationToHMS()) / \( (duration - Double(strongSelf.showProgressStartTime)).formatDurationToHMS())"
                    }
                }
            }
        }
        movie.completion = { [weak self] in
            guard let strongSelf = self else { return }
            // Cache exhausted — rebuild it for the next playback.
            if(strongSelf.filters.count == 0){
                strongSelf.configCache(beginTime: strongSelf.mStickers?.first?.timelineIn ?? 0)
            }
            DispatchQueue.main.async {
                strongSelf.status = .stop
                strongSelf.finishedClosure?()
                strongSelf.showPlayBtn(isHidden: false)
                // FIX: was `strongSelf.progress!(0,0,1)` — force-unwrapping the
                // optional closure crashes when no progress handler is installed;
                // optional-chain like every other call site in this file.
                strongSelf.progress?(0, 0, 1)
            }
        }
        speaker = SpeakerOutput()
        movie.audioEncodingTarget = speaker
        applyFilters()
    }

    /// Initializes the cache: creates up to `cacheFiltersMaxCount` filters.
    /// - Parameter beginTime: start time for caching; used on seek, when the
    ///   old cache is stale and must not be reused.
    func configCache(beginTime: Float64 ) {
        cacheFilters.removeAll()
        FilterLog(message: "原素材 总数:\(mStickers?.count ?? 0) ")
        if mStickers?.count ?? 0 > 0 {
            for (index, currentSticker) in mStickers!.enumerated() {
                BFLog(message: "mStickers timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) index : \(index)")
                // Stop once the cache is full.
                if cacheFilters.count == cacheFiltersMaxCount {
                    break
                }
                // Skip stickers that end before the cache window begins.
                if(currentSticker.timelineOut < beginTime){
                    continue
                }
                var showFitler: PQBaseFilter?
                if currentSticker.type == StickerType.VIDEO.rawValue {
                    showFitler = PQMovieFilter(movieSticker: currentSticker)
                } else if currentSticker.type == StickerType.IMAGE.rawValue {
                    showFitler = PQImageFilter(sticker: currentSticker, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
                }
                if showFitler != nil {
                    BFLog(message: " 加入到缓存 的 filter timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) in :\(currentSticker.model_in) out: \(currentSticker.out) index : \(index)")
                    cacheFilters.append(showFitler!)
                }
            }
            for (index, filter) in cacheFilters.enumerated() {
                FilterLog(message: " 初始化 config create currentSticker timelinein \(String(describing: filter.stickerInfo?.timelineIn)) timelineout \(String(describing: filter.stickerInfo?.timelineOut)) in :\(String(describing: filter.stickerInfo?.model_in)) out \(String(describing: filter.stickerInfo?.out)) index\(index)")
            }
            if(cacheFilters.first != nil){
                // Wire the first cached filter into the render chain.
                movie?.removeAllTargets()
                let showFilter: PQBaseFilter = cacheFilters.first!
                movie?.addTarget(showFilter, atTargetIndex: 0)
                showFilter.addTarget(renderView, atTargetIndex: 0)
            }
        }
    }

    // Builds the filter for the sticker after the last cached one and appends it.
    func createNextFilter() {
        FilterLog(message: "加入前 当前的缓存个数为: \(cacheFilters.count) maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
        if cacheFilters.count <= cacheFiltersMaxCount {
            let showIndex = mStickers?.firstIndex(where: { (sticker) -> Bool in
                (cacheFilters.last?.stickerInfo == sticker)
            })
            FilterLog(message: "当前显示的showIndex: \(String(describing: showIndex))")
            if ((showIndex ?? 0) + 1) < (mStickers?.count ?? 0) {
                let currentSticker = mStickers?[(showIndex ?? 0) + 1]
                if currentSticker != nil {
                    var showFitler: PQBaseFilter?
                    if currentSticker!.type == StickerType.VIDEO.rawValue {
                        showFitler = PQMovieFilter(movieSticker: currentSticker!)
                    } else if currentSticker!.type == StickerType.IMAGE.rawValue {
                        showFitler = PQImageFilter(sticker: currentSticker!, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
                    }
                    if showFitler != nil {
                        cacheFilters.append(showFitler!)
                    }
                }else{
                    FilterLog(message: "缓存数据加入不成功!!!!!")
                }
            }
            FilterLog(message: "加入后 当前的缓存个数为: \(cacheFilters.count) maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
        }
    }

    /// Picks the filter to display for the given time from the cache.
    /// - Parameter currTime: current playback time.
    func changeFilter(currTime: Float64) {
        // let starts:CFTimeInterval = CFAbsoluteTimeGetCurrent()
        BFLog(message: " 要查找的 currTime is \(currTime)")
        // 1. Drop filters whose display window already ended.
        self.cacheFilters.removeAll(where: {(filter) -> Bool in
            (currTime > (filter.stickerInfo?.timelineOut ?? 0.0))
        })
        // 2. Find the filter whose timeline window contains currTime.
        let showIndex = cacheFilters.firstIndex(where: { (filter) -> Bool in
            (currTime >= (filter.stickerInfo?.timelineIn ?? 0.0) && currTime <= (filter.stickerInfo?.timelineOut ?? 0.0))
        })
        if(showIndex == nil){
            FilterLog(message: "缓存没有查找到?出现数据错误!!!!")
            return
        }
        let showFilter: PQBaseFilter = cacheFilters[showIndex ?? 0]
        FilterLog(message: "缓存操作 查找到命中的显示是为:\(currTime) 缓存数据timeline in :\(showFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(showFilter.stickerInfo?.timelineOut ?? 0.0) in:\(showFilter.stickerInfo?.model_in ?? 0.0) out:\(showFilter.stickerInfo?.out ?? 0.0) 缓存数 \(cacheFilters.count) index: \(String(describing: showIndex))")
        if(!(showFilter.isShow)){
            FilterLog(message: "showIndex当前时间为 \(currTime) showIndex is \(String(describing: showIndex)) 显示 filter timelineIn is: \(String(describing: showFilter.stickerInfo?.timelineIn)) timelineOut is: \(String(describing: showFilter.stickerInfo?.timelineOut))")
            showFilter.isShow = true
            movie!.removeAllTargets()
            // Performance: only blur when the sticker's aspect ratio differs
            // from the canvas's.
            // Source aspect ratio.
            let stickerAspectRatio = String(format: "%.6f", (showFilter.stickerInfo?.width ?? 0.0 ) / (showFilter.stickerInfo?.height ?? 0.0))
            // Canvas aspect ratio.
            let canverAspectRatio = String(format: "%.6f",(movie?.mShowVidoSize.width ?? 0.0) / (movie?.mShowVidoSize.height ?? 0.0))
            if(showFilter.stickerInfo?.type == StickerType.IMAGE.rawValue && showGaussianBlur && Float(stickerAspectRatio) != Float(canverAspectRatio)){
                FilterLog(message: "显示图片filter")
                // Gaussian-blur backdrop layer.
                let blurStickerModel:PQEditVisionTrackMaterialsModel? = showFilter.stickerInfo?.copy() as? PQEditVisionTrackMaterialsModel
                blurStickerModel?.canvasFillType = stickerContentMode.aspectFillStr.rawValue
                if blurStickerModel == nil {
                    FilterLog(message: "显示图片filter blurStickerModel is nil")
                    return
                }
                let showGaussianFitler:PQBaseFilter = PQImageFilter(sticker: blurStickerModel!, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
                let iosb:GaussianBlur = GaussianBlur.init()
                iosb.blurRadiusInPixels = 20
                showGaussianFitler.addTarget(iosb)
                self.movie?.addTarget(showGaussianFitler, atTargetIndex: 0)
                iosb.addTarget(showFilter,atTargetIndex: 0)
                showFilter.addTarget(self.renderView as ImageConsumer, atTargetIndex: 0)
                FilterLog(message: "filter 添加成功 注意是否添加成功。")
            }else{
                movie?.addTarget(showFilter, atTargetIndex: 0)
                showFilter.addTarget(renderView, atTargetIndex: 0)
            }
            // Pre-build the next filter off the render thread.
            self.createFiltersQueue.async {
                self.createNextFilter()
            }
        }else{
            FilterLog(message: " 添加过了 currTime is \(currTime) timelineIn:\(showFilter.stickerInfo?.timelineIn ?? 0.0)")
        }
    }

    /// Marks every installed filter as seeking / not seeking.
    func setEnableSeek(isSeek: Bool) {
        for filter in filters {
            (filter as? PQBaseFilter)?.enableSeek = isSeek
        }
    }

    // Rebuilds the chain: movie -> filters… -> renderView.
    private func applyFilters() {
        guard let movie = movie else { return }
        movie.removeAllTargets()
        var currentTarget: ImageSource = movie
        filters.forEach {
            let f = $0
            currentTarget.addTarget(f, atTargetIndex: 0)
            currentTarget = f
        }
        currentTarget.addTarget(renderView, atTargetIndex: 0)
    }
}

// MARK: Player control

public extension PQGPUImagePlayerView {
    /// Starts playback.
    /// - Parameter pauseFirstFrame: whether to pause on the first frame.
    func play(pauseFirstFrame: Bool = false, playeTimeRange: CMTimeRange = CMTimeRange()) {
        DispatchQueue.main.async {
            self.playerEmptyView.isHidden = true
            self.playView.isHidden = !pauseFirstFrame
            self.playMaskView.isHidden = !pauseFirstFrame
            self.renderView.isHidden = false
            self.progressLab.isHidden = false
        }
        // guard status != .playing else {
        //     FilterLog(message: "已经是播放状态")
        //     return
        // }
        // If no start/end range was given, default to the whole asset duration
        // (the creation tool passes nothing).
        if CMTIMERANGE_IS_INVALID(playeTimeRange) {
            let endTime = CMTime(value: CMTimeValue(CMTimeGetSeconds(asset?.duration ?? .zero) * 600), timescale: 600)
            mPlayeTimeRange = CMTimeRange(start: .zero, end: endTime)
        } else {
            mPlayeTimeRange = playeTimeRange
        }
        // Flush any buffered audio before (re)starting.
        speaker?.clearBuffer()
        movie?.start(timeRange: mPlayeTimeRange ?? CMTimeRange())
        speaker?.start()
        status = pauseFirstFrame ? .pause : .playing
        showProgressStartTime = 0
    }

    // Seek: move the playback range's start and restart playback there.
    func seek(to time: CMTime) {
        mPlayeTimeRange?.start = time
        play(pauseFirstFrame: false, playeTimeRange: mPlayeTimeRange ??
.zero)
    }

    // Pause decode and audio output.
    func pause() {
        guard status != .pause else {
            return
        }
        // May crash: timebaseInfo can be 0 inside configureThread(), causing a division error.
        movie?.pause()
        speaker?.pause()
        status = .pause
        showPlayBtn(isHidden: false)
    }

    // Stop decoding entirely.
    func stop() {
        // guard status != .stop else {
        //     return
        // }
        movie?.cancel()
        speaker?.cancel()
        status = .stop
    }

    // Reset the player UI to the empty state.
    func clearPlayerView() {
        playerEmptyView.isHidden = false
        renderView.isHidden = true
        progressLab.isHidden = true
    }

    // Show/hide the loading hint text.
    func showTip(show: Bool) {
        FilterLog(message: "showTip \(show)")
        tipLab.isHidden = !show
        if show {
            playerEmptyView.isHidden = true
            renderView.isHidden = true
            progressLab.isHidden = true
        }
    }
}

// MARK: Filter operations

public extension PQGPUImagePlayerView {
    // Append a single filter.
    func appendFilter(_ filter: ImageProcessingOperation) {
        filters.append(filter)
    }

    // Append a group of filters.
    func appendFilters(_ newFilters: [ImageProcessingOperation]) {
        filters = filters + newFilters
    }

    // Remove all filters.
    func removeAllFilters() {
        filters.removeAll()
    }

    // Replace all filters with a new set.
    func appendFiltersClearOldFilter(_ newFilters: [ImageProcessingOperation]) {
        filters.removeAll()
        filters = newFilters
    }
}

// MARK: - RenderViewDelegate

extension PQGPUImagePlayerView: RenderViewDelegate{
    public func willDisplayFramebuffer(renderView _: RenderView, framebuffer _: Framebuffer) {
        FilterLog(message: "willDisplayFramebuffer")
    }

    public func didDisplayFramebuffer(renderView _: RenderView, framebuffer: Framebuffer) {
        FilterLog(message: "didDisplayFramebuffer")
    }

    public func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool {
        // FIX: the log message was a copy-paste of "didDisplayFramebuffer",
        // misattributing log lines to the wrong delegate callback.
        FilterLog(message: "shouldDisplayNextFramebufferAfterMainThreadLoop")
        return false
    }
}