浏览代码

Merge branch 'master' of https://git.yishihui.com/iOS/BFFramework

wenweiwei 3 年之前
父节点
当前提交
9b4d363544

+ 7 - 1
BFFramework/Classes/PQGPUImage/akfilters/PQBaseFilter.swift

@@ -21,6 +21,12 @@ open class PQBaseFilter: BasicOperation {
     
     //是否使用 seek 方案, seek 操作要通知到所有 filters
     var enableSeek:Bool = false
-
     
+    var isShow:Bool = false
+       //创建filter所使用的贴纸信息
+    var stickerInfo:PQEditVisionTrackMaterialsModel?
+
+    //析构数据
+    public func clearData()  {
+    }
 }

+ 12 - 6
BFFramework/Classes/PQGPUImage/akfilters/PQImageFilter.swift

@@ -36,6 +36,7 @@ open class PQImageFilter: PQBaseFilter {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
 
         mSticker = sticker
+        stickerInfo = sticker
 
         if mSticker!.locationPath.count == 0 {
             FilterLog(message: "图片数据为空,创建失败")
@@ -71,7 +72,8 @@ open class PQImageFilter: PQBaseFilter {
         }
 
         if newImage != nil {
-            BFLog(message: "提前加载图片。。。。timelineIn : \(String(describing: mSticker?.timelineIn)) timelineOut :\(mSticker?.timelineOut) \(mSticker?.locationPath)")
+            BFLog(message: "提前加载图片。。。。timelineIn : \(String(describing: mSticker?.timelineIn)) timelineOut :\(String(describing: mSticker?.timelineOut)) \(String(describing: mSticker?.locationPath))")
+            
             imageTexture = PQGPUImageTools.setupTexture(image: newImage!.cgImage!)
 
         } else { FilterLog(message: "image filter init error image data is nil!") }
@@ -114,13 +116,19 @@ open class PQImageFilter: PQBaseFilter {
 
         FilterLog(message: " image filter 当前时间: \(currTime) \(newImage!.size)")
 
-        if currTime >= mSticker!.timelineIn && currTime <= mSticker!.timelineOut {
+//        if currTime >= mSticker!.timelineIn && currTime <= mSticker!.timelineOut {
             FilterLog(message: " 显示图片当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  \(String(describing: newImage?.size))")
             // 取纹理坐标
             let textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: mSticker!, textureSize: newImage!.size, cannvasSize: inputSize)
 
             FilterLog(message: "textureCoordinates is \(textureCoordinates)")
-
+            
+            //imageTexture 有可能被析构导致黑屏
+            if(imageTexture == 0){
+                FilterLog(message: "imageTexture is error !!!!!")
+                imageTexture = PQGPUImageTools.setupTexture(image: newImage!.cgImage!)
+            }
+        
             let texturePropertiesimagetwo = InputTextureProperties(textureCoordinates: textureCoordinates, texture: imageTexture)
 
             let verticesPoint: [GLfloat] = PQGPUImageTools.getVerticesPoint(sticker: mSticker!, textureSize: newImage!.size, cannvasSize: inputSize)
@@ -130,8 +138,6 @@ open class PQImageFilter: PQBaseFilter {
 
                                  inputTextures: [texturePropertiesimagetwo])
             releaseIncomingFramebuffers()
-        } else {
-//            imageTexture = 0
-        }
+ 
     }
 }

+ 116 - 87
BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift

@@ -70,15 +70,10 @@ class PQMovieFilter: PQBaseFilter {
     // Last sample time that played.
     public private(set) var currentTime: CMTime = .zero
 
-    public var loop: Bool?
-
     // Progress block of the video with a paramater value of 0-1.
     // Can be used to check video encoding progress. Not called from main thread.
     public var progress: ((Double) -> Void)?
 
-    // 开始绘制
-    var isStartReading: Bool = false
-
     var audioSettings: [String: Any]?
 
     var movieFramebuffer: Framebuffer?
@@ -93,7 +88,7 @@ class PQMovieFilter: PQBaseFilter {
     // 最后一帧图像数据 CMSampleBuffer 不会 deep copy 所以使用一个CVImageBuffer变量
     var lastImageBuffer: CVImageBuffer?
     //
-    var currentRenderImageBuffer: CVPixelBuffer?
+    @Atomic var currentRenderImageBuffer: CVPixelBuffer?
     var currentRenderImageBufferTimeStamp: CMTime = .zero
     var currentRenderSampleBuffer: CMSampleBuffer?
     // 旋转角度值
@@ -103,26 +98,30 @@ class PQMovieFilter: PQBaseFilter {
 
     var timebaseInfo = mach_timebase_info_data_t()
 
-    // 缓存帧属性
-    var cacheFrameBufferMaxCount: Int = 16
-    // 缓存数量
-    @Atomic var cacheframeBuffers: Array = Array<CMSampleBuffer>.init()
-
     var currentThread: Thread?
     /// Use serial queue to ensure that the picture is smooth
     var seekQueue: DispatchQueue!
 
     // * 设置播放速率 范围 0 - 8(理论值) rate 正常速度为1.0;小于为慢速;大于为快速。但不能高于解码速度1-2ms硬解一帧
-    var speedRate: Float = 1.0
+    var speedRate: Float = 1
 
     // 原视频素材的 FPS
     var stickerFPS: Float = 0
-        
-    //开始时间,创建 filter 显示的时候有
+
+    // 开始时间,创建 filter 显示的时候有
     var startTimeStamp: CMTime?
-    
-    
-    deinit {}
+
+    deinit {
+        FilterLog(message: "movie filter deinit")
+        clearData()
+    }
+
+    public override func clearData() {
+        super.clearData()
+        if assetReader != nil {
+            assetReader?.cancelReading()
+        }
+    }
 
     public init(url: URL) {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
@@ -138,6 +137,7 @@ class PQMovieFilter: PQBaseFilter {
     public init(movieSticker: PQEditVisionTrackMaterialsModel) {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
         moveSticker = movieSticker
+        stickerInfo = movieSticker
         FilterLog(message: "资源裁剪的 开始时间\(moveSticker!.model_in)  结束时间: \(moveSticker!.out)")
         if moveSticker!.videoIsCrop() {
             requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in * 600), timescale: 600)
@@ -181,7 +181,7 @@ class PQMovieFilter: PQBaseFilter {
     open override func renderFrame() {
         let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
         inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
-  
+
         currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
         FilterLog(message: "duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
 
@@ -204,31 +204,23 @@ class PQMovieFilter: PQBaseFilter {
                 enableSeek = false
             }
 
-            if !isStartReading {
-                isStartReading = false
-                startReading()
-            }
-            
-            if(startTimeStamp == nil){
+            if startTimeStamp == nil {
                 startTimeStamp = currentTime
             }
-                        
+
             let stickerTime = CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600)
-          
+
             let PTSTime = CMTimeAdd(stickerTime, CMTimeSubtract(currentTime, startTimeStamp ?? .zero))
-            
-            let showBuffer  = getNextSampleBuffer(showTimeStamp: CMTime.init(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
-            
-            if(showBuffer != nil){
-                process(movieFrame:CMSampleBufferGetImageBuffer(showBuffer!)!, withSampleTime: currentTime)
-            }
+
+            readNextVideoFrame(showTimeStamp: CMTime(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
 
         } else {
-            isStartReading = false
-            FilterLog(message: "开始显示 movefilter 了 结束了\(String(describing: moveSticker?.timelineIn))  currentTime  \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
+            FilterLog(message: "movefilter 了 结束了  timelineIN\(String(describing: moveSticker?.timelineIn)) timelineOut\(String(describing: moveSticker?.timelineOut)) currentTime  \(CMTimeGetSeconds(currentTime)) 裁剪in:\(String(describing: moveSticker?.model_in))  裁剪out:\(String(describing: moveSticker?.out)) ")
             FilterLog(message: "不显示 movefilter 了")
 
             assetReader?.cancelReading()
+            // 重新初始化解码器
+            startReading()
         }
     }
 
@@ -253,7 +245,7 @@ class PQMovieFilter: PQBaseFilter {
 
     // MARK: -
 
-    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
+    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, audioSettings: [String: Any]? = nil) throws {
         asset = AVURLAsset(url: url, options: avAssertOptions)
 
         if asset != nil {
@@ -264,7 +256,7 @@ class PQMovieFilter: PQBaseFilter {
 
             self.videoComposition = videoComposition
             self.playAtActualSpeed = playAtActualSpeed
-            self.loop = loop
+
             yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
             self.audioSettings = audioSettings
         } else { FilterLog(message: "asset is nil") }
@@ -310,10 +302,10 @@ class PQMovieFilter: PQBaseFilter {
 
     open func startReading() {
         FilterLog(message: "开始初始化")
+        mach_timebase_info(&timebaseInfo)
 
         assetReader?.cancelReading()
 
-        isStartReading = true
         guard let assetReader = createReader() else {
             return // A return statement in this frame will end thread execution.
         }
@@ -332,7 +324,6 @@ class PQMovieFilter: PQBaseFilter {
             debugPrint("ERROR: Unable to start reading: \(error)")
             return
         }
- 
     }
 
     // 设置解码开始时间
@@ -341,74 +332,87 @@ class PQMovieFilter: PQBaseFilter {
         requestedStartTime = startTime
         startReading()
     }
-    
-    
-    func getNextSampleBuffer(showTimeStamp:CMTime) -> CMSampleBuffer? {
-        
-        BFLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))")
-        
-        let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
+
+    // 取出第一帧数据
+    func readNextVideoFrame(showTimeStamp: CMTime) {
         if assetReader == nil {
             FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
-            return nil
+            return
         }
+
         var videoTrackOutput: AVAssetReaderOutput?
         for output in assetReader!.outputs {
             if output.mediaType == AVMediaType.video {
                 videoTrackOutput = output
             }
         }
- 
-        var targetSampleBuffer: CMSampleBuffer?
-        
+
+        let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
+
+        var sampleBuffer: CMSampleBuffer?
+
+        // 日志使用 count
+        var count: Int = 0
         while assetReader?.status == .reading {
-  
-            targetSampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
-            if(targetSampleBuffer == nil){
-                BFLog(message: " copyNextSampleBuffer is nil error!!!")
-                return nil
+            count = count + 1
+            sampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
+            if sampleBuffer == nil {
+                FilterLog(message: " copyNextSampleBuffer is nil error!!!")
+                return
             }
-            let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
-            
-            //目标帧 时间
-            if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) > CMTimeGetSeconds(showTimeStamp){
-                let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
-                FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)))")
-  
-                return targetSampleBuffer
-
+            let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer!)
+
+            // 目标帧 时间
+            if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= (CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out)) {
+                if sampleBuffer != nil {
+                    let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
+                    FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)) 查找次数\(count)  进场时间: \(String(describing: moveSticker?.timelineIn))  裁剪开始时间:\(String(describing: moveSticker?.model_in))")
+                    break
+                }
             }
-          
         }
-        
-        return nil
-        
-    }
-
-    func processCurrentBuffer(sampleBuffer: CMSampleBuffer) {
-        let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
-        let duration = asset!.duration // Only used for the progress block so its acuracy is not critical
-
-        sharedImageProcessingContext.runOperationSynchronously {
-//            self.process(movieFrame: sampleBuffer)
+        // 一,显示命中的帧数据
+        if sampleBuffer != nil {
+            if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+                lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
+            }
 
-            FilterLog(message: "seek 时间为: 素材 \(String(describing: self.moveSticker?.locationPath)) 取出每一帧 显示时间: \(CMTimeGetSeconds(self.currentTime)) 帧时间 \(CMTimeGetSeconds(currentSampleTime))")
+            sharedImageProcessingContext.runOperationSynchronously {
+                self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
+            }
+            return
+        } else {
+            FilterLog(message: "sampleBuffer is  nil data is error self.assetReader?.status is \(String(describing: assetReader?.status))")
+            
+        }
+        // 二, 已经播放完一次
+        if assetReader?.status == .completed {
+            BFLog(message: "已经播放完一次")
+            // 1 自动循环模式 重头开始循环
+            if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
+                FilterLog(message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
 
-            // 视频filter 已经播放完一次了, 设置定帧数据 使用精准时间? INT
-            let outTime: Float64 = self.moveSticker!.out == 0 ? duration.seconds : self.moveSticker!.out
-            if (CMTimeGetSeconds(currentSampleTime) - outTime) < 0.033 && self.moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue && lastImageBuffer == nil {
-                FilterLog(message: "设置了定帧!!!")
-                lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
+                startReading()
 
-            } else {
-                FilterLog(message: "不能设置定帧!!!")
-                CMSampleBufferInvalidate(sampleBuffer)
+            } else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+                // 2),定帧处理
+                if lastImageBuffer != nil {
+                    FilterLog(message: "处理显示定帧")
+                    let currTime = CMTimeGetSeconds(currentTime)
+                    FilterLog(message: "process time is \(currTime)")
+                    sharedImageProcessingContext.runOperationSynchronously {
+                        renderPixelBuffler(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
+                    }
+                }
             }
         }
     }
 
- 
-    func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
+    /// 渲染帧数据
+    /// - Parameters:
+    ///   - movieFrame:帧数据
+    ///   - withSampleTime: 渲染时间戳,不是帧的 PTS 是渲染的时间
+    func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
         let bufferHeight = CVPixelBufferGetHeight(movieFrame)
         let bufferWidth = CVPixelBufferGetWidth(movieFrame)
         CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
@@ -515,8 +519,33 @@ class PQMovieFilter: PQBaseFilter {
         secondChrominanceFramebuffer?.unlock()
     }
 
-    
     func nanosToAbs(_ nanos: UInt64) -> UInt64 {
         return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
     }
 }
+
+// 测试使用
+import VideoToolbox
+
+extension UIImage {
+    public convenience init?(pixelBuffer: CVPixelBuffer) {
+        var cgImage: CGImage?
+        VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
+
+        guard let cgImage = cgImage else {
+            return nil
+        }
+
+        self.init(cgImage: cgImage)
+    }
+    
+    func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String){
+       
+        if let imageData =  currentImage.jpegData(compressionQuality: persent) {
+               let fullPath = NSHomeDirectory().appending("/Documents/").appending(imageName)
+
+                try? imageData.write(to: URL(fileURLWithPath: fullPath))
+               print("fullPath=\(fullPath)")
+           }
+       }
+}

+ 27 - 27
BFFramework/Classes/PQGPUImage/akfilters/PQMovieInput.swift

@@ -117,10 +117,14 @@ public class PQMovieInput: ImageSource {
 
     // 是否为导出模式
     public   var mIsExport: Bool = false
+    
+    //打印开始时间
+    var debugStartTime:CFTimeInterval?
+
      // 画布的大小 注意要是偶数 要不在 IOS 13上会有绿边 自动放大到偶数
-   public var mShowVidoSize: CGSize = cVideoCannvasSizeOneToOne {
+    public var mShowVidoSize: CGSize = cVideoCannvasSizeOneToOne {
         didSet {
-            BFLog(message: "mShowVidoSize is move input  \(mShowVidoSize)")
+            FilterLog(message: "mShowVidoSize is move input  \(mShowVidoSize)")
             do {
                 displayLink?.isPaused = true
                 imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(mShowVidoSize.width), height: GLint(mShowVidoSize.height)), textureOnly: true)
@@ -131,8 +135,6 @@ public class PQMovieInput: ImageSource {
             }
         }
     }
-
-    // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works
     // 初始化方法
     public init(asset: AVAsset, videoComposition: AVVideoComposition?, audioMix: AVAudioMix?, playAtActualSpeed: Bool = false, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
         self.asset = asset
@@ -153,8 +155,8 @@ public class PQMovieInput: ImageSource {
         // 设置触发频率 这个周期可以通过frameInterval属性设置,CADisplayLink的selector每秒调用次数=60/frameInterval。比如当frameInterval设为2,每秒调用就变成30次
 
         displayLink?.frameInterval = 2
-        // 加入循环
-        displayLink?.add(to: RunLoop.main, forMode: RunLoop.Mode.default)
+        // 加入循环 要使用 common 不要让级别高的卡住回调事件
+        displayLink?.add(to: RunLoop.main, forMode: RunLoop.Mode.common)
         displayLink?.isPaused = true
         if #available(iOS 10.0, *) {
             seekQueue = DispatchQueue(label: "PQ.MovieInput.seeking", qos: .userInteractive, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
@@ -191,8 +193,8 @@ public class PQMovieInput: ImageSource {
         isPlay = false
         beginTime = 0
         currentTime = .zero
-        BFLog(message: "播放开始时间、\(CMTimeGetSeconds(timeRange.start)) 结束时间\(CMTimeGetSeconds(timeRange.end))")
-      
+        FilterLog(message: "初始化播放开始时间、\(CMTimeGetSeconds(timeRange.start)) 结束时间\(CMTimeGetSeconds(timeRange.end)) 播放总时长:\(CMTimeGetSeconds(timeRange.end) - CMTimeGetSeconds(timeRange.start))")
+  
         playeTimeRange = timeRange
         startTime = playeTimeRange.start
 
@@ -250,8 +252,10 @@ public class PQMovieInput: ImageSource {
 
             let prgressValue = currTime / duration
 
-            if prgressValue > 1 {
-                FilterLog(message: "播放完成")
+            FilterLog(message: "播放进行中 总用时: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) 播放进度当前时间:\(currTime) 进度:\(prgressValue)")
+            if prgressValue >= 1 {
+                FilterLog(message: "全部播放完成 总用时为:\(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
+ 
                 self.delegate?.didFinishMovie()
                 self.completion?()
  
@@ -276,6 +280,9 @@ public class PQMovieInput: ImageSource {
     // 开始 isFreeBuffer 是否是定首帧功能,定首帧就不处理声音了 会播放出来
     @objc public func start(isFreeBuffer: Bool, isExport: Bool = false,timeRange:CMTimeRange = CMTimeRange.init()) {
         
+        debugStartTime = CFAbsoluteTimeGetCurrent()
+        FilterLog(message: "开始播放的系统时钟时间 \(String(describing: debugStartTime))")
+        
         playeTimeRange = timeRange
         readerAudioTrackOutput = nil
         FilterLog(message: "PQMoveInput开始")
@@ -286,7 +293,7 @@ public class PQMovieInput: ImageSource {
 
         isPlay = true
         if assetReader == nil {
-            BFLog(message: "assetReader is null!!!!!")
+            FilterLog(message: "assetReader is null!!!!!")
             return
         }
 
@@ -411,7 +418,7 @@ public class PQMovieInput: ImageSource {
             }
 
             assetReader.timeRange = playeTimeRange
-            BFLog(message: "播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end))")
+            FilterLog(message: "播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end))")
        
 
             actualStartTime = nil
@@ -446,7 +453,7 @@ public class PQMovieInput: ImageSource {
 
     func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput: AVAssetReaderOutput) {
         if !isPlay {
-            BFLog(message: "自动停到首帧的不处理音频")
+            FilterLog(message: "自动停到首帧的不处理音频")
             return
         }
         /*
@@ -490,18 +497,7 @@ public class PQMovieInput: ImageSource {
 
             // 设置当前帧的时间戳
             imageFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime))
-//        print(" Framebuffer \(imageFramebuffer.texture)")
-
-//        glBindTexture(GLenum(GL_TEXTURE_2D), imageFramebuffer.texture)
-//        if true {
-//            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR)
-//        }
-//
-//        if true {
-//            glGenerateMipmap(GLenum(GL_TEXTURE_2D))
-//        }
-//        glBindTexture(GLenum(GL_TEXTURE_2D), 0)
-
+ 
             updateTargetsWithFramebuffer(imageFramebuffer)
 
             imageFramebuffer.unlock()
@@ -513,8 +509,12 @@ public class PQMovieInput: ImageSource {
             if runBenchmark {
                 let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime)
                 totalFrameTimeDuringCapture += currentFrameTime
-                print("Average frame time : \(1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent)) ms")
-                print("Current frame time : \(1000.0 * currentFrameTime) ms")
+             
+                FilterLog(message:"currentTime is \(CMTimeGetSeconds(currentTime))")
+                FilterLog(message:"Average frame time : \(1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent)) ms")
+                FilterLog(message:"Current frame time : \(1000.0 * currentFrameTime) ms")
+                FilterLog(message:"totalFrameTimeDuringCapture is \(totalFrameTimeDuringCapture)")
+
             }
 
             if mAutoPause {

+ 6 - 9
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointEditerController.swift

@@ -57,7 +57,8 @@ class PQStuckPointEditerController: PQBaseViewController {
         let playerView = PQGPUImagePlayerView(frame: CGRect(x: 0, y: navHeadImageView?.frame.maxY ?? 0, width: playerHeight, height: playerHeight))
         playerView.backgroundColor = PQBFConfig.shared.styleBackGroundColor
         playerView.isShowLine = false
-//        playerView.showGaussianBlur = true
+        playerView.showGaussianBlur = true
+        playerView.playerEmptyView.isHidden = true
         return playerView
     }()
 
@@ -380,8 +381,7 @@ class PQStuckPointEditerController: PQBaseViewController {
         projectModel.sData?.videoMetaData?.videoHeight = Int(videoSize.height)
 
         // 2,创建滤镜
-        
-        DispatchQueue.global().async { // 并行、异步
+      
             
             let beginTime: TimeInterval = Date().timeIntervalSince1970
             self.mStickers = self.createStickers(sections: self.projectModel.sData?.sections ?? List(), inputSize: CGSize(width: CGFloat(self.projectModel.sData?.videoMetaData?.videoWidth ?? 0), height: CGFloat(self.projectModel.sData?.videoMetaData?.videoHeight ?? 0)))
@@ -389,10 +389,7 @@ class PQStuckPointEditerController: PQBaseViewController {
             
             let end: TimeInterval = Date().timeIntervalSince1970
             BFLog(message: "createStickers tiskskskskme  \(end - beginTime)")
-            
-            DispatchQueue.main.async { // 串行、异步
-            
-            
+ 
                 // 3,设置音频
                 let audioPath = self.stuckPointMusicData?.localPath ?? ""
                 BFLog(message: "初始化音频播放器的音频地址为:\(audioPath)")
@@ -432,9 +429,9 @@ class PQStuckPointEditerController: PQBaseViewController {
                     if self?.synchroMarskView.superview != nil {
                         self?.synchroMarskView.removeMarskView()
                     }
-                }
+                
  
-            }
+        
         }
 
 

+ 212 - 98
BFFramework/Classes/Stuckpoint/ViewModel/PQGPUImagePlayerView.swift

@@ -27,25 +27,8 @@ public enum PQGPUImagePlayerViewStatus: Int {
     case unknow = -1000
 }
 
-public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
-    public func willDisplayFramebuffer(renderView _: RenderView, framebuffer _: Framebuffer) {}
-
-    public func didDisplayFramebuffer(renderView _: RenderView, framebuffer: Framebuffer) {
-//        if(renderView.bounds.size.width = framebuffer.size.width && renderView.bounds.size.height = framebuffer.size.height){
-
-        if GLint(mCanverSize.width) == framebuffer.size.width, GLint(mCanverSize.height) == framebuffer.size.height {
-//        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
-
-//        DispatchQueue.main.async {
-//            renderView.isHidden = false
-        }
-//        }
-    }
-
-    public func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool {
-        return false
-    }
-
+public class PQGPUImagePlayerView: UIView {
+     
     public private(set) var playbackTime: TimeInterval = 0 {
         willSet {
             playbackTimeChangeClosure?(newValue)
@@ -143,15 +126,25 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
 
     public var mPlayeTimeRange: CMTimeRange?
 
-    public  var mStickers: [PQEditVisionTrackMaterialsModel]?
-
-    // 最后一次显示的sticker
-    public  var lastshowSticker: PQEditVisionTrackMaterialsModel?
-
+    var mStickers: [PQEditVisionTrackMaterialsModel]? {
+        didSet {
+            
+            FilterLog(message: "设置线程为: \(Thread.current) \(OperationQueue.current?.underlyingQueue?.label as Any)")
+        
+            configCache()
+        }
+    }
+ 
     // 是否显示时间条
-    public  var showProgressLab: Bool = true
-
-    public   var cacheFilters: [PQBaseFilter] = Array()
+    var showProgressLab: Bool = true
+
+    // 缓存创建filter 防止 seek 100ms 慢
+    @Atomic var cacheFilters: Array<PQBaseFilter> = Array()
+    // 缓存个数
+    var cacheFiltersMaxCount: Int = 8
+  
+    /// Use serial queue to ensure that the picture is smooth
+    var createFiltersQueue: DispatchQueue!
     
     //是否显示高斯
     public  var showGaussianBlur:Bool = false
@@ -170,7 +163,7 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
     // 暂停播放view
     lazy var playView: UIImageView = {
         let view = UIImageView(frame: CGRect(x: (self.frame.size.width - 52) / 2, y: (self.frame.size.height - 52) / 2, width: 52, height: 52))
-        view.image = UIImage().BF_Image(named: "gpuplayBtn")
+        view.image =  UIImage().BF_Image(named: "gpuplayBtn")
         view.isHidden = true
         return view
 
@@ -218,14 +211,22 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         addSubview(playView)
         addSubview(progressLab)
         backgroundColor = PQBFConfig.shared.styleBackGroundColor
-
         playerEmptyView = UIImageView(frame: bounds)
-        playerEmptyView.backgroundColor = PQBFConfig.shared.styleBackGroundColor
-        playerEmptyView.image = UIImage().BF_Image(named: "playEmpty")
+        playerEmptyView.backgroundColor = .black
+        playerEmptyView.image =  UIImage().BF_Image(named: "playEmpty")
         playerEmptyView.contentMode = .center
         addSubview(playerEmptyView)
 
         addSubview(tipLab)
+        
+        if #available(iOS 10.0, *) {
+            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
+        } else {
+            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
+        }
+        if #available(iOS 10.0, *) {
+            createFiltersQueue.activate()
+        }
     }
 
     func showBorderLayer() {
@@ -272,7 +273,7 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
     // 设置画布比例
    public func resetCanvasFrame(frame: CGRect) {
         if self.frame.equalTo(frame) {
-            BFLog(message: "新老值一样,不重置")
+            FilterLog(message: "新老值一样,不重置")
             return
         }
 
@@ -282,7 +283,7 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
             showBorderLayer()
         }
 
-        BFLog(message: "new frame is \(frame)")
+        FilterLog(message: "new frame is \(frame)")
         renderView.isHidden = true
         renderView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
         renderView.resatSize()
@@ -331,10 +332,10 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         var composition: AVMutableComposition?
 
         let asset = AVURLAsset(url: url, options: nil)
-        BFLog(message: "播放器初始化的音频时长\(asset.duration.seconds)  url is \(url)")
+        FilterLog(message: "播放器初始化的音频时长\(asset.duration.seconds)  url is \(url)")
         self.asset = asset
         if (audioMixModel != nil && audioMixModel?.localPath != nil) || (videoStickers != nil && (videoStickers?.count ?? 0) > 0) {
-            BFLog(message: "有参加混音的数据。")
+            FilterLog(message: "有参加混音的数据。")
             (audioMix, composition) = PQPlayerViewModel.setupAudioMix(originAsset: asset, bgmData: audioMixModel, videoStickers: videoStickers)
         } else {
             audioMix = nil
@@ -356,7 +357,7 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         }
         do {
             if composition != nil {
-                BFLog(message: "composition 方式初始化")
+                FilterLog(message: "composition 方式初始化")
                 movie = try PQMovieInput(asset: composition!, videoComposition: videoComposition, audioMix: audioMix, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
             } else {
                 movie = try PQMovieInput(url: url, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
@@ -382,8 +383,9 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         movie.progress = { [weak self, movie] currTime, duration, prgressValue in
             guard let strongSelf = self else { return }
 
-            self?.findShowStikcer(currTime: movie.currentTime.seconds)
-
+            FilterLog(message: " movie.currentTime.seconds 进度\(movie.currentTime.seconds)")
+            self?.changeFilter(currTime: movie.currentTime.seconds)
+            
             self?.progress?(currTime, duration, prgressValue)
 
             DispatchQueue.main.async {
@@ -402,6 +404,11 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         }
         movie.completion = { [weak self] in
             guard let strongSelf = self else { return }
+            //缓存已经用完,重新初始化缓存
+            if(strongSelf.filters.count == 0){
+                strongSelf.configCache()
+            }
+            
             DispatchQueue.main.async {
                 strongSelf.status = .stop
                 strongSelf.finishedClosure?()
@@ -414,71 +421,164 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
         applyFilters()
     }
 
-    func findShowStikcer(currTime: Float64) {
-        if mStickers?.count ?? 0 == 0 {
-            BFLog(message: "mStickers data is error")
-            return
-        }
-        var currentSticker: PQEditVisionTrackMaterialsModel?
-        var currentIdenx: Int = 0
-        for (index, sticker) in mStickers!.enumerated() {
-            if sticker.timelineIn <= currTime, sticker.timelineOut >= currTime {
-                currentSticker = sticker
-                currentIdenx = index
+    // 初始化缓存,默认创建 cacheFiltersMaxCount 个缓存 filters
+    func configCache() {
+        cacheFilters.removeAll()
+        FilterLog(message: "原素材 总数:\(mStickers?.count ?? 0) ")
+       
+        if mStickers?.count ?? 0 > 0 {
+            
+            for (index , sticker) in mStickers!.enumerated() {
+                print( "mStickers timelinein:\(sticker.timelineIn) timelineout: \(sticker.timelineOut) index : \(index)")
 
-                break
             }
-        }
-        // 创建不同的filter
-        if currentSticker == nil {
-            BFLog(message: "sticker data is error")
-            return
-        }
-
-        //
-        if movie != nil && currentSticker != lastshowSticker {
-            BFLog(message: "sticker timelineIn is: \(currentSticker!.timelineIn) timelineOut \(currentSticker!.timelineOut)    in is :\(currentSticker!.model_in) in out is :\(currentSticker!.out)  sticker location::: \(String(describing: currentSticker?.locationPath))")
+            
+            for (index, currentSticker) in mStickers!.enumerated() {
+               //到达最大缓存数退出
+                if index == cacheFiltersMaxCount {
+                    break
+                }
+                var showFitler: PQBaseFilter?
+                if currentSticker.type == StickerType.VIDEO.rawValue {
+                    showFitler = PQMovieFilter(movieSticker: currentSticker)
 
-            var showFitler: PQBaseFilter?
-            if currentSticker!.type == StickerType.VIDEO.rawValue {
-                showFitler = PQMovieFilter(movieSticker: currentSticker!)
+                } else if currentSticker.type == StickerType.IMAGE.rawValue {
+                    showFitler = PQImageFilter(sticker: currentSticker)
+                }
+                if showFitler != nil {
+                    print( " 加入到缓存 的 filter timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) in :\(currentSticker.model_in) out: \(currentSticker.out) index : \(index)")
+                    cacheFilters.append(showFitler!)
+                }
 
-            } else if currentSticker!.type == StickerType.IMAGE.rawValue {
-                showFitler = PQImageFilter(sticker: currentSticker!)
             }
+            
+            
+            for (index, filter) in cacheFilters.enumerated() {
+                FilterLog(message: " 初始化 config create currentSticker timelinein \(String(describing: filter.stickerInfo?.timelineIn)) timelineout \(String(describing: filter.stickerInfo?.timelineOut))  in :\(String(describing: filter.stickerInfo?.model_in)) out \(String(describing: filter.stickerInfo?.out))  index\(index)")
+            }
+            
+            if(cacheFilters.first != nil){
+                movie?.removeAllTargets()
+                let showFilter: PQBaseFilter = cacheFilters.first!
+                movie?.addTarget(showFilter, atTargetIndex: 0)
+                showFilter.addTarget(renderView, atTargetIndex: 0)
+            }
+      
+        }
 
-            movie!.removeAllTargets()
-            let currentTarget: ImageSource = movie!
+   
+
+  
+    }
+
+    //创建下一个filter 数据
+    func createNextFilter() {
+        FilterLog(message: "加入前 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
+          if cacheFilters.count <=  cacheFiltersMaxCount {
+              let showIndex = mStickers?.firstIndex(where: { (sticker) -> Bool in
+                (cacheFilters.last?.stickerInfo == sticker)
+              })
+                FilterLog(message: "当前显示的showIndex: \(String(describing: showIndex))")
+              if ((showIndex ?? 0) + 1) < (mStickers?.count ?? 0) {
+                  let currentSticker = mStickers?[(showIndex ?? 0) + 1]
+                  if currentSticker != nil {
+                      var showFitler: PQBaseFilter?
+                      if currentSticker!.type == StickerType.VIDEO.rawValue {
+                          showFitler = PQMovieFilter(movieSticker: currentSticker!)
+
+                      } else if currentSticker!.type == StickerType.IMAGE.rawValue {
+                          showFitler = PQImageFilter(sticker: currentSticker!)
+                      }
+                      if showFitler != nil {
+
+                          cacheFilters.append(showFitler!)
+                      }
+                  }else{
+                    FilterLog(message: "缓存数据加入不成功!!!!!")
+                  }
+              }
+            
+            FilterLog(message: "加入后 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
+             
+          }
+        
+        
+      }
+ 
+    
+    /// 按时间从缓存中取出要显示的filter
+    /// - Parameter currTime: 当前播放时间
+    func changeFilter(currTime: Float64) {
+        FilterLog(message: " 要查找的 currTime is \(currTime)")
+        //1,删除已经显示过的 filter
+        self.cacheFilters.removeAll(where: {(filter) -> Bool in
+
+            (currTime > (filter.stickerInfo?.timelineOut ?? 0.0))
+        })
+ 
+        // 2,找出一个要显示的 fitler
+        let showIndex = cacheFilters.firstIndex(where: { (filter) -> Bool in
+            (currTime >= (filter.stickerInfo?.timelineIn ?? 0.0) && currTime <= (filter.stickerInfo?.timelineOut ?? 0.0))
+        })
+  
+        if(showIndex == nil){
+          
+            for (index,bsFilter )in cacheFilters.enumerated() {
+                print( "22222缓存操作   没要查找到要查找的显示是为:\(currTime) 缓存数据timeline in :\(bsFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(bsFilter.stickerInfo?.timelineOut ?? 0.0) 缓存数 \(cacheFilters.count) index: \(index)")
+            }
+            
+            return
+        }
+    
+        let showFilter: PQBaseFilter = cacheFilters[showIndex ?? 0]
+        
+        print("缓存操作   查找到命中的显示是为:\(currTime) 缓存数据timeline in :\(showFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(showFilter.stickerInfo?.timelineOut ?? 0.0) 缓存数 \(cacheFilters.count) index: \(String(describing: showIndex))")
+        
+        if(!(showFilter.isShow)){
+            FilterLog(message: "showIndex当前时间为  \(currTime) showIndex is \(String(describing: showIndex)) 显示 filter timein is: \(String(describing: showFilter.stickerInfo?.timelineIn)) timeout is: \(String(describing: showFilter.stickerInfo?.timelineOut))")
+ 
+            showFilter.isShow = true
             
-               if(currentSticker?.type == StickerType.IMAGE.rawValue && showGaussianBlur){
-                   //高斯层
-                   let json = currentSticker?.toJSONString(prettyPrint: false)
-                   if json == nil {
-                       BFLog(message: "数据转换有问题 跳转")
-                       return
-                   }
-                   let blurStickerModel: PQEditVisionTrackMaterialsModel? = Mapper<PQEditVisionTrackMaterialsModel>().map(JSONString: json!)
-                   blurStickerModel?.canvasFillType = stickerContentMode.aspectFillStr.rawValue
-                   let showGaussianFitler:PQBaseFilter = PQImageFilter(sticker: blurStickerModel!)
-                   
-                   let iosb:GaussianBlur = GaussianBlur.init()
-                   iosb.blurRadiusInPixels = 20
-                   showGaussianFitler.addTarget(iosb)
-                   
-                   currentTarget.addTarget(showGaussianFitler, atTargetIndex: 0)
-                   
-                   iosb.addTarget(showFitler!)
+            movie!.removeAllTargets()
         
-                   showFitler?.addTarget(renderView, atTargetIndex: 0)
-               }else{
-                   currentTarget.addTarget(showFitler!, atTargetIndex: 0)
-                   showFitler?.addTarget(renderView, atTargetIndex: 0)
+            if(showFilter.stickerInfo?.type == StickerType.IMAGE.rawValue && showGaussianBlur){
+              
+                self.createFiltersQueue.async {
+                    //高斯层
+                    let json = showFilter.stickerInfo?.toJSONString(prettyPrint: false)
+                    if json == nil {
+                        FilterLog(message: "数据转换有问题 跳转")
+                        return
+                    }
 
-               }
+                    let blurStickerModel: PQEditVisionTrackMaterialsModel? = Mapper<PQEditVisionTrackMaterialsModel>().map(JSONString: json!)
+                    blurStickerModel?.canvasFillType = stickerContentMode.aspectFillStr.rawValue
+                    let showGaussianFitler:PQBaseFilter = PQImageFilter(sticker: blurStickerModel!)
+                    
+                    let iosb:GaussianBlur = GaussianBlur.init()
+                    iosb.blurRadiusInPixels = 20
+                    showGaussianFitler.addTarget(iosb)
+                    
+                    sharedImageProcessingContext.runOperationAsynchronously{ [self] in
+        
+                        self.movie?.addTarget(showGaussianFitler, atTargetIndex: 0)
+                        iosb.addTarget(showFilter)
+                        showFilter.addTarget(self.renderView as! ImageConsumer, atTargetIndex: 0)
+                        
+                    }
+                }
+            
            
+                
+            }else{
+                movie?.addTarget(showFilter, atTargetIndex: 0)
+                showFilter.addTarget(renderView, atTargetIndex: 0)
 
+            }
+            self.createFiltersQueue.async {
+                self.createNextFilter()
+            }
 
-            lastshowSticker = currentSticker
         }
     }
 
@@ -515,7 +615,7 @@ public extension PQGPUImagePlayerView {
             self.progressLab.isHidden = false
         }
 //        guard status != .playing else {
-//            BFLog(message: "已经是播放状态")
+//            FilterLog(message: "已经是播放状态")
 //            return
 //        }
 
@@ -574,7 +674,7 @@ public extension PQGPUImagePlayerView {
 
     // 显示提示文字
     func showTip(show: Bool) {
-        BFLog(message: "showTip \(show)")
+        FilterLog(message: "showTip \(show)")
         tipLab.isHidden = !show
         if show {
             playerEmptyView.isHidden = true
@@ -609,9 +709,23 @@ public extension PQGPUImagePlayerView {
         filters = newFilters
     }
 
-//    // 重置所有 filer
-//    func appendStickers(stickers: [PQEditVisionTrackMaterialsModel]) {
-//
-//        mStickers = stickers
-//    }
 }
+
+// MARK: - RenderViewDelegate
+extension PQGPUImagePlayerView: RenderViewDelegate{
+    public func willDisplayFramebuffer(renderView _: RenderView, framebuffer _: Framebuffer) {
+        FilterLog(message: "willDisplayFramebuffer")
+    }
+
+    public func didDisplayFramebuffer(renderView _: RenderView, framebuffer: Framebuffer) {
+        FilterLog(message: "didDisplayFramebuffer")
+    }
+
+    public func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool {
+        FilterLog(message: "didDisplayFramebuffer")
+        
+        return false
+    }
+}
+
+

+ 1 - 1
BFFramework/Classes/Utils/PQSingletonEnvUtil.swift

@@ -10,7 +10,7 @@ import Foundation
 import KeychainAccess
 
 // add by ak 开发和发布版本的不同设置  1 为正式版本发布的苹果  0 是测试 XXXX 上传苹果前要检查
-public let DEVELOPMENT_ENVIRONMENT: Int = 0
+public let DEVELOPMENT_ENVIRONMENT: Int = 1
 
 public enum ENVMode: String {
     case ENVModeOnline // 线上环境