浏览代码

1、删除不使用的代码;2、处理定帧和自动循环逻辑

jsonwang 3 年之前
父节点
当前提交
d2a5e3c0c3
共有 1 个文件被更改,包括 61 次插入231 次删除
  1. BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift(+61 −231)

+ 61 - 231
BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift

@@ -70,12 +70,10 @@ class PQMovieFilter: PQBaseFilter {
     // Last sample time that played.
     public private(set) var currentTime: CMTime = .zero
 
-    public var loop: Bool?
-
     // Progress block of the video with a paramater value of 0-1.
     // Can be used to check video encoding progress. Not called from main thread.
     public var progress: ((Double) -> Void)?
- 
+
     var audioSettings: [String: Any]?
 
     var movieFramebuffer: Framebuffer?
@@ -100,39 +98,31 @@ class PQMovieFilter: PQBaseFilter {
 
     var timebaseInfo = mach_timebase_info_data_t()
 
-    // 缓存帧属性
-    var cacheFrameBufferMaxCount: Int = 16
-    // 缓存数量
-    @Atomic var cacheframeBuffers: Array = Array<CMSampleBuffer>.init()
-
     var currentThread: Thread?
     /// Use serial queue to ensure that the picture is smooth
     var seekQueue: DispatchQueue!
 
     // * 设置播放速率 范围 0 - 8(理论值) rate 正常速度为1.0;小于为慢速;大于为快速。但不能高于解码速度1-2ms硬解一帧
-    var speedRate: Float = 1.0
+    var speedRate: Float = 1
 
     // 原视频素材的 FPS
     var stickerFPS: Float = 0
-        
-    //开始时间,创建 filter 显示的时候有
+
+    // 开始时间,创建 filter 显示的时候有
     var startTimeStamp: CMTime?
-    
-    
+
     deinit {
         BFLog(message: "movie filter deinit")
         clearData()
     }
-    
-    public override func clearData()  {
+
+    public override func clearData() {
         super.clearData()
-        if(assetReader != nil){
+        if assetReader != nil {
             assetReader?.cancelReading()
         }
-    
     }
 
-
     public init(url: URL) {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
 
@@ -191,7 +181,7 @@ class PQMovieFilter: PQBaseFilter {
     open override func renderFrame() {
         let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
         inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
-  
+
         currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
         FilterLog(message: "duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
 
@@ -214,22 +204,23 @@ class PQMovieFilter: PQBaseFilter {
                 enableSeek = false
             }
 
-            if(startTimeStamp == nil){
+            if startTimeStamp == nil {
                 startTimeStamp = currentTime
             }
-                        
+
             let stickerTime = CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600)
-          
+
             let PTSTime = CMTimeAdd(stickerTime, CMTimeSubtract(currentTime, startTimeStamp ?? .zero))
- 
-            readNextVideoFrame(showTimeStamp: CMTime.init(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
+
+            readNextVideoFrame(showTimeStamp: CMTime(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
 
         } else {
-          
             FilterLog(message: "movefilter 了 结束了  timelineIN\(String(describing: moveSticker?.timelineIn)) timelineOut\(String(describing: moveSticker?.timelineOut)) currentTime  \(CMTimeGetSeconds(currentTime)) 裁剪in:\(String(describing: moveSticker?.model_in))  裁剪out:\(String(describing: moveSticker?.out)) ")
             FilterLog(message: "不显示 movefilter 了")
 
             assetReader?.cancelReading()
+            // 重新初始化解码器
+            startReading()
         }
     }
 
@@ -254,7 +245,7 @@ class PQMovieFilter: PQBaseFilter {
 
     // MARK: -
 
-    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
+    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, audioSettings: [String: Any]? = nil) throws {
         asset = AVURLAsset(url: url, options: avAssertOptions)
 
         if asset != nil {
@@ -265,7 +256,7 @@ class PQMovieFilter: PQBaseFilter {
 
             self.videoComposition = videoComposition
             self.playAtActualSpeed = playAtActualSpeed
-            self.loop = loop
+
             yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
             self.audioSettings = audioSettings
         } else { FilterLog(message: "asset is nil") }
@@ -311,9 +302,10 @@ class PQMovieFilter: PQBaseFilter {
 
     open func startReading() {
         FilterLog(message: "开始初始化")
+        mach_timebase_info(&timebaseInfo)
 
         assetReader?.cancelReading()
- 
+
         guard let assetReader = createReader() else {
             return // A return statement in this frame will end thread execution.
         }
@@ -332,7 +324,6 @@ class PQMovieFilter: PQBaseFilter {
             debugPrint("ERROR: Unable to start reading: \(error)")
             return
         }
- 
     }
 
     // 设置解码开始时间
@@ -341,11 +332,9 @@ class PQMovieFilter: PQBaseFilter {
         requestedStartTime = startTime
         startReading()
     }
-    
+
     // 取出第一帧数据
-    func readNextVideoFrame(showTimeStamp:CMTime) {
-        
-        
+    func readNextVideoFrame(showTimeStamp: CMTime) {
         if assetReader == nil {
             FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
             return
@@ -357,230 +346,72 @@ class PQMovieFilter: PQBaseFilter {
                 videoTrackOutput = output
             }
         }
-   
 
         let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
-        
+
         var sampleBuffer: CMSampleBuffer?
-        
-        //日志使用 count
-        var count: Int  = 0
-        while self.assetReader?.status == .reading {
- 
+
+        // 日志使用 count
+        var count: Int = 0
+        while assetReader?.status == .reading {
             count = count + 1
             sampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
-            if(sampleBuffer == nil){
+            if sampleBuffer == nil {
                 BFLog(message: " copyNextSampleBuffer is nil error!!!")
                 return
             }
             let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer!)
-            
-            //目标帧 时间
-            if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp){
-                
-                let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
-                FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)) 查找次数\(count)  进场时间: \(String(describing: moveSticker?.timelineIn))  裁剪开始时间:\(String(describing: moveSticker?.model_in))")
-                break
+
+            // 目标帧 时间
+            if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= (CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out)) {
+                if sampleBuffer != nil {
+                    let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
+                    FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)) 查找次数\(count)  进场时间: \(String(describing: moveSticker?.timelineIn))  裁剪开始时间:\(String(describing: moveSticker?.model_in))")
+                    break
+                }
             }
         }
-        
-    
-        
-//        if lastImageBuffer != nil {
-//            FilterLog(message: "处理显示定帧")
-//            let currTime = CMTimeGetSeconds(currentTime)
-//            FilterLog(message: "process time is \(currTime)")
-//            process(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
-//        }else{
-//            processCurrentBuffer(sampleBuffer: sampleBuffer!)
-//
-//            lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)
-//
-//        }
-        if(sampleBuffer != nil){
-            lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
-            processCurrentBuffer(sampleBuffer: sampleBuffer!)
-        }else{
-            FilterLog(message: "sampleBuffer is  nil data is error")
-        }
-   
-        
-        
-       
-        
-        /*
-        if sampleBuffer == nil && assetReader?.status == .completed {
-            FilterLog(message: "已经播放完成了 \(CMTimeGetSeconds(currentTime))")
-            // 如果不是自动循环模式 且 定帧/裁剪模式 播放完一次后自动停止
-
-            if moveSticker?.materialDurationFit?.fitType != adapterMode.loopAuto.rawValue, moveSticker?.materialDurationFit?.fitType != adapterMode.staticFrame.rawValue {
-                return
+        // 一,显示命中的帧数据
+        if sampleBuffer != nil {
+            if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+                lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
             }
-            // 自动循环模式 重头开始循环
+
+            sharedImageProcessingContext.runOperationSynchronously {
+                self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
+            }
+            return
+        } else {
+            FilterLog(message: "sampleBuffer is  nil data is error self.assetReader?.status is \(String(describing: assetReader?.status))")
+            
+        }
+        // 二, 已经播放完一次
+        if assetReader?.status == .completed {
+            // 1 自动循环模式 重头开始循环
             if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
                 FilterLog(message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
-                isStartReading = false
+
                 startReading()
 
             } else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+                // 2),定帧处理
                 if lastImageBuffer != nil {
                     FilterLog(message: "处理显示定帧")
-                    let currTime = CMTimeGetSeconds(currentTime!)
+                    let currTime = CMTimeGetSeconds(currentTime)
                     FilterLog(message: "process time is \(currTime)")
-                    process(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
-                }
-
-                return
-            }
-
-            return
-        } else {
-            FilterLog(message: "copy sampleBuffer is error ??? \(String(describing: assetReader?.status)) \(sampleBuffer)")
-        }
-
-        if sampleBuffer != nil {
-            if enableSeek {
-                BFLog(message: "cacheframeBuffers 添加后 个数\(cacheframeBuffers.count)")
-                cacheframeBuffers.append(sampleBuffer!)
-            } else {
-                // 正常处理每一帧
-                processCurrentBuffer(sampleBuffer: sampleBuffer!)
-            }
-        }
- 
- */
-    }
-    
-    /*
-    func getNextSampleBufferQueueAsync(showTimeStamp:CMTime){
-        
-        BFLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))")
-        
-        seekQueue.async {
-            
-            let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
-            if self.assetReader == nil {
-                FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
-//                return
-            }
-            var videoTrackOutput: AVAssetReaderOutput?
-            for output in self.assetReader!.outputs {
-                if output.mediaType == AVMediaType.video {
-                    videoTrackOutput = output
-                }
-            }
-     
-            var targetSampleBuffer: CMSampleBuffer?
-            
-            while self.assetReader?.status == .reading {
-      
-                targetSampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
-                if(targetSampleBuffer == nil){
-                    BFLog(message: " copyNextSampleBuffer is nil error!!!")
-                    return
-                }
-                let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
-                
-                //目标帧 时间
-                if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) > CMTimeGetSeconds(showTimeStamp){
-
-                    let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
-                    FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)))")
-      
-                 
                     sharedImageProcessingContext.runOperationSynchronously {
-                        self.currentRenderImageBuffer = CMSampleBufferGetImageBuffer(targetSampleBuffer!)!
-//                        self.process(movieFrame:self.currentRenderImageBuffer!, withSampleTime: self.currentTime)
-                        self.processCurrentBuffer(sampleBuffer: targetSampleBuffer!)
+                        renderPixelBuffler(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
                     }
                 }
-              
-            }
-            
- 
-        }
-        
-   
-    }
-    
-    
-    func getNextSampleBuffer(showTimeStamp:CMTime) -> CMSampleBuffer? {
-        
-        BFLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))")
-        
-        let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
-        if assetReader == nil {
-            FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
-            return nil
-        }
-        var videoTrackOutput: AVAssetReaderOutput?
-        for output in assetReader!.outputs {
-            if output.mediaType == AVMediaType.video {
-                videoTrackOutput = output
-            }
-        }
- 
-        var targetSampleBuffer: CMSampleBuffer?
-        
-        while assetReader?.status == .reading {
-  
-            targetSampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
-            if(targetSampleBuffer == nil){
-                BFLog(message: " copyNextSampleBuffer is nil error!!!")
-                return nil
-            }
-            let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
-            
-            //目标帧 时间
-            if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) -  CMTimeGetSeconds(showTimeStamp) > 0.01{
-                
-                let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
-                
-//                if(TimeInterval(endDecoderTime - beginDecoderTime) > 0.033){
-                FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))  查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime))  filter 显示时长\(moveSticker?.timelineIn)")
-//                }
-  
-                return targetSampleBuffer
-
-            }
-          
-        }
-        
-        return nil
-        
-    }
-*/
-    func process(movieFrame frame: CMSampleBuffer) {
-        let movieFrame = (CMSampleBufferGetImageBuffer(frame) == nil) ? lastImageBuffer : CMSampleBufferGetImageBuffer(frame)!
-        if movieFrame != nil {
-            process(movieFrame: movieFrame!, withSampleTime: currentTime)
-        }
-    }
-    
-    func processCurrentBuffer(sampleBuffer: CMSampleBuffer) {
-        let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
-        let duration = asset!.duration // Only used for the progress block so its acuracy is not critical
-
-        sharedImageProcessingContext.runOperationSynchronously {
-            self.process(movieFrame: sampleBuffer)
-
-            FilterLog(message: "seek 时间为: 素材 \(String(describing: self.moveSticker?.locationPath)) 取出每一帧 显示时间: \(CMTimeGetSeconds(self.currentTime)) 帧时间 \(CMTimeGetSeconds(currentSampleTime))")
-
-            // 视频filter 已经播放完一次了, 设置定帧数据 使用精准时间? INT
-            let outTime: Float64 = self.moveSticker!.out == 0 ? duration.seconds : self.moveSticker!.out
-            if (CMTimeGetSeconds(currentSampleTime) - outTime) < 0.033 && self.moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue && lastImageBuffer == nil {
-                FilterLog(message: "设置了定帧!!!")
-                lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)!
-
-            } else {
-                FilterLog(message: "不能设置定帧!!!")
-                CMSampleBufferInvalidate(sampleBuffer)
             }
         }
     }
 
- 
-    func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
+    /// 渲染帧数据
+    /// - Parameters:
+    ///   - movieFrame:帧数据
+    ///   - withSampleTime: 渲染时间戳,不是帧的 PTS 是渲染的时间
+    func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
         let bufferHeight = CVPixelBufferGetHeight(movieFrame)
         let bufferWidth = CVPixelBufferGetWidth(movieFrame)
         CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
@@ -687,7 +518,6 @@ class PQMovieFilter: PQBaseFilter {
         secondChrominanceFramebuffer?.unlock()
     }
 
-    
     func nanosToAbs(_ nanos: UInt64) -> UInt64 {
         return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
     }