
1. Rename the class. 2. Remove the album export process.

jsonwang 3 years ago
Parent
Current commit
bc7f82a134

+ 1 - 1
BFFramework/Classes/PQGPUImage/akfilters/PQGPUImageFilterGroup.swift

@@ -33,7 +33,7 @@ open class PQGPUImageFilterGroup: PQBaseFilter{
         
         var showFitler:PQBaseFilter?
         if currentSticker!.type == StickerType.VIDEO.rawValue {
-            showFitler = PQMoveFilter(movieSticker: currentSticker!)
+            showFitler = PQMovieFilter(movieSticker: currentSticker!)
  
 
         } else if currentSticker!.type == StickerType.IMAGE.rawValue {

+ 99 - 107
BFFramework/Classes/PQGPUImage/akfilters/PQMoveFilter.swift → BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift

@@ -1,5 +1,5 @@
 //
-//  PQMoveFilter.swift
+//  PQMovieFilter.swift
 //  GPUImage_iOS
 //
 //  Created by ak on 2020/8/27.
@@ -38,13 +38,22 @@
 supportsRandomAccess can be set on the output; when it is true, the read range can be reset, but the caller must keep calling copyNextSampleBuffer until it returns NULL.
 Alternatively, re-initialize an AVAssetReader to set a new read time range.
 If the first approach is tried, seeking is needed; use a fairly short range each time so that reading the whole range does not take too long, and preferably split the intervals at keyframes.
+ 
+ fps
+ 
+ 25.0 fps :   0.0000  0.0400  0.0800  0.1200  0.1600  0.2000  0.2400  0.2800  0.3200  0.3600  0.4000  0.4400  0.4800  0.5200  0.5600  0.6000  0.6400  0.6800  0.7200  0.7600  0.8000  0.8400  0.8800  0.9200  0.9600  1.0000  1.0400  1.0800  1.1200  1.1600  1.2000
+ 30.0 fps :   0.0000  0.0333  0.0667  0.1000  0.1333  0.1667  0.2000  0.2333  0.2667  0.3000  0.3333  0.3667  0.4000  0.4333  0.4667  0.5000  0.5333  0.5667  0.6000  0.6333  0.6667  0.7000  0.7333  0.7667  0.8000  0.8333  0.8667  0.9000  0.9333  0.9667  1.0000
+ 60.0 fps :   0.0000  0.0167  0.0333  0.0500  0.0667  0.0833  0.1000  0.1167  0.1333  0.1500  0.1667  0.1833  0.2000  0.2167  0.2333  0.2500  0.2667  0.2833  0.3000  0.3167  0.3333  0.3500  0.3667  0.3833  0.4000  0.4167  0.4333  0.4500  0.4667  0.4833  0.5000
+ 80.0 fps :   0.0000  0.0125  0.0250  0.0375  0.0500  0.0625  0.0750  0.0875  0.1000  0.1125  0.1250  0.1375  0.1500  0.1625  0.1750  0.1875  0.2000  0.2125  0.2250  0.2375  0.2500  0.2625  0.2750  0.2875  0.3000  0.3125  0.3250  0.3375  0.3500  0.3625  0.3750
+120.0 fps :   0.0000  0.0083  0.0167  0.0250  0.0333  0.0417  0.0500  0.0583  0.0667  0.0750  0.0833  0.0917  0.1000  0.1083  0.1167  0.1250  0.1333  0.1417  0.1500  0.1583  0.1667  0.1750  0.1833  0.1917  0.2000  0.2083  0.2167  0.2250  0.2333  0.2417  0.2500
+
 
  */
 
 import Foundation
 import UIKit
 
-class PQMoveFilter: PQBaseFilter {
+class PQMovieFilter: PQBaseFilter {
     public var runBenchmark = false
 
     public weak var delegate: MovieInputDelegate?
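The header comment above describes two ways to re-read a range from an AVAssetReader. Below is a minimal sketch of the first approach (enable supportsRandomAccess, drain the current range, then reset to a new short range), assuming a plain video track and omitting error handling; the helper names makeSeekableOutput and seek are illustrative and are not part of PQMovieFilter:

import AVFoundation

// Illustrative only: build a reader output that allows resetting its read range.
func makeSeekableOutput(for asset: AVAsset) throws -> (AVAssetReader, AVAssetReaderTrackOutput)? {
    guard let track = asset.tracks(withMediaType: .video).first else { return nil }
    let reader = try AVAssetReader(asset: asset)
    let output = AVAssetReaderTrackOutput(
        track: track,
        outputSettings: [kCVPixelBufferPixelFormatTypeKey as String:
                         kCVPixelFormatType_420YpCbCr8BiPlanarFullRange])
    output.supportsRandomAccess = true   // enables reset(forReadingTimeRanges:) later
    reader.add(output)
    guard reader.startReading() else { return nil }
    return (reader, output)
}

// "Seek" by draining the current range, then pointing the output at a new, short range
// (ideally aligned to keyframes, as the comment above suggests).
func seek(output: AVAssetReaderTrackOutput, to start: CMTime, duration: CMTime) {
    while output.copyNextSampleBuffer() != nil {}   // reset is only honored after nil is returned
    output.reset(forReadingTimeRanges: [NSValue(timeRange: CMTimeRange(start: start, duration: duration))])
}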
@@ -52,7 +61,8 @@ class PQMoveFilter: PQBaseFilter {
     var yuvConversionShader: ShaderProgram?
     var asset: AVAsset?
     var videoComposition: AVVideoComposition?
-    var playAtActualSpeed: Bool = false
+    // Play at the original (actual) speed
+    var playAtActualSpeed: Bool = true
 
     // Time in the video where it should start.
     var requestedStartTime: CMTime?
@@ -82,19 +92,36 @@ class PQMoveFilter: PQBaseFilter {
 
     // Image data of the last frame; CMSampleBuffer is not deep-copied, so a CVImageBuffer variable is kept instead
     var lastImageBuffer: CVImageBuffer?
-
+    //
+    var currentRenderImageBuffer: CVPixelBuffer?
+    var currentRenderImageBufferTimeStamp: CMTime = .zero
+    var currentRenderSampleBuffer: CMSampleBuffer?
     // Rotation angle
     var mImageOrientation: ImageOrientation = .portrait
 
     var inputSize: GLSize = GLSize(width: 0, height: 0)
 
+    var timebaseInfo = mach_timebase_info_data_t()
+
     // Frame-cache properties
     var cacheFrameBufferMaxCount: Int = 16
     // Number of cached buffers
     @Atomic var cacheframeBuffers: Array = Array<CMSampleBuffer>.init()
 
+    var currentThread: Thread?
     /// Use serial queue to ensure that the picture is smooth
     var seekQueue: DispatchQueue!
+
+    // * Playback rate, range 0 - 8 (theoretical). 1.0 is normal speed; below 1.0 is slow motion; above 1.0 is fast forward. It must not exceed the decode speed (hardware decoding takes roughly 1-2 ms per frame).
+    var speedRate: Float = 1.0
+
+    // FPS of the original video material
+    var stickerFPS: Float = 0
+        
+    // Start timestamp; set when the filter is created and first displayed
+    var startTimeStamp: CMTime?
+    
+    
     deinit {}
 
     public init(url: URL) {
@@ -134,7 +161,7 @@ class PQMoveFilter: PQBaseFilter {
         FilterLog(message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut)  裁剪开始时间:\(movieSticker.model_in)  裁剪结束时间:\(movieSticker.out)  路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
 
         startReading()
- 
+
         if #available(iOS 10.0, *) {
             seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
         } else {
@@ -143,31 +170,20 @@ class PQMoveFilter: PQBaseFilter {
         if #available(iOS 10.0, *) {
             seekQueue.activate()
         }
- 
     }
 
     public override func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) {
         super.newFramebufferAvailable(framebuffer, fromSourceIndex: fromSourceIndex)
 
-        let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
-
-        FilterLog(message: "1111111111 \(currTime)")
- 
+//        let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
     }
- 
 
     open override func renderFrame() {
-
-
         let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
         inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
-
-//        let currTime = CMTimeGetSeconds(CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale))
-
-//        enableSeek = inputFramebuffer.userInfo?["enableSeek"] as? Bool ?? false
-        BFLog(message: "enableSeek is \(enableSeek)")
+  
         currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
-        FilterLog(message: "11定帧!duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
+        FilterLog(message: "duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
 
         renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: mImageOrientation, size: inputSize, stencil: false)
 
@@ -180,23 +196,38 @@ class PQMoveFilter: PQBaseFilter {
         releaseIncomingFramebuffers()
 
         if CMTimeGetSeconds(currentTime) >= moveSticker!.timelineIn, CMTimeGetSeconds(currentTime) <= moveSticker!.timelineOut {
-            FilterLog(message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(moveSticker?.model_in) ")
-            // 取出视频每一帧并渲染
+            FilterLog(message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
+
             if enableSeek {
+                FilterLog(message: "seek 到 \(CMTimeGetSeconds(currentTime))  ")
                 resetRangeTime(startTime: currentTime)
                 enableSeek = false
             }
-            readNextVideoFrame()
 
             if !isStartReading {
                 isStartReading = false
                 startReading()
             }
+            
+            if(startTimeStamp == nil){
+                startTimeStamp = currentTime
+            }
+            
+//            let showBuffer  = getNextSampleBuffer(showTimeStamp: currentTime)
+            
+            let stickerTime = CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600)
+          
+            let showBuffer  = getNextSampleBuffer(showTimeStamp:   CMTimeAdd(stickerTime, CMTimeSubtract(currentTime, startTimeStamp ?? .zero)))
+            
+            if(showBuffer != nil){
+                process(movieFrame:CMSampleBufferGetImageBuffer(showBuffer!)!, withSampleTime: currentTime)
+            }
+
         } else {
             isStartReading = false
-            FilterLog(message: "开始显示 movefilter 了 结束了\(String(describing: moveSticker?.timelineIn))  currentTime  \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(moveSticker?.model_in) ")
+            FilterLog(message: "开始显示 movefilter 了 结束了\(String(describing: moveSticker?.timelineIn))  currentTime  \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
             FilterLog(message: "不显示 movefilter 了")
-            
+
             assetReader?.cancelReading()
         }
     }
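For reference, the timestamp arithmetic introduced in renderFrame() above maps the composition clock onto the sticker's source asset: the frame to fetch is model_in plus the time elapsed since the filter first became visible. A hedged sketch, with modelIn standing in for moveSticker?.model_in:

import CoreMedia

// Illustrative only: reproduce the showTimeStamp calculation from renderFrame().
func assetTime(for currentTime: CMTime, firstShownAt startTimeStamp: CMTime, modelIn: Float64) -> CMTime {
    let clipStart = CMTime(value: Int64(modelIn * 600), timescale: 600)  // trim-in point, 600 timescale as in the diff
    let elapsed = CMTimeSubtract(currentTime, startTimeStamp)            // time since the filter appeared
    return CMTimeAdd(clipStart, elapsed)                                 // frame to look up in the asset
}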
@@ -222,14 +253,14 @@ class PQMoveFilter: PQBaseFilter {
 
     // MARK: -
 
-    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = false, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
+    public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
         asset = AVURLAsset(url: url, options: avAssertOptions)
 
         if asset != nil {
-            let fps = asset!.tracks(withMediaType: .video).first?.nominalFrameRate
-            let cbr = asset!.tracks(withMediaType: .video).first?.estimatedDataRate
-            
-            FilterLog(message: "move filter asset  fps is \(String(describing: fps))  bit rate is \(cbr ?? 0)")
+            stickerFPS = asset!.tracks(withMediaType: .video).first?.nominalFrameRate ?? 0.0
+            let bitRate = asset!.tracks(withMediaType: .video).first?.estimatedDataRate
+
+            FilterLog(message: "move filter asset  fps is \(String(describing: stickerFPS))  bit rate is \(bitRate ?? 0)")
 
             self.videoComposition = videoComposition
             self.playAtActualSpeed = playAtActualSpeed
@@ -266,24 +297,10 @@ class PQMoveFilter: PQBaseFilter {
                 assetReader!.add(readerVideoTrackOutput)
             }
 
-            if let requestedStartTime = self.requestedStartTime {
-//                if enableSeek {
-                    let outTimeSeconds = moveSticker?.out == 0 ? moveSticker!.duration : moveSticker?.out
-//                    assetReader!.timeRange = CMTimeRange(start: requestedStartTime, duration: CMTimeMake(value: Int64((outTimeSeconds ?? 0 - CMTimeGetSeconds(requestedStartTime)) * 600.0), timescale: 600))
-                
-                assetReader!.timeRange = CMTimeRange(start: CMTime.init(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * 600.0), timescale: 600))
-                
-                
+            assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * 600.0), timescale: 600))
 
-//                } else {
-//                    assetReader!.timeRange = CMTimeRange(start: requestedStartTime, duration: CMTimeMake(value: Int64(
-//                        // 这里判断有问题?
-//                        (moveSticker!.timelineIn == 0 && moveSticker!.timelineOut == 0) ? moveSticker!.duration : (moveSticker!.out - moveSticker!.model_in) * 600
-//                    ), timescale: 600))
-//                }
+            FilterLog(message: "set   assetReader!.timeRange is \(assetReader!.timeRange)")
 
-                FilterLog(message: "set   assetReader!.timeRange is \(assetReader!.timeRange)")
-            }
             return assetReader
         } catch {
             debugPrint("ERROR: Unable to create asset reader: \(error)")
@@ -293,10 +310,6 @@ class PQMoveFilter: PQBaseFilter {
 
     open func startReading() {
         FilterLog(message: "开始初始化")
-//        if isStartReading {
-//            FilterLog(message: "已经初始化过了")
-//            return
-//        }
 
         assetReader?.cancelReading()
 
@@ -319,6 +332,7 @@ class PQMoveFilter: PQBaseFilter {
             debugPrint("ERROR: Unable to start reading: \(error)")
             return
         }
+ 
     }
 
     // Set the decode start time
@@ -327,65 +341,48 @@ class PQMoveFilter: PQBaseFilter {
         requestedStartTime = startTime
         startReading()
     }
-
-    // 取出第一帧数据
-    func readNextVideoFrame() {
+    
+    
+    func getNextSampleBuffer(showTimeStamp:CMTime) -> CMSampleBuffer? {
+        
+        BFLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))")
+        
         if assetReader == nil {
             FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
-            return
+            return nil
         }
-
         var videoTrackOutput: AVAssetReaderOutput?
         for output in assetReader!.outputs {
             if output.mediaType == AVMediaType.video {
                 videoTrackOutput = output
             }
         }
-   
-
-        let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
-
-        let sampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
-        let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
-        FilterLog(message: "\(beginDecoderTime) \(endDecoderTime)解帧时长  \( TimeInterval(endDecoderTime - beginDecoderTime))")
-        if sampleBuffer == nil && assetReader?.status == .completed {
-            FilterLog(message: "已经播放完成了 \(CMTimeGetSeconds(currentTime))")
-            // 如果不是自动循环模式 且 定帧/裁剪模式 播放完一次后自动停止
-
-            if moveSticker?.materialDurationFit?.fitType != adapterMode.loopAuto.rawValue, moveSticker?.materialDurationFit?.fitType != adapterMode.staticFrame.rawValue {
-                return
-            }
-            // 自动循环模式 重头开始循环
-            if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
-                FilterLog(message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
-                isStartReading = false
-                startReading()
-
-            } else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
-                if lastImageBuffer != nil {
-//                    FilterLog(message: "处理显示定帧")
-//                    let currTime = CMTimeGetSeconds(currentTime!)
-//                    FilterLog(message: "process time is \(currTime)")
-                    process(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
-                }
-
-                return
+ 
+        var targetSampleBuffer: CMSampleBuffer?
+        
+        while assetReader?.status == .reading {
+  
+            let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
+            targetSampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
+            if(targetSampleBuffer == nil){
+                BFLog(message: " copyNextSampleBuffer is nil error!!!")
+                return nil
             }
+            let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
+            
+            // Target frame time
+            if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) > CMTimeGetSeconds(showTimeStamp){
+                FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))  命中时间为: \(CMTimeGetSeconds(targetTimeStamp))")
+                
+                return targetSampleBuffer
 
-            return
-        } else {
-            FilterLog(message: "copy sampleBuffer is error ??? \(String(describing: assetReader?.status)) \(sampleBuffer)")
-        }
-
-        if sampleBuffer != nil {
-            if enableSeek {
-                BFLog(message: "cacheframeBuffers 添加后 个数\(cacheframeBuffers.count)")
-                cacheframeBuffers.append(sampleBuffer!)
-            } else {
-                // 正常处理每一帧
-                processCurrentBuffer(sampleBuffer: sampleBuffer!)
             }
+            let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
+            FilterLog(message: "\(beginDecoderTime) \(endDecoderTime)解帧时长  \(TimeInterval(endDecoderTime - beginDecoderTime))")
         }
+        
+        return nil
+        
     }
 
     func processCurrentBuffer(sampleBuffer: CMSampleBuffer) {
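getNextSampleBuffer(showTimeStamp:) above scans forward until a sample's output presentation timestamp passes the requested time. The same search, written as an illustrative stand-alone helper that compares CMTime values directly instead of converting to Float64 seconds:

import AVFoundation

// Illustrative only: return the first sample whose output PTS is later than `target`.
func firstSample(after target: CMTime, from output: AVAssetReaderOutput, reader: AVAssetReader) -> CMSampleBuffer? {
    while reader.status == .reading, let sample = output.copyNextSampleBuffer() {
        let pts = CMSampleBufferGetOutputPresentationTimeStamp(sample)
        if CMTimeCompare(pts, target) > 0 {   // pts > target
            return sample
        }
    }
    return nil
}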
@@ -393,11 +390,7 @@ class PQMoveFilter: PQBaseFilter {
         let duration = asset!.duration // Only used for the progress block so its accuracy is not critical
 
         sharedImageProcessingContext.runOperationSynchronously {
-            self.process(movieFrame: sampleBuffer)
-
-            //            资源裁剪的 开始时间18.51261146496816  结束时间: 24.29780254777071
-            // message: 资源裁剪的 开始时间23.9121237307506  结束时间: 27.537509554140126👈
-//            FilterLog(message: "素材 \(self.moveSticker?.locationPath) 取出每一帧的时间: \(self.moveSticker!.out == 0 ? duration.seconds : self.moveSticker!.out) currentSampleTime is \(CMTimeGetSeconds(currentSampleTime))")
+//            self.process(movieFrame: sampleBuffer)
 
             FilterLog(message: "seek 时间为: 素材 \(String(describing: self.moveSticker?.locationPath)) 取出每一帧 显示时间: \(CMTimeGetSeconds(self.currentTime)) 帧时间 \(CMTimeGetSeconds(currentSampleTime))")
 
@@ -414,13 +407,7 @@ class PQMoveFilter: PQBaseFilter {
         }
     }
 
-    func process(movieFrame frame: CMSampleBuffer) {
-        let movieFrame = (CMSampleBufferGetImageBuffer(frame) == nil) ? lastImageBuffer : CMSampleBufferGetImageBuffer(frame)!
-        if movieFrame != nil {
-            process(movieFrame: movieFrame!, withSampleTime: currentTime)
-        }
-    }
-
+ 
     func process(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
         let bufferHeight = CVPixelBufferGetHeight(movieFrame)
         let bufferWidth = CVPixelBufferGetWidth(movieFrame)
@@ -527,4 +514,9 @@ class PQMoveFilter: PQBaseFilter {
         chrominanceFramebuffer.unlock()
         secondChrominanceFramebuffer?.unlock()
     }
+
+    
+    func nanosToAbs(_ nanos: UInt64) -> UInt64 {
+        return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
+    }
 }
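nanosToAbs above converts nanoseconds into Mach absolute-time ticks using mach_timebase_info (ticks = nanos * denom / numer). How it is used for playAtActualSpeed-style pacing is an assumption here, since the pacing code itself is not part of this diff; a typical pattern is to wait until the host time at which the next frame should be presented:

import Darwin

// Illustrative only: block until `nanos` nanoseconds from now, in Mach time units.
func waitUntil(nanosFromNow nanos: UInt64) {
    var timebaseInfo = mach_timebase_info_data_t()
    mach_timebase_info(&timebaseInfo)
    let ticks = nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
    mach_wait_until(mach_absolute_time() + ticks)
}

// e.g. hold roughly one frame interval for a 30 fps source:
// waitUntil(nanosFromNow: UInt64(1_000_000_000.0 / 30.0))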

+ 1 - 1
BFFramework/Classes/PQGPUImage/akfilters/Tools/PQCompositionExporter.swift

@@ -158,7 +158,7 @@ public class PQCompositionExporter {
         if(input != nil && lastshowSticker != currentSticker){
             var showFitler:PQBaseFilter?
             if currentSticker!.type == StickerType.VIDEO.rawValue {
-                showFitler = PQMoveFilter(movieSticker: currentSticker!)
+                showFitler = PQMovieFilter(movieSticker: currentSticker!)
      
 
             } else if currentSticker!.type == StickerType.IMAGE.rawValue {

+ 34 - 29
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointEditerController.swift

@@ -468,7 +468,7 @@ extension PQStuckPointEditerController {
         // Total duration of all the videos
         var videoTotalDuration: Float64 = 0.0
         for video in section.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials(type: "video") {
-            let asset: AVURLAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + video.locationPath), options: nil)
+            let asset: AVURLAsset = AVURLAsset(url: URL(fileURLWithPath: video.locationPath), options: nil)
             videoTotalDuration = videoTotalDuration + Float64(CMTimeGetSeconds(asset.duration))
         }
         if videoTotalDuration == 0 {
@@ -477,7 +477,7 @@ extension PQStuckPointEditerController {
         }
         for sticker in section.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
             if sticker.type == StickerType.VIDEO.rawValue {
-                let asset: AVURLAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + sticker.locationPath), options: nil)
+                let asset: AVURLAsset = AVURLAsset(url: URL(fileURLWithPath: sticker.locationPath), options: nil)
                 // Number of clips this asset should be split into
                 let clipNum = Int(max(round(Double(kongduan) * CMTimeGetSeconds(asset.duration) / videoTotalDuration), 1))
                 sticker.duration = CMTimeGetSeconds(asset.duration)
@@ -738,49 +738,53 @@ extension PQStuckPointEditerController {
             if synchroMarskView.superview == nil {
                 UIApplication.shared.keyWindow?.addSubview(synchroMarskView)
             }
-            let dispatchGroup = DispatchGroup()
+
             for photo in selectedPhotoData! {
                 if photo.asset != nil, photo.asset?.mediaType == .video, photo.locationPath.count <= 0 {
                     if !isHaveVideo {
                         isHaveVideo = true
                     }
-                    dispatchGroup.enter()
-                    PQPHAssetVideoParaseUtil.exportPHAssetToMP4(phAsset: photo.asset!, deliveryMode: .highQualityFormat) { [weak self] phAsset, _, filePath, _ in
-                        let tempPhoto = self?.selectedPhotoData?.first(where: { material in
-                            material.asset == phAsset
-                        })
-                        if tempPhoto != nil {
-                            if filePath != nil, (filePath?.count ?? 0) > 0 {
-                                tempPhoto?.locationPath = filePath?.replacingOccurrences(of: documensDirectory, with: "") ?? ""
-                                BFLog(message: "导出视频相册地址为")
+ 
+                    PQPHAssetVideoParaseUtil.parasToAVAsset(phAsset: photo.asset!) { avAsset, fileSize, _, _ in
+                        if avAsset is AVURLAsset {
+                            // Create the directory
+
+                            let fileName = (avAsset as! AVURLAsset).url.absoluteString
+                            BFLog(message: "video  fileName is\(fileName)")
+                            let tempPhoto = self.selectedPhotoData?.first(where: { material in
+                                material.asset == photo.asset
+                            })
+                            
+                            if(fileName.count ) > 0 {
+                                tempPhoto?.locationPath = fileName.replacingOccurrences(of: "file:///", with: "")
+                                BFLog(message: "导出视频相册地址为 \(fileName)")
+                            }
+ 
+                            DispatchQueue.main.async {
+                                self.isExportVideosSuccess = true
+                                BFLog(message: "所有相册视频导出成功")
+                              
+                                    self.dealWithDataSuccess()
+                                
                             }
-                            dispatchGroup.leave()
+                            
                         }
                     }
+  
                 }
             }
-            dispatchGroup.notify(queue: DispatchQueue.main) { [weak self] in
-                self?.isExportVideosSuccess = true
-                BFLog(message: "所有相册视频导出成功")
-                // 处理所有数据完成
-                if isHaveVideo {
-                    self?.dealWithDataSuccess()
-                }
-            }
-        }
-        if !isHaveVideo {
-            isExportVideosSuccess = true
-            // 处理所有数据完成
-            dealWithDataSuccess()
+            
         }
+ 
     }
 
     /// Called when all data processing has completed
     /// - Returns: <#description#>
     func dealWithDataSuccess() {
-        if !isSynchroMusicInfoSuccess || !isExportVideosSuccess || !isStuckPointDataSuccess {
-            return
-        }
+//        if !isSynchroMusicInfoSuccess  || !isStuckPointDataSuccess {
+//            return
+//        }
+        BFLog(message: "this is rrrrrrrrr")
         playeTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(Int64((stuckPointMusicData?.startTime ?? 0) * 600)), timescale: 600), end: CMTime(value: CMTimeValue(Int64((stuckPointMusicData?.endTime ?? 0) * 600)), timescale: 600))
         createPorjectData()
         settingPlayerView()
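The change above drops the MP4 export (exportPHAssetToMP4 plus the DispatchGroup) and instead resolves each album PHAsset to its backing file via PQPHAssetVideoParaseUtil.parasToAVAsset, storing the file path on the material. Assuming that helper wraps something like PHImageManager's requestAVAsset (the wrapper itself is not shown in this diff), the idea looks roughly like this:

import Photos
import AVFoundation

// Illustrative only: resolve a video PHAsset to the path of its backing file.
func resolveLocationPath(for phAsset: PHAsset, completion: @escaping (String?) -> Void) {
    let options = PHVideoRequestOptions()
    options.version = .current
    options.deliveryMode = .highQualityFormat
    PHImageManager.default().requestAVAsset(forVideo: phAsset, options: options) { avAsset, _, _ in
        guard let urlAsset = avAsset as? AVURLAsset else {
            completion(nil)
            return
        }
        // Mirror the diff: strip the "file:///" scheme prefix to get a plain path.
        completion(urlAsset.url.absoluteString.replacingOccurrences(of: "file:///", with: ""))
    }
}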
@@ -789,3 +793,4 @@ extension PQStuckPointEditerController {
         }
     }
 }
+

+ 1 - 1
BFFramework/Classes/Stuckpoint/ViewModel/PQGPUImagePlayerView.swift

@@ -441,7 +441,7 @@ public class PQGPUImagePlayerView: UIView, RenderViewDelegate {
 
             var showFitler: PQBaseFilter?
             if currentSticker!.type == StickerType.VIDEO.rawValue {
-                showFitler = PQMoveFilter(movieSticker: currentSticker!)
+                showFitler = PQMovieFilter(movieSticker: currentSticker!)
 
             } else if currentSticker!.type == StickerType.IMAGE.rawValue {
                 showFitler = PQImageFilter(sticker: currentSticker!)

+ 1 - 1
BFFramework/Classes/Stuckpoint/ViewModel/PQPlayerViewModel.swift

@@ -65,7 +65,7 @@ open class PQPlayerViewModel: NSObject {
                             filters.append(imageFilter)
 
                         } else if sticker.type == StickerType.VIDEO.rawValue {
-                            let videoFilter = PQMoveFilter(movieSticker: sticker)
+                            let videoFilter = PQMovieFilter(movieSticker: sticker)
 
                             filters.append(videoFilter)