
Split video recording

harry committed 3 years ago
9af6998fff

+ 31 - 1
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenBaseManager.swift

@@ -10,10 +10,24 @@ import GPUImage
 
 
 class BFRecordScreenBaseManager {
-    var recordItem : BFRecordItemModel?
+    
+    weak var dele:BFRecordScreenController?
+    
+    weak var assetPlayer: AVPlayer?
+    weak var recordPlayer : AVPlayer?
+    
+    var recordItem : BFRecordItemModel?{
+        didSet{
+            recordItem?.fetchPlayItemCallBack = {[weak self] model in
+                self?.resetEnv()
+            }
+        }
+    }
     var playView : GPUImageView?
     var filter = GPUImageFilter()
     
+    var currentAssetProgress: CMTime = .zero
+
     func resetEnv(){
         
     }
@@ -21,4 +35,20 @@ class BFRecordScreenBaseManager {
     func startRecord(){
         
     }
+    
+    func endRecord(){
+        
+    }
+    
+    func play(){
+        
+    }
+    
+    func pause(){
+        
+    }
+    
+    func clean(){
+        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
+    }
 }
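
The base manager above only declares the shared lifecycle hooks (resetEnv/startRecord/endRecord/play/pause/clean) plus weak back-references to the controller and players; the concrete video/image/camera managers later in this commit override them. A minimal sketch of the intended call pattern, not part of the commit (the DemoManager type here is hypothetical):

    import AVFoundation

    // Hypothetical stand-in mirroring the hooks BFRecordScreenBaseManager declares above.
    class DemoManager {
        var currentAssetProgress: CMTime = .zero
        func resetEnv()    { /* rebuild the preview pipeline for the current item */ }
        func startRecord() { /* start capture / playback for the recording pass */ }
        func endRecord()   { /* stop capture and hand back the result */ }
        func play()        { /* resume normal playback */ }
        func pause()       { /* pause playback */ }
        func clean()       { /* release GPU resources */ }
    }

    // The controller keeps one "current" manager and calls the same hooks
    // regardless of whether the item is a video, an image, or the live camera.
    let current = DemoManager()
    current.resetEnv()
    current.startRecord()
    current.endRecord()
    current.clean()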

+ 51 - 6
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenCameraManager.swift

@@ -7,21 +7,66 @@
 
 import Foundation
 import GPUImage
+import BFCommonKit
+import BFUIKit
+import BFMediaKit
 
 
 class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
 
-    let camera = GPUImageVideoCamera()
+    //
+    var rendView: BFRecordAvatarView = {
+        let v = BFRecordAvatarView(frame: .zero, sessionPreset: AVCaptureSession.Preset.hd1920x1080, location: .backFacing)
+        v.videoPixelsSize = Size(width: Float(cScreenWidth), height: Float(cScreenWidth) * 16.0 / 9.0)
+        return v
+    }()
+    
+    lazy var camera : GPUImageVideoCamera? = {
+        let camera = GPUImageVideoCamera(sessionPreset: AVCaptureSession.Preset.hd1920x1080.rawValue, cameraPosition: AVCaptureDevice.Position.back)
+        camera?.outputImageOrientation = UIInterfaceOrientation.portrait
+        return camera
+        
+    }()
+    
+    lazy var movieWrite : GPUImageMovieWriter? = {
+        let vpath = NSHomeDirectory() + "/camera_writer.mov"
+        let writer = GPUImageMovieWriter(movieURL: URL(fileURLWithPath: vpath), size: CGSize(width: cScreenWidth, height: cScreenWidth * 16 / 9.0), fileType: ".mov", outputSettings: [:])
+        filter.addTarget(writer)
+        
+        return writer
+    }()
     
     override func resetEnv(){
-        if let pv = playView{
-            camera.startCapture()
-            camera.addTarget(filter)
-            filter.addTarget(pv)
-        }
+//        guard let camera = camera else {
+//            cShowHUB(superView: nil, msg: "摄像头开启失败!")
+//            return
+//        }
+//        camera.startCapture()
+//        camera.addTarget(filter)
+//        filter.addTarget(pv)
+        rendView.openCamera()
     }
     
     override func startRecord(){
+//        guard let movieWrite = movieWrite else {
+//            cShowHUB(superView: nil, msg: "录制启动失败")
+//            return
+//        }
+//
+//        movieWrite.startRecording()
         
+        rendView.beginRecord(startTime: .zero)
+
+    }
+    
+    override func endRecord(){
+        let vpath = NSHomeDirectory() + "/camera_writer.mov"
+        let newPath = PQBridgeObject.p_setupFileRename(vpath)
+        
+        rendView.endRecord()
+    }
+    
+    override func clean() {
+        super.clean()
     }
 }
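
For reference, GPUImageMovieWriter's fileType parameter is normally an AVFileType UTI rather than a bare extension such as ".mov". A minimal sketch of that construction, assuming the Objective-C GPUImage API (the path and size are illustrative, not the commit's values):

    import AVFoundation
    import GPUImage

    // Sketch only: nil outputSettings lets the writer fall back to its default video settings.
    let path = NSTemporaryDirectory() + "camera_writer.mov"   // illustrative path
    let writer = GPUImageMovieWriter(
        movieURL: URL(fileURLWithPath: path),
        size: CGSize(width: 1080, height: 1920),
        fileType: AVFileType.mov.rawValue,                     // "com.apple.quicktime-movie"
        outputSettings: nil
    )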

+ 115 - 169
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -37,8 +37,19 @@ public class BFRecordScreenController: BFBaseViewController {
 //    public var currentRecordId: String? // 当前录制Id
 
     // MARK: - 素材参数
-    var rsvmanager = BFRecordScreenVideoManager()
-    var rsimanager = BFRecordScreenImageManager()
+    lazy var rsvmanager : BFRecordScreenVideoManager = {
+        let m = BFRecordScreenVideoManager()
+        m.assetPlayer = assetPlayer
+        m.recordPlayer = recordPlayer
+        return m
+    }()
+    
+    lazy var rsimanager : BFRecordScreenImageManager = {
+        let m = BFRecordScreenImageManager()
+        m.recordPlayer = recordPlayer
+        return m
+    }()
+    
     var rscmanager = BFRecordScreenCameraManager()
     var rscurrentManager = BFRecordScreenBaseManager()
     
@@ -50,7 +61,6 @@ public class BFRecordScreenController: BFBaseViewController {
                 return
             }
             let itemModel = itemModels[currItemModelIndex]
-            rscurrentManager.recordItem = itemModel
             
             switch itemModel.mediaType {
             case .Image:
@@ -71,22 +81,37 @@ public class BFRecordScreenController: BFBaseViewController {
             default:
                 break
             }
-            
-            if let cell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFCameraCoverViewCell {
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
-                rscurrentManager.playView = cell.playView
-                rscurrentManager.resetEnv()
-            }else if let cell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFVideoCoverViewCell {
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
+            rscurrentManager.dele = self
+            if let cell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
+                if (cell as? BFCameraCoverViewCell) != nil {
+                    cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
+                    progressThumV.isHidden = true
+                    if let rendV = (rscurrentManager as? BFRecordScreenCameraManager)?.rendView {
+                        rendV.removeFromSuperview()
+                        cell.playView.addSubview(rendV)
+                        rendV.snp.makeConstraints { make in
+                            make.edges.equalToSuperview()
+                        }
+                        
+                        rendV.recordEndCallBack = {[weak self] isSuccess, sticker in
+                            if let sticker = sticker {
+                                itemModel.videoStickers.append(sticker)
+                            }
+                        }
+                    }
+                    
+                }else {
+                    cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 0), at: 0)
+                    progressThumV.isHidden = false
+                    if itemModel.mediaType == .Video {
+                        rsvmanager.assetPlayer
+                    }
+                }
                 rscurrentManager.playView = cell.playView
-                progressThumV.isHidden = false
-                rscurrentManager.resetEnv()
-            }else if let cell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
-                rscurrentManager.playView = cell.playView
-                progressThumV.isHidden = false
+                rscurrentManager.recordItem = itemModel
                 rscurrentManager.resetEnv()
             }
+
         }
     }
     public var itemModels = [BFRecordItemModel]()
@@ -119,22 +144,22 @@ public class BFRecordScreenController: BFBaseViewController {
     }
 
     var currentAssetProgress: CMTime = .zero
-//    {
-//        didSet {
-//            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
-//        }
-//    } // 当前素材播放的进度
+    {
+        didSet {
+            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
+            rscurrentManager.currentAssetProgress = currentAssetProgress
+        }
+    } // 当前素材播放的进度
     // 播放器开始播放时间
     var recordStartPlayTime: CMTime = .zero
     // 某个录音开始播放时间
     var currenStartPlayTime: CMTime = .zero
     var pauseTime: Double = 0 // 停止无操作的时间点
 
-    var assetPlayer: AVPlayer? // 原视频音频播放器
+    var assetPlayer: AVPlayer = AVPlayer(playerItem: nil) // 原视频音频播放器
+    var recordPlayer: AVPlayer = AVPlayer(playerItem: nil) // 录音音频播放器
 
     var hadPrepareToPlayRecord = false // 录音播放器准备
-    var recordPlayer: AVPlayer? // 录音音频播放器
-    var movie: GPUImageMovie? // 视频预览
     var playView: GPUImageView? // 视频展示视图
 
     // MARK: 行为参数
@@ -370,11 +395,11 @@ public class BFRecordScreenController: BFBaseViewController {
             self?.noSpeakVolume = noHaveSpeak / 100.0
             if !(self?.isNormalPlaying ?? false), !(self?.isRecording ?? false) {
                 if self?.deleteRecordBtn.isHidden ?? false {
-//                    self?.recordPlayer?.volume = 0
-                    self?.assetPlayer?.volume = self?.noSpeakVolume ?? 1.0
+//                    self?.recordPlayer.volume = 0
+                    self?.assetPlayer.volume = self?.noSpeakVolume ?? 1.0
                 } else {
-//                    self?.recordPlayer?.volume = 1.0
-                    self?.assetPlayer?.volume = self?.haveSpeakVolume ?? 0.0
+//                    self?.recordPlayer.volume = 1.0
+                    self?.assetPlayer.volume = self?.haveSpeakVolume ?? 0.0
                 }
             }
         }
@@ -461,15 +486,18 @@ public class BFRecordScreenController: BFBaseViewController {
     // MARK: - ----------------- 生命周期
 
     deinit {
-        cleanMovieTarget()
+        rscmanager.clean()
+        rsvmanager.clean()
+        rsimanager.clean()
+        
         NotificationCenter.default.removeObserver(self)
         avplayerTimeObserver?.invalidate()
         recordPlayerTimeObserver?.invalidate()
         if isRecording {
             recorderManager?.stopRecord(isCancel: true)
         }
-        assetPlayer?.pause()
-        recordPlayer?.pause()
+        assetPlayer.pause()
+        recordPlayer.pause()
     }
 
     @objc func didBecomeActive() {
@@ -1152,19 +1180,12 @@ public class BFRecordScreenController: BFBaseViewController {
         BFLog(3, message: "开始录制-开始:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),\(model)")
         recorderManager?.startRecord()
         recorderManager?.audioRecorder?.startNeoNui(NeoNuiToken ?? "", appid: NeoNuiAPPID ?? "")
-        isRecording = true
 
         if !avatarView.isHidden {
             avatarView.beginRecord()
         }
-        if itemModels[currItemModelIndex].mediaType == .Video {
-            if !movieIsProcessing {
-                movie?.startProcessing()
-                movieIsProcessing = true
-            }
-            assetPlayer?.volume = 0
-            assetPlayer?.play()
-        }
+
+        rscurrentManager.startRecord()
         // 录制中不显示播放按钮
         playBtn.isSelected = true
         playBtn.isHidden = playBtn.isSelected
@@ -1573,27 +1594,27 @@ public class BFRecordScreenController: BFBaseViewController {
 
     func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed _: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
 //        if currentPlayRecordIndex >= 0 {
-//            if assetPlayer?.volume != haveSpeakVolume{
-//                assetPlayer?.volume = haveSpeakVolume
+//            if assetPlayer.volume != haveSpeakVolume{
+//                assetPlayer.volume = haveSpeakVolume
 //            }
 //        }else{
-//            if assetPlayer?.volume != noSpeakVolume {
-//                assetPlayer?.volume = noSpeakVolume
+//            if assetPlayer.volume != noSpeakVolume {
+//                assetPlayer.volume = noSpeakVolume
 //            }
 //        }
 
         if itemModels[currItemModelIndex].voiceStickers.first(where: { m in
             CMTimeCompare(m.startCMTime, currentT) <= 0 && CMTimeCompare(currentT, m.endCMTime) <= 0
         }) != nil {
-            if assetPlayer?.volume != haveSpeakVolume {
-                assetPlayer?.volume = haveSpeakVolume
+            if assetPlayer.volume != haveSpeakVolume {
+                assetPlayer.volume = haveSpeakVolume
             }
         } else {
-            if assetPlayer?.volume != noSpeakVolume {
-                assetPlayer?.volume = noSpeakVolume
+            if assetPlayer.volume != noSpeakVolume {
+                assetPlayer.volume = noSpeakVolume
             }
         }
-//        BFLog(1, message: "volume:\(assetPlayer?.volume ?? -1)")
+//        BFLog(1, message: "volume:\(assetPlayer.volume ?? -1)")
 
         if currentPlayRecordIndex == -3 { // 刚录音完,不需要播放
             return
@@ -1615,20 +1636,17 @@ public class BFRecordScreenController: BFBaseViewController {
 //        BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startCMTime.seconds) ~ \(recordedAudio.endCMTime.seconds), \(recordedAudio.wavFilePath ?? "")")
 
         // 创建播放器
-        if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString?)?.lastPathComponent {
+        if (recordPlayer.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString?)?.lastPathComponent {
             let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
-            BFLog(1, message: "录音播放器初始化:\(recordPlayer == nil ? "init player" : "replace item")")
+            BFLog(1, message: "录音播放器初始化:\("replace item")")
 
-            if let player = recordPlayer {
-                player.pause()
-                if let playItem = player.currentItem {
-                    NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
-                    recordPlayer?.replaceCurrentItem(with: newItem)
-                }
-            } else {
-                recordPlayer = AVPlayer(playerItem: newItem)
+            recordPlayer.pause()
+            if let playItem = recordPlayer.currentItem {
+                NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
+                recordPlayer.replaceCurrentItem(with: newItem)
             }
-            recordPlayer?.volume = 1
+            
+            recordPlayer.volume = 1
             currentPlayRecordIndex = -1
             hadPrepareToPlayRecord = false
             BFLog(1, message: "录音播放器初始化(有时候不准)")
@@ -1640,13 +1658,13 @@ public class BFRecordScreenController: BFBaseViewController {
                 }
                 sself.hadPrepareToPlayRecord = false
                 sself.currentPlayRecordIndex = -1
-//                sself.recordPlayer?.volume = 0
-//                sself.assetPlayer?.volume = sself.noSpeakVolume
+//                sself.recordPlayer.volume = 0
+//                sself.assetPlayer.volume = sself.noSpeakVolume
                 BFLog(3, message: "播放结束")
                 didPlayToEndTime((shouldPlayRecordIndex, recordedAudio), newItem)
             }
             recordPlayerTimeObserver?.invalidate()
-            recordPlayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer] time in
+            recordPlayerTimeObserver = recordPlayer.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer] time in
                 guard let sself = self, let rPlay = recordPlayer else {
                     BFLog(3, message: "sself为空")
                     return
@@ -1678,22 +1696,22 @@ public class BFRecordScreenController: BFBaseViewController {
     ///   - shouldPlayRecordIndex: <#shouldPlayRecordIndex description#>
     ///   - recordedAudio: <#recordedAudio description#>
     func videoMaterialRecordPlay(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: PQVoiceModel) {
-        //        if recordPlayer?.currentItem?.duration.timescale == 0 {
+        //        if recordPlayer.currentItem?.duration.timescale == 0 {
         //            BFLog(1, message: "时间timescale  == 0")
-        //            playFailed((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), recordPlayer?.currentItem)
+        //            playFailed((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), recordPlayer.currentItem)
         //            return
         //        }
         synced(currentPlayRecordIndex) { [weak self] in
             guard let sself = self else {
                 return
             }
-//            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startCMTime.seconds),\(CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2)")
+//            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startCMTime.seconds),\(CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2)")
 
             if !hadPrepareToPlayRecord,
                CMTimeGetSeconds(currentT) >= (recordedAudio.startCMTime.seconds - 0.1),
                CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2 // 这个条件是避免录音结束后有小幅度回退导致播放最新录音
             {
-                if itemModels[currItemModelIndex].mediaType == .Video, recordPlayer?.currentItem?.duration.timescale == 0 {
+                if itemModels[currItemModelIndex].mediaType == .Video, recordPlayer.currentItem?.duration.timescale == 0 {
                     return
                 }
                 // 应当开始播放了
@@ -1701,19 +1719,19 @@ public class BFRecordScreenController: BFBaseViewController {
                 if currentPlayRecordIndex == -1, sself.isNormalPlaying {
                     let second = currentT - recordedAudio.startCMTime
                     DispatchQueue.main.async { [weak self] in
-                        self?.recordPlayer?.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
+                        self?.recordPlayer.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
                             if finished, self?.isNormalPlaying ?? false {
-                                self?.recordPlayer?.play()
-//                                self?.recordPlayer?.volume = 1
-//                                self?.assetPlayer?.volume = self?.haveSpeakVolume ?? 0
-                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds ?? 0)")
+                                self?.recordPlayer.play()
+//                                self?.recordPlayer.volume = 1
+//                                self?.assetPlayer.volume = self?.haveSpeakVolume ?? 0
+                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer.currentItem?.duration ?? .zero)), \(self?.recordPlayer.currentItem?.currentTime().seconds ?? 0)")
                             }
                         })
                     }
                     currentPlayRecordIndex = shouldPlayRecordIndex
                     hadPrepareToPlayRecord = true
                     BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
-                    BFLog(1, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
+                    BFLog(1, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
                 }
             }
         }
@@ -1729,7 +1747,7 @@ public class BFRecordScreenController: BFBaseViewController {
             guard let sself = self, sself.isNormalPlaying else {
                 return
             }
-//            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startCMTime.seconds),\(CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2)")
+//            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startCMTime.seconds),\(CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2)")
 
             if CMTimeGetSeconds(currentT) >= (recordedAudio.startCMTime.seconds - 0.1),
                CMTimeCompare(currentT, recordedAudio.endCMTime) <= 0 // 这个条件是避免录音结束后有小幅度回退导致播放最新录音
@@ -1738,10 +1756,10 @@ public class BFRecordScreenController: BFBaseViewController {
                 // 两个逻辑:如果在播,则跳过;如果暂停拖动到中间,则seek
                 if sself.isNormalPlaying {
                     let second = currentT - recordedAudio.startCMTime
-                    sself.recordPlayer?.seek(to: second, toleranceBefore: CMTime(seconds: 1, preferredTimescale: 1000), toleranceAfter: CMTime(seconds: 1, preferredTimescale: 1000), completionHandler: { isFinished in
+                    sself.recordPlayer.seek(to: second, toleranceBefore: CMTime(seconds: 1, preferredTimescale: 1000), toleranceAfter: CMTime(seconds: 1, preferredTimescale: 1000), completionHandler: { isFinished in
                     })
-                    sself.recordPlayer?.play()
-                    BFLog(3, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
+                    sself.recordPlayer.play()
+                    BFLog(3, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
                 }
             }
         }
@@ -1758,7 +1776,7 @@ public class BFRecordScreenController: BFBaseViewController {
         isNormalPlaying = true
         if isEndPlay || (itemModels[currItemModelIndex].mediaType == .Image && CMTimeCompare(currentAssetProgress, itemModels[currItemModelIndex].materialDuraion) >= 0) {
             isEndPlay = false
-            assetPlayer?.seek(to: CMTime.zero)
+            assetPlayer.seek(to: CMTime.zero)
             progressThumV.progress = 0
             currentPlayRecordIndex = -1
             BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
@@ -1767,18 +1785,9 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             currentAssetProgress = CMTime.zero
         }
-        if itemModels[currItemModelIndex].mediaType == .Video {
-            if !movieIsProcessing {
-                movie?.startProcessing()
-                movieIsProcessing = true
-            }
-            // add by ak 切换段落时会有一小段原素材的声音播放时先把视频原音量设置为0
-            assetPlayer?.volume = 0
-            assetPlayer?.play()
-        } else {
-            // 处理图片音频播放
-            startPlayRecord(time: currentAssetProgress)
-        }
+        
+        rscurrentManager.play()
+        
 
         deleteRecordBtn.isHidden = true
 
@@ -1804,8 +1813,8 @@ public class BFRecordScreenController: BFBaseViewController {
         withDrawBtn.isHidden = false
         recordBtn.isHidden = (itemModels[currItemModelIndex].mediaType == .Image && isEndPlay) ? false : isEndPlay
 
-        assetPlayer?.pause()
-        recordPlayer?.pause()
+        assetPlayer.pause()
+        recordPlayer.pause()
         recordStartPlayTime = CMTime.zero
         pauseTime = currentAssetProgress.seconds
         currentPlayRecordIndex = -1
@@ -1833,7 +1842,6 @@ public class BFRecordScreenController: BFBaseViewController {
                                 self?.progressThumV.recordItem = itemModel
                                 self?.progressThumV.isHidden = false
                                 self?.recordBtn.isEnabled = true
-//                                self?.reloadVideoMaterial()
                             }
                         }
                     } else {
@@ -1865,43 +1873,21 @@ public class BFRecordScreenController: BFBaseViewController {
         }
     }
 
-    func setVideoPlay(item: AVPlayerItem?, imageView: GPUImageView?) {
-        guard let playerItem = item else {
-            return
-        }
-        guard let preView = imageView else {
-            return
-        }
-        if movie != nil {
-            cleanMovieTarget()
-        }
-        movie = GPUImageMovie(playerItem: playerItem)
-        //        movie?.runBenchmark = true
-        movie?.playAtActualSpeed = true
-
-        let filter = GPUImageFilter()
-        movie?.addTarget(filter)
-        filter.addTarget(preView)
-
-        movie?.startProcessing()
-        movieIsProcessing = true
-    }
-
     func setAudioPlay(item: AVPlayerItem?) {
         BFLog(message: "设置播放器item:\(String(describing: item))")
         guard let item = item else {
             return
         }
-        if let playItem = assetPlayer?.currentItem {
+        if let playItem = assetPlayer.currentItem {
             NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
-            assetPlayer?.replaceCurrentItem(with: item)
+            assetPlayer.replaceCurrentItem(with: item)
             BFLog(message: "设置播放器playItem-替换:\(String(describing: item))")
         } else {
-            assetPlayer = AVPlayer(playerItem: item)
+            assetPlayer.replaceCurrentItem(with: item)
             BFLog(message: "设置播放器item-替换:\(String(describing: item))")
-//            assetPlayer?.volume = noSpeakVolume
+//            assetPlayer.volume = noSpeakVolume
             avplayerTimeObserver?.invalidate()
-            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
+            avplayerTimeObserver = assetPlayer.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
                 // 进度监控
                 self?.periodicTimeObserver(item: item, time: time)
                 if self?.isNormalPlaying ?? false {
@@ -1911,7 +1897,7 @@ public class BFRecordScreenController: BFBaseViewController {
             } as? NSKeyValueObservation
         }
 
-        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer?.currentItem, queue: .main) { [weak self] _ in
+        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer.currentItem, queue: .main) { [weak self] _ in
             guard let sself = self else {
                 return
             }
@@ -1973,19 +1959,6 @@ public class BFRecordScreenController: BFBaseViewController {
         }
     }
 
-    func cleanMovieTarget() {
-        movie?.cancelProcessing()
-        movieIsProcessing = false
-        movie?.targets().forEach { target in
-            if let objc = target as? GPUImageOutput {
-                objc.removeAllTargets()
-            }
-        }
-        movie?.removeAllTargets()
-        movie?.removeFramebuffer()
-        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
-    }
-
     // MARK: - 录音对应图像绘制
 
     // 撤销按钮修改title,重绘
@@ -2024,7 +1997,7 @@ public class BFRecordScreenController: BFBaseViewController {
                     BFLog(message: "更新录音进度\(#function)- \(self?.currentAssetProgress.seconds ?? 0)")
                     self!.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
                 }
-                assetPlayer?.seek(to: currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000)) { _ in
+                assetPlayer.seek(to: currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000)) { _ in
                 }
             }
         } else {
@@ -2067,28 +2040,6 @@ public class BFRecordScreenController: BFBaseViewController {
             imageRecordProgress(isRecord: true, progress: progress)
         }
     }
-
-    // 修正视频旋转方向,因为自己录制的竖屏视频会预览为横屏
-    func reloadVideoMaterial() {
-        let recordItem = itemModels[currItemModelIndex]
-        BFLog(message: "设置播放器reloadMaterial:\(recordItem)")
-        if let vasset = recordItem.videoAsset, let playItem = recordItem.playItem, let cell: BFImageCoverViewCell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
-            BFLog(message: "设置播放器reloadMaterial-开始:\(recordItem)")
-            setVideoPlay(item: playItem, imageView: cell.playView)
-            setAudioPlay(item: playItem)
-            let degress = degressFromVideoFile(asset: vasset)
-            switch degress {
-            case 90:
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
-            case 180:
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 7), at: 0)
-            case 270:
-                cell.playView.setInputRotation(GPUImageRotationMode(rawValue: 1), at: 0)
-            default:
-                break
-            }
-        }
-    }
 }
 
 extension BFRecordScreenController: GPUImageMovieDelegate {
@@ -2152,16 +2103,14 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             }
         }
         recordItem.fetchPlayItemCallBack = { [weak self, weak recordItem] item in
-            guard item != nil else {
-                cShowHUB(superView: nil, msg: "视频获取失败:\(recordItem?.index ?? 0)")
+            guard let sself = self else {
                 return
             }
-            guard let sself = self else {
+            guard item != nil else {
+                cShowHUB(superView: nil, msg: "视频获取失败:\(recordItem?.index ?? 0)")
                 return
             }
-//            if indexPath.item == sself.currItemModelIndex {
-//                sself.reloadVideoMaterial()
-//            }
+            sself.rscurrentManager.resetEnv()
         }
         cell.btnClickHandle = { [weak self] _, _ in
             guard let sself = self else {
@@ -2234,8 +2183,8 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             currentPlayRecordIndex = -1
             showSubtitleIndex = -1
             // 重置播放器
-            assetPlayer?.seek(to: CMTime.zero)
-            recordPlayer?.seek(to: CMTime.zero)
+            assetPlayer.seek(to: CMTime.zero)
+            recordPlayer.seek(to: CMTime.zero)
 
             if let voice = itemModels[page].voiceStickers.enumerated().first(where: { m in
                 m.1.startTime == 0
@@ -2246,10 +2195,7 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             searchStopAtRecordRange()
             changeWithDrawBtnLayout(0)
             pauseTime = 0
-            if recordItem.mediaType == .Video {
-                reloadVideoMaterial()
-                assetPlayer?.seek(to: .zero, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000))
-            }
+
             if changeItemHandle != nil {
                 changeItemHandle!(page)
             }
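
For context on the time-observer lines in this file: AVPlayer's addPeriodicTimeObserver(forInterval:queue:using:) returns an opaque token of type Any, and teardown goes through removeTimeObserver(_:); the token is not an NSKeyValueObservation, so casting it and calling invalidate() will not remove the observer. A minimal sketch of the standard pattern (illustrative names, not the project's code):

    import AVFoundation

    let player = AVPlayer(playerItem: nil)
    var timeObserverToken: Any?

    // Fires roughly every millisecond of media time on a background queue.
    timeObserverToken = player.addPeriodicTimeObserver(
        forInterval: CMTime(value: 1, timescale: 1000),
        queue: .global()
    ) { time in
        print("progress: \(time.seconds)")
    }

    // Later (e.g. in deinit) remove the observer using the stored token.
    if let token = timeObserverToken {
        player.removeTimeObserver(token)
        timeObserverToken = nil
    }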

+ 13 - 0
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenImageManager.swift

@@ -8,7 +8,20 @@
 import Foundation
 
 class BFRecordScreenImageManager : BFRecordScreenBaseManager{
+    
+    override func resetEnv() {
+        
+    }
+    
     override func startRecord() {
         super.startRecord()
     }
+    
+    override func play() {
+        dele?.startPlayRecord(time: currentAssetProgress)
+    }
+    
+    override func clean() {
+        super.clean()
+    }
 }

+ 67 - 0
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenVideoManager.swift

@@ -6,9 +6,76 @@
 //
 
 import Foundation
+import GPUImage
+import AVFoundation
+
 
 class BFRecordScreenVideoManager : BFRecordScreenBaseManager{
+    var movie: GPUImageMovie? // 视频预览
+    var movieIsProcessing : Bool = false
+    
+    override func resetEnv() {
+        guard let playerItem = recordItem?.playItem else {
+            return
+        }
+        guard let preView = playView else {
+            return
+        }
+        if movie != nil {
+            cleanMovieTarget()
+        }
+        movie = GPUImageMovie(playerItem: playerItem)
+        //        movie?.runBenchmark = true
+        movie?.playAtActualSpeed = true
+
+        let filter = GPUImageFilter()
+        movie?.addTarget(filter)
+        filter.addTarget(preView)
+
+        movie?.startProcessing()
+        movieIsProcessing = true
+        
+        dele?.setAudioPlay(item: playerItem)
+//        assetPlayer?.seek(to: .zero, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000))
+
+    }
+    
     override func startRecord() {
+        
+        if !movieIsProcessing {
+            movie?.startProcessing()
+            movieIsProcessing = true
+        }
+        assetPlayer?.volume = 0
+        assetPlayer?.play()
+
         super.startRecord()
     }
+    
+    override func play() {
+        if !movieIsProcessing {
+            movie?.startProcessing()
+            movieIsProcessing = true
+        }
+        assetPlayer?.volume = 0
+        assetPlayer?.play()
+    }
+    
+    
+    func cleanMovieTarget() {
+        movie?.cancelProcessing()
+        movieIsProcessing = false
+        movie?.targets().forEach { target in
+            if let objc = target as? GPUImageOutput {
+                objc.removeAllTargets()
+            }
+        }
+        movie?.removeAllTargets()
+        movie?.removeFramebuffer()
+    }
+    
+    override func clean() {
+        cleanMovieTarget()
+        super.clean()
+    }
 }

+ 19 - 12
BFRecordScreenKit/Classes/RecordScreen/View/BFRecordAvatarView.swift

@@ -9,6 +9,7 @@ import BFCommonKit
 import BFMediaKit
 import BFUIKit
 import Foundation
+import GPUImage
 
 // 录制完成回调
 typealias recordEndCallBack = (_ isSucess: Bool, _ material: PQEditVisionTrackMaterialsModel?) -> Void
@@ -40,30 +41,33 @@ class BFRecordAvatarView: UIView {
     // 预览时纹理大小&合成视频大小
     var videoPixelsSize: Size = Size(width: 120 * Float(UIScreen.main.scale), height: 120 * Float(UIScreen.main.scale))
 
-    override init(frame: CGRect) {
+    init(frame: CGRect, sessionPreset:AVCaptureSession.Preset = .high, location:PhysicalCameraLocation = .frontFacing) {
         super.init(frame: frame)
-        backgroundColor = .yellow
+//        backgroundColor = .yellow
 
         renderView = RenderView(frame: bounds)
         addSubview(renderView)
+        renderView.snp.makeConstraints { make in
+            make.edges.equalToSuperview()
+        }
 
         addSubview(closedBtn)
 //        closedBtn.frame = CGRect(x: frame.maxX - 68, y: frame.maxY - 68, width: 68, height: 68)
-//        do {
-//            camera = try Camera(sessionPreset: .high, location: .frontFacing, captureAsYUV: true)
-//            //            camera.runBenchmark = true
-//
+        do {
+            camera = try Camera(sessionPreset: sessionPreset, location: location, captureAsYUV: true)
+            //            camera.runBenchmark = true
+
 //            let conertFilter = PQCornerFilter()
 //            let cropFilter = Crop()
 //            cropFilter.cropSizeInPixels = videoPixelsSize
 //            cropFilter.cropSizeInPixels = Size(width: 1080, height: 1080)
 //            cropFilter.locationOfCropInPixels = Position(0, (1920 - 1080) / 2)
-//
-////            camera  --> cropFilter --> conertFilter --> renderView
-////            camera  --> cropFilter --> conertFilter --> renderView
-//        } catch {
-//            fatalError("Could not initialize rendering pipeline: \(error)")
-//        }
+
+//            camera  --> cropFilter --> conertFilter --> renderView
+//            camera  --> cropFilter --> conertFilter --> renderView
+        } catch {
+            fatalError("Could not initialize rendering pipeline: \(error)")
+        }
 
         let Drag = UIPanGestureRecognizer(target: self, action: #selector(onDrag(gesture:)))
         addGestureRecognizer(Drag)
@@ -86,6 +90,9 @@ class BFRecordAvatarView: UIView {
     // 打开摄像头
     func openCamera() {
         camera.startCapture()
+//        let filter = SketchFilter()
+        camera.addTarget(renderView, atTargetIndex: 0)
+//        filter.addTarget(renderView, atTargetIndex: 0)
     }
 
     // 关闭摄像头
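
For orientation, BFRecordAvatarView uses the Swift GPUImage (GPUImage2) Camera/RenderView API. A standalone pipeline under that assumption usually looks roughly like this (sketch only, not the commit's code):

    import AVFoundation
    import GPUImage   // GPUImage2 (Swift)

    let renderView = RenderView(frame: CGRect(x: 0, y: 0, width: 360, height: 640))
    do {
        // Back camera at 1080p, chained straight into the render view.
        let camera = try Camera(sessionPreset: .hd1920x1080, location: .backFacing)
        camera --> renderView          // GPUImage2's pipeline operator
        camera.startCapture()
    } catch {
        print("Could not initialize camera: \(error)")
    }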

+ 15 - 0
BFRecordScreenKit/Classes/RecordScreen/View/Cell/BFVideoCoverViewCell.swift

@@ -15,5 +15,20 @@ open class BFVideoCoverViewCell: BFImageCoverViewCell {
         let cell: BFVideoCoverViewCell = collectionView.dequeueReusableCell(withReuseIdentifier: String(describing: BFVideoCoverViewCell.self), for: indexPath) as! BFVideoCoverViewCell
         return cell
     }
+    
+    public override func addData() {
+        super.addData()
+        let degress = degressFromVideoFile(asset: (recordItem?.videoAsset)!)
+        switch degress {
+        case 90:
+            playView.setInputRotation(GPUImageRotationMode(rawValue: 2), at: 0)
+        case 180:
+            playView.setInputRotation(GPUImageRotationMode(rawValue: 7), at: 0)
+        case 270:
+            playView.setInputRotation(GPUImageRotationMode(rawValue: 1), at: 0)
+        default:
+            break
+        }
+    }
 
 }