浏览代码

Merge branch 'master' of https://git.yishihui.com/iOS/BFRecordScreenKit

# Conflicts:
#	BFRecordScreenKit/Classes/BFRecordScreenController.swift
合并代码
jsonwang 3 年之前
父节点
当前提交
271976a0a8

二进制
BFRecordScreenKit/Assets/BFRecordScreenKit.xcassets/changeVoice_h.imageset/changeVoice_h@3x.png


+ 21 - 0
BFRecordScreenKit/Assets/BFRecordScreenKit.xcassets/changeVoice_n.imageset/Contents.json

@@ -0,0 +1,21 @@
+{
+  "images" : [
+    {
+      "idiom" : "universal",
+      "scale" : "1x"
+    },
+    {
+      "idiom" : "universal",
+      "scale" : "2x"
+    },
+    {
+      "filename" : "changeVoice_n@3x.png",
+      "idiom" : "universal",
+      "scale" : "3x"
+    }
+  ],
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}

二进制
BFRecordScreenKit/Assets/BFRecordScreenKit.xcassets/changeVoice_n.imageset/changeVoice_n@3x.png


二进制
BFRecordScreenKit/Assets/BFRecordScreenKit.xcassets/withdraw_h.imageset/withdraw_h@3x.png


二进制
BFRecordScreenKit/Assets/BFRecordScreenKit.xcassets/withdraw_n.imageset/withdraw_n@3x.png


+ 4 - 0
BFRecordScreenKit/Classes/BFRSComm.swift

@@ -18,3 +18,7 @@ public func imageInRecordScreenKit(by name: String) -> UIImage? {
 func currentBundle() -> Bundle? {
     return Bundle.current(moduleName: "BFRecordScreenKit", isAssets: false)
 }
+
+func ThemeStyleGreen() -> UIColor {
+    return UIColor.hexColor(hexadecimal: "#28BE67")
+}

+ 5 - 3
BFRecordScreenKit/Classes/BFRecordExport.swift

@@ -200,7 +200,7 @@ public class BFRecordExport {
                 }else{
                     let error = NSError(domain: "err", code: -1, userInfo: nil)
                     self?.exportCompletion?(error as Error, nil)
-                    
+                    cShowHUB(superView: nil, msg: "导出失败")
                 }
                 
                 // 导出完成后取消导出
@@ -217,8 +217,10 @@ public class BFRecordExport {
         PHPhotoLibrary.shared().performChanges {
             PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: url)
         } completionHandler: { isFinished, _ in
-            DispatchQueue.main.async {
-                cShowHUB(superView: nil, msg: "保存成功")
+            if isFinished {
+                DispatchQueue.main.async {
+                    cShowHUB(superView: nil, msg: "保存成功")
+                }
             }
         }
     }

+ 127 - 87
BFRecordScreenKit/Classes/BFRecordScreenController.swift

@@ -23,7 +23,16 @@ public class BFRecordScreenController: BFBaseViewController {
     public var asset:PHAsset?
 //    var shouldPlayRecordIndex:Int = -1          // 当前应该播放的录音资源序号
     var currentPlayRecordIndex:Int = -1         // >= 0 :当前正在播放的录音资源序号; -3: 刚录音完,不需要播放录音; -1:初始化阶段
-    var isRecording = false                     // 是否正在录音
+    var isRecording = false {                   // 是否正在录音
+        didSet{
+            withDrawBtn.isHidden = isRecording
+            changeVoiceBtn.isHidden = isRecording
+            
+            recordBtn.setTitle(isRecording ? "松手 完成" :"按住 说话", for: .normal)
+            recordBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67", alpha: isRecording ? 0.6 : 1)
+        }
+    }
+    
     var isNormalPlaying = false {               // 是否正在播放
         didSet{
             playBtn.isSelected = isNormalPlaying
@@ -56,7 +65,7 @@ public class BFRecordScreenController: BFBaseViewController {
         manager.cancelRecordHandle = { error in
             
         }
-        manager.endRecordHandle = {[weak self] (isTimeout, model) in
+        manager.endRecordHandle = {[weak self] (model, error) in
             if let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? ""){
                 // 加入到语音数组里
                 let ass = AVURLAsset(url: URL(fileURLWithPath: model.wavFilePath))
@@ -120,10 +129,12 @@ public class BFRecordScreenController: BFBaseViewController {
     
     lazy var recordBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
+        btn.backgroundColor = ThemeStyleGreen()
+        btn.setTitle("按住 说话", for: .normal)
         btn.adjustsImageWhenHighlighted = false
         btn.addTarget(self, action: #selector(startRecord), for: .touchDown)
         btn.addTarget(self, action: #selector(endRecord), for: .touchUpInside)
+
         return btn
     }()
     
@@ -150,20 +161,33 @@ public class BFRecordScreenController: BFBaseViewController {
         sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpInside)
         sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpOutside)
         sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchCancel)
+        sliderView.addTarget(self, action: #selector(sliderValueDidChanged(sender:)), for: .valueChanged)
         return sliderView
     }()
     
-    lazy var closeBtn:UIButton = {
+    lazy var withDrawBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "xx"), for: .normal)
-        btn.addTarget(self, action: #selector(closePage), for: .touchUpInside)
+        btn.setImage(imageInRecordScreenKit(by: "withdraw_n"), for: .normal)
+        btn.setImage(imageInRecordScreenKit(by: "withdraw_h"), for: .highlighted)
+        btn.setTitle("撤销", for: .normal)
+        btn.setTitleColor(.white, for: .normal)
+        btn.setTitleColor(.gray, for: .highlighted)
+        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
+        btn.contentVerticalAlignment = UIControl.ContentVerticalAlignment.center;
+        btn.addTarget(self, action: #selector(withdrawAction), for: .touchUpInside)
         return btn
     }()
     
-    lazy var nextBtn:UIButton = {
+    lazy var changeVoiceBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "gou"), for: .normal)
-        btn.addTarget(self, action: #selector(nextAction), for: .touchUpInside)
+        btn.setImage(imageInRecordScreenKit(by: "changeVoice_n"), for: .normal)
+        btn.setImage(imageInRecordScreenKit(by: "changeVoice_h"), for: .highlighted)
+        btn.setTitle("变声", for: .normal)
+        btn.setTitleColor(.white, for: .normal)
+        btn.setTitleColor(ThemeStyleGreen(), for: .highlighted)
+        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
+        btn.contentVerticalAlignment = UIControl.ContentVerticalAlignment.center;
+        btn.addTarget(self, action: #selector(changeVoiceAction), for: .touchUpInside)
         return btn
     }()
     
@@ -182,7 +206,7 @@ public class BFRecordScreenController: BFBaseViewController {
         return toolV
         
     }()
-    
+ 
     //头像  add by ak
     lazy var avatarView:BFRecordAvatarView = {
         let avatarView = BFRecordAvatarView.init(frame: CGRect.init(x: 10, y: 10, width: 120, height: 120))
@@ -203,13 +227,15 @@ public class BFRecordScreenController: BFBaseViewController {
         btn.addTarget(self, action: #selector(drawPin), for: .touchUpInside)
         return btn
     }()
-    
+ 
     //MARK: ------------------ 生命周期
     deinit {
         cleanMovieTarget()
         NotificationCenter.default.removeObserver(self)
         avplayerTimeObserver?.invalidate()
-        recorderManager.stopRecord(isCancel: true)
+        if isRecording{
+            recorderManager.stopRecord(isCancel: true)
+        }
         assetPlayer?.pause()
         recordPlayer?.pause()
         
@@ -242,8 +268,8 @@ public class BFRecordScreenController: BFBaseViewController {
         bottomeView.addSubview(recordBtn)
         bottomeView.addSubview(progessSildeBackV)
         bottomeView.addSubview(progessSilde)
-        bottomeView.addSubview(closeBtn)
-        bottomeView.addSubview(nextBtn)
+        bottomeView.addSubview(withDrawBtn)
+        bottomeView.addSubview(changeVoiceBtn)
         
         if checkStatus() {
             try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
@@ -263,26 +289,27 @@ public class BFRecordScreenController: BFBaseViewController {
             make.height.equalTo(adapterWidth(width: 220))
         }
         
-        recordBtn.snp.makeConstraints { make in
-            make.width.height.equalTo(120)
-            make.centerX.equalToSuperview()
-            make.top.equalTo(27)
-        }
-        
-        closeBtn.snp.makeConstraints { make in
+        withDrawBtn.snp.makeConstraints { make in
             make.left.equalToSuperview()
-            make.width.height.equalTo(60)
-            make.top.equalTo(recordBtn.snp.bottom).offset(-10)
+            make.width.height.equalTo(65)
+            make.top.equalTo(128)
         }
-        nextBtn.snp.makeConstraints { make in
+        changeVoiceBtn.snp.makeConstraints { make in
             make.right.equalToSuperview()
-            make.top.width.height.equalTo(closeBtn)
+            make.top.width.height.equalTo(withDrawBtn)
+        }
+        
+        recordBtn.snp.makeConstraints { make in
+            make.left.equalTo(withDrawBtn.snp.right)
+            make.right.equalTo(changeVoiceBtn.snp.left)
+            make.height.equalTo(42)
+            make.top.equalTo(withDrawBtn).offset(6)
+            
         }
         
         progessSildeBackV.snp.makeConstraints { make in
-            make.left.equalTo(closeBtn.snp.right).offset(16)
-            make.right.equalTo(nextBtn.snp.left).offset(-16)
-            make.centerY.equalTo(closeBtn)
+            make.left.width.equalTo(recordBtn)
+            make.top.equalTo(recordBtn).offset(-30)
             make.height.equalTo(8)
         }
         
@@ -291,6 +318,7 @@ public class BFRecordScreenController: BFBaseViewController {
             make.height.equalTo(20)
         }
         
+ 
         openCameraBtn.snp.makeConstraints { make in
             make.right.equalToSuperview().offset(-12)
             make.top.equalToSuperview().offset(98)
@@ -304,6 +332,14 @@ public class BFRecordScreenController: BFBaseViewController {
             make.width.equalTo(80)
             make.height.equalTo(124)
         }
+ 
+        
+        withDrawBtn.imageEdgeInsets = UIEdgeInsets(top: -withDrawBtn.imageView!.height, left: 0, bottom: 0, right: -withDrawBtn.titleLabel!.width);
+        withDrawBtn.titleEdgeInsets = UIEdgeInsets(top: withDrawBtn.titleLabel!.height + 2, left: -withDrawBtn.imageView!.width, bottom: 0, right: 0);
+        
+        changeVoiceBtn.imageEdgeInsets = UIEdgeInsets(top: -changeVoiceBtn.imageView!.height-2, left: 0, bottom: 0, right: -changeVoiceBtn.titleLabel!.width);
+        changeVoiceBtn.titleEdgeInsets = UIEdgeInsets(top: changeVoiceBtn.titleLabel!.height+2, left: -changeVoiceBtn.imageView!.width, bottom: 0, right: 0);
+
         
     }
     
@@ -367,13 +403,15 @@ public class BFRecordScreenController: BFBaseViewController {
     }
     
     @objc func startRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic2"), for: .normal)
         BFLog(1, message: "start \(UIControl.Event.touchDown)")
+        isRecording = true
+
         pause()
 
         let model = PQVoiceModel()
         model.startTime = CMTimeGetSeconds(self.currentAssetProgress)
         model.volume = 100
+
 //        recorderManager.voiceModel = model
 //        recorderManager.startRecord(index: recordList.count)
         movie?.startProcessing()
@@ -385,13 +423,22 @@ public class BFRecordScreenController: BFBaseViewController {
             avatarView.beginRecord()
         }
  
+        recorderManager.voiceModel = model
+        recorderManager.startRecord(index: recordList.count)
+//        movie?.startProcessing()
+        assetPlayer?.volume = 0
+        assetPlayer?.play()
+ 
     }
     
     @objc func endRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
+        
+    
         // 存储录音
-//        recorderManager.endRecord()
+ 
+     recorderManager.endRecord()
         isRecording = false
+ 
 
         pause()
         if(!avatarView.isHidden){
@@ -400,19 +447,18 @@ public class BFRecordScreenController: BFBaseViewController {
     }
     
     func cancleRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
-        recorderManager.cancleRecord()
+
         isRecording = false
+        recorderManager.cancleRecord()
         
         pause()
     }
-    @objc func closePage(){
+    @objc func withdrawAction(){
         pause()
-        closeActionHandle?()
     }
     
-    @objc func nextAction(){
-        nextActionHandle?()
+    @objc func changeVoiceAction(){
+//        nextActionHandle?()
         pause()
     }
     
@@ -420,17 +466,21 @@ public class BFRecordScreenController: BFBaseViewController {
         btn.isSelected ? pause() : play()
     }
 
-    @objc public func sliderTouchBegan(sender _: UISlider) {
+    @objc func sliderTouchBegan(sender _: UISlider) {
         isDragingProgressSlder = true
         pause()
     }
 
-    @objc public func sliderTouchEnded(sender: UISlider) {
+    @objc func sliderTouchEnded(sender: UISlider) {
         changeProgress(progress: sender.value)
         isDragingProgressSlder = false
         currentPlayRecordIndex = -1
         hadPrepareToPlayRecord = false
     }
+    @objc func sliderValueDidChanged(sender: UISlider) {
+        changeProgress(progress: sender.value)
+
+    }
     
     // MARK: - 权限申请
     func checkStatus(show: Bool = true) -> Bool {
@@ -560,24 +610,23 @@ public class BFRecordScreenController: BFBaseViewController {
     }
     
     func play(){
-        BFLog(1, message: "开始播放")
+        BFLog(1, message: "开始播放 \(self.currentAssetProgress.seconds)")
+        isNormalPlaying = true
         assetPlayer?.volume = 0.2
         movie?.startProcessing()
-        isNormalPlaying = true
-        let second = assetPlayer?.currentItem?.currentTime()
-        assetPlayer?.seek(to: second ?? CMTime.zero, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: {[weak self] finished in
-            if finished {
-                self?.assetPlayer?.play()
-            }
-        })
+        
+        self.assetPlayer?.play()
     }
     
     func pause(){
         BFLog(1, message: "暂停播放")
-        movie?.cancelProcessing()
+        isNormalPlaying = false
+//        movie?.cancelProcessing()
         assetPlayer?.pause()
         recordPlayer?.pause()
-        isNormalPlaying = false
+        
+        assetPlayer?.seek(to: self.currentAssetProgress , toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
+        })
     }
     
     func fetchVideo(){
@@ -606,7 +655,7 @@ public class BFRecordScreenController: BFBaseViewController {
                                           contentMode: .aspectFit,
                                               options: option)
             { (image, nil) in
-                 //image就是图片
+                 // 设置首帧/封面
                 if image != nil {
                     let pic = GPUImagePicture(image: image)
                     let filet = GPUImageFilter()
@@ -614,37 +663,15 @@ public class BFRecordScreenController: BFBaseViewController {
                     filet.addTarget(self.playView)
                     pic?.processImage()
                 }
-                
             }
             
-//          使用copy资源到本地的方式
-//            let outFilePath = NSHomeDirectory().appending("/Documents/simple.mp4")
-//            let outFileUrl = URL(fileURLWithPath: outFilePath)
-//            if FileManager.default.fileExists(atPath: outFilePath) {
-//                try? FileManager.default.removeItem(atPath: outFilePath)
-//            }
-//
-//            let assetResources = PHAssetResource.assetResources(for: asset)
-            
-//            if let rsc = assetResources.first(where: { res in
-//                res.type == .video  || res.type == .pairedVideo
-//            }) {
-//                PHAssetResourceManager.default().writeData(for: rsc, toFile:outFileUrl, options: nil) {[weak self] error in
-//                    if error == nil {
-//                        DispatchQueue.main.async {[weak self] in
-//                            self?.setVideoPlay(url: outFileUrl)
-//                            self?.setAudioPlay(url: outFileUrl)
-//                        }
-//                    }else{
-//                        BFLog(1, message: "导出视频相exportAsynchro faile")
-//                    }
-//                }
-//            }else{
-//
-//            }
             PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: {[weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
                 if let urlass = asset as? AVURLAsset {
                     self?.avasset = urlass
+                    DispatchQueue.main.async {[weak self] in
+                        let progressThumV = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 20, width: cScreenWidth, height: 50), videoAsset: self!.avasset!)
+                        self?.bottomeView.addSubview(progressThumV)
+                    }
                 }
             })
         }
@@ -662,6 +689,8 @@ public class BFRecordScreenController: BFBaseViewController {
         let filter = GPUImageFilter()
         movie?.addTarget(filter)
         filter.addTarget(playView)
+        
+        movie?.startProcessing()
     }
     
     func setAudioPlay(item:AVPlayerItem){
@@ -673,6 +702,13 @@ public class BFRecordScreenController: BFBaseViewController {
             assetPlayer = AVPlayer(playerItem: item)
             avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) {[weak self] time in
              //    进度监控
+                if !((self?.isNormalPlaying ?? false) || (self?.isRecording ?? false)) {
+                    return
+                }
+                
+                // 播放对应的录音音频
+                self?.playRecord(at: time)
+
                 self?.currentAssetProgress = time
                 BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
                 if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
@@ -681,8 +717,6 @@ public class BFRecordScreenController: BFBaseViewController {
                         self?.progreddL.text = String(format: "%.2f / %.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
                     }
                 }
-                // 播放对应的录音音频
-                self?.playRecord(at: time)
             } as? NSKeyValueObservation
         }
 
@@ -707,32 +741,38 @@ public class BFRecordScreenController: BFBaseViewController {
             }
         })
         movie?.removeAllTargets()
+        movie?.removeFramebuffer()
+        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
+        
     }
     
     //MARK: - 录音对应图像绘制
     
     func changeProgress(progress:Float) {
         if let duration = assetPlayer?.currentItem?.duration {
-            assetPlayer!.seek(to: CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) {[weak self] finished in
-                if finished{
-                    BFLog(1, message: "拖动成功")
-                    self?.movie?.startProcessing()
-                }
+            self.currentAssetProgress = CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100)
+            assetPlayer!.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) {[weak self] finished in
+//                if finished{
+//                    BFLog(1, message: "拖动成功")
+//                    self?.movie?.startProcessing()
+//                }
             }
         }
     }
     
     func drawOrUpdateRecordProgessLable(){
-        progessSildeBackV.subviews.forEach { vv in
-            vv.removeFromSuperview()
-        }
         DispatchQueue.main.async {[weak self] in
+            self?.progessSildeBackV.subviews.forEach { vv in
+                vv.removeFromSuperview()
+            }
             if let totalDur = self?.asset?.duration, totalDur > 0, let list = self?.recordList {
                 let width = self?.progessSildeBackV.width ?? 0
                 let height = self?.progessSildeBackV.height ?? 0
                 list.forEach { model in
+ 
                     let lineV = UIView(frame: CGRect(x: model.startTime * Double(width) / totalDur , y: 0, width: (model.endTime - model.startTime) * Double(width) / totalDur, height: Double(height)))
-                    lineV.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67")
+                    lineV.backgroundColor = ThemeStyleGreen()
+
                     self?.progessSildeBackV.addSubview(lineV)
                 }
             }

+ 80 - 0
BFRecordScreenKit/Classes/BFVideoThumbImageFetchHelper.swift

@@ -0,0 +1,80 @@
+//
+//  BFVideoThumbImageFetchHelper.swift
+//  BFRecordScreenKit
+//
+//  Created by 胡志强 on 2021/12/3.
+//
+
+import Foundation
+import AVFoundation
+import BFCommonKit
+
+/// 视频分解成帧
+/// - parameter urlAsset                : 视频资源
+/// - parameter fps                     : 自定义帧数 每秒内取的帧数
+/// - parameter splitCompleteClosure    : 回调 (是否成功, 帧图片数组)
+func splitVideoFileUrlFps(urlAsset:AVURLAsset, fps:Float, splitCompleteClosure:@escaping ((Bool, [UIImage]?) -> Void)) {
+    var splitImages = [UIImage]()
+    
+    var times = [NSValue]()
+    
+    for i in 0...Int(urlAsset.duration.seconds * Float64(fps)) {
+        let timeValue = NSValue(time: CMTimeMake(value: Int64(i), timescale: Int32(fps)) )
+        
+        times.append(timeValue)
+    }
+    
+    let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
+    imgGenerator.requestedTimeToleranceBefore = CMTime.zero
+    imgGenerator.requestedTimeToleranceAfter = CMTime.zero
+    
+    let timesCount = times.count
+    var cocu = 0
+    //获取每一帧的图片
+    imgGenerator.generateCGImagesAsynchronously(forTimes: times) { (requestedTime, image, actualTime, result, error) in
+        cocu += 1
+        var isSuccess = false
+        switch (result) {
+        case AVAssetImageGenerator.Result.cancelled:
+            BFLog(1, message: "aaa: cancel")
+
+        case AVAssetImageGenerator.Result.failed:
+            BFLog(1, message: "aaa: failed")
+
+        case AVAssetImageGenerator.Result.succeeded:
+            let framImg = UIImage(cgImage: image!)
+            splitImages.append(framImg)
+            BFLog(1, message: "aaa: \(requestedTime.seconds) - \(actualTime.seconds)")
+//            if (Int(requestedTime.value) == (timesCount-1)) {
+//            }
+            isSuccess = true
+        @unknown default:
+            break
+        }
+        if cocu == timesCount { //最后一帧时 回调赋值
+            splitCompleteClosure(isSuccess, splitImages)
+            BFLog(1, message: "aaa: complete")
+
+        }
+    }
+   
+}
+
+/// 获取视频在指定时间点的缩略图
+/// - parameter urlAsset                : 视频资源
+/// - parameter time                    : 目标时间点
+/// - returns                           : 对应帧图片; 取帧失败时返回 nil
+func getThumbImageAtTime(urlAsset: AVURLAsset, time:CMTime) -> UIImage? {
+    let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
+    imgGenerator.requestedTimeToleranceBefore = CMTime.zero
+    imgGenerator.requestedTimeToleranceAfter = CMTime.zero
+    
+    var cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
+    if cgImg == nil  {
+        imgGenerator.requestedTimeToleranceBefore = CMTime.positiveInfinity
+        imgGenerator.requestedTimeToleranceAfter = CMTime.positiveInfinity
+        cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
+    }
+
+    return cgImg == nil ? nil : UIImage(cgImage: cgImg!)
+}

+ 74 - 0
BFRecordScreenKit/Classes/BFVideoThumbProgressView.swift

@@ -0,0 +1,74 @@
+//
+//  BFVideoThumbProgressView.swift
+//  BFRecordScreenKit
+//
+//  Created by 胡志强 on 2021/12/3.
+//
+
+import Foundation
+import UIKit
+import AVFoundation
+import BFCommonKit
+import SnapKit
+
+class BFVideoThumbProgressView: UIView {
+    var videoAsset : AVURLAsset?
+    
+    var thumbImgs = [UIImage]()
+    
+    init(frame: CGRect, videoAsset:AVURLAsset) {
+        super.init(frame: frame)
+        self.videoAsset = videoAsset
+        addSubview(progressView)
+
+        splitVideoFileUrlFps(urlAsset: videoAsset, fps: 2) {[weak self] isSuccess, images in
+            if isSuccess{
+                self?.thumbImgs = images!
+                DispatchQueue.main.async {[weak self] in
+                    if let sself = self{
+                        for (i, img) in images!.enumerated() {
+                            let iv = UIImageView(image: img)
+                            iv.contentMode = .scaleAspectFill
+                            sself.progressView.addSubview(iv)
+                            iv.snp.makeConstraints { make in
+                                make.left.equalTo(CGFloat(i) * sself.height + sself.width * 0.5)
+                                make.top.height.equalToSuperview()
+                                make.width.equalTo(sself.height)
+                            }
+                        }
+                        sself.progressView.contentSize = CGSize(width: CGFloat(images!.count) *  sself.height + sself.width, height: sself.height)
+                    }
+                }
+            }
+        }
+    }
+
+    required init?(coder: NSCoder) {
+        fatalError("init(coder:) has not been implemented")
+    }
+    
+    lazy var progressView : UIScrollView = {
+        let sv = UIScrollView()
+        sv.bounces = false
+        sv.backgroundColor = .clear
+        sv.decelerationRate = .fast
+        sv.showsHorizontalScrollIndicator = false
+        
+        
+        return sv
+    }()
+    
+    override func didMoveToWindow() {
+        super.didMoveToWindow()
+        
+        
+    }
+    
+    override func layoutSubviews() {
+        super.layoutSubviews()
+        progressView.snp.makeConstraints { make in
+            make.edges.equalToSuperview()
+        }
+        
+    }
+}

+ 2 - 2
BFRecordScreenKit/Classes/BFVoiceRecordManager.swift

@@ -15,7 +15,7 @@ class BFVoiceRecordManager {
     // 录音相关
     var audioRecorder: NXAudioRecorder?
     var limitedDuration:Double = 600       // 限制录制时长
-    var endRecordHandle : ((Bool, PQVoiceModel?) -> Void)?
+    var endRecordHandle : ((PQVoiceModel?, Error?) -> Void)?
     var cancelRecordHandle : ((Error?) -> Void)?
 
     var recorderFilePath : String = ""
@@ -105,7 +105,7 @@ class BFVoiceRecordManager {
                 NXNoiseReduction().denoise(url, outFile: noiseFilePath)
                 if let model = self?.voiceModel{
                     model.wavFilePath = noiseFilePath
-                    self?.endRecordHandle?(true, model)
+                    self?.endRecordHandle?(model, nil)
                 }
 
 //