
Merge branch 'master' of https://git.yishihui.com/iOS/BFRecordScreenKit
Merge code

jsonwang 3 years ago
parent
commit
4e45c5fdb9

+ 34 - 4
BFRecordScreenKit/Classes/BFRecordExport.swift

@@ -11,6 +11,7 @@ import BFMediaKit
 import Foundation
 import GPUImage
 import Photos
+import UIKit
 
 public enum ExportError : Int {
     case FileNotExist   = -31001
@@ -59,14 +60,30 @@ public class BFRecordExport {
 
             // Split the video material
             for (_, itemModel) in itemModels.enumerated() {
+                itemModel.videoStickers.removeAll()
                 
+                if itemModel.mediaType == .IMAGE {
+                    // Image material
+                    let duration = itemModel.materialDuraion
+                    let sticker = splitBaseMaterial(timelineIn: totalDur, model_in: 0, duration: duration)
+                    sticker.originalData = itemModel.coverImg?.pngData()
+                    sticker.volumeGain = 0
+                    sticker.type = StickerType.IMAGE.rawValue
+                    itemModel.videoStickers.append(sticker)
+                    BFLog(1, message: "image sticker - timIn:\(sticker.timelineIn), modIn:\(sticker.model_in), dur:\(duration)")
+                    totalDur += duration
+                    continue
+                }
+                
+                // Video processing
                 if let localPath = itemModel.localPath {
                     if !FileManager.default.fileExists(atPath: localPath) {
                         let error = NSError(domain: "err", code: ExportError.FileNotExist.rawValue, userInfo: ["msg": "file not exist"])
                         exportCompletion?(error as Error, nil)
                         return
                     }
-                    itemModel.videoStickers.removeAll()
+                   
+                    
                     if synthesisAll {
                         var subDur = 0.0
                         let drangs = itemModel.dealedDurationRanges
@@ -191,8 +208,21 @@ public class BFRecordExport {
             return
         }
 
-        guard let totalDuration = data?.reduce(0, { partialResult, itemModell in
-            itemModell.materialDuraion + partialResult
+        guard let totalDuration = data?.reduce(0.0, { partialResult, itemModell in
+            var modelDuraion = 0.0
+            if itemModell.mediaType == .IMAGE {
+                modelDuraion = itemModell.materialDuraion
+            }else if itemModell.mediaType == .VIDEO{
+                modelDuraion = itemModell.dealedDurationRanges.reduce(0.0) { partialResult, srange in
+//                    partialResult + (!synthesisAll && srange.isRecord) ?
+                    if synthesisAll {
+                        return partialResult + srange.range.duration.seconds
+                    }else {
+                        return partialResult + (srange.isRecord ? srange.range.duration.seconds : 0)
+                    }
+                }
+            }
+            return (partialResult ?? 0) + modelDuraion
         }) else {
             let error = NSError(domain: "err", code: ExportError.TotalDurError.rawValue, userInfo: ["msg": "时长计算出错"])
             exportCompletion?(error as Error, nil)
@@ -241,7 +271,7 @@ public class BFRecordExport {
             }
             let tempBeginExport = Date().timeIntervalSince1970
             if exporter!.prepare(videoSize: outputSize, videoAverageBitRate: orgeBitRate) {
-                exporter!.start(playeTimeRange: CMTimeRange(start: CMTime.zero, end: synthesisAll ? CMTime(seconds: totalDuration, preferredTimescale: 100) : composition.duration))
+                exporter!.start(playeTimeRange: CMTimeRange(start: CMTime.zero, end: CMTime(seconds: totalDuration, preferredTimescale: 1000)))
             }
             exporter?.progressClosure = { [weak self] _, _, progress in
                 //            BFLog(message: "正片合成进度 \(progress * 100)%")
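For reference, a minimal sketch of the total-duration rule introduced above, written against illustrative stand-in types rather than the real BFRecordItemModel / SplitRecordRange: image items contribute their material duration, while video items sum either every split range (when synthesisAll is set) or only the recorded ones.

    import CoreMedia

    // Illustrative stand-ins; the real models live in BFRecordScreenKit / BFMediaKit.
    enum MediaKind { case image, video }
    struct SplitRange { let isRecord: Bool; let range: CMTimeRange }
    struct Item { let kind: MediaKind; let materialDuration: Double; let splitRanges: [SplitRange] }

    func totalDuration(of items: [Item], synthesisAll: Bool) -> Double {
        items.reduce(0.0) { partial, item in
            switch item.kind {
            case .image:
                // Image items contribute their full material duration.
                return partial + item.materialDuration
            case .video:
                // Video items sum all split ranges when exporting everything,
                // otherwise only the recorded ranges.
                return partial + item.splitRanges.reduce(0.0) { acc, r in
                    synthesisAll ? acc + r.range.duration.seconds
                                 : acc + (r.isRecord ? r.range.duration.seconds : 0)
                }
            }
        }
    }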

+ 90 - 0
BFRecordScreenKit/Classes/BFRecordItemModel.swift

@@ -47,6 +47,7 @@ public class BFRecordItemModel: NSObject {
         height = phasset.pixelHeight
         if phasset.mediaType == .image {
             mediaType = .IMAGE
+            localPath = "image"
         } else if phasset.mediaType == .video {
             mediaType = .VIDEO
             fetchPlayItem(phasset)
@@ -109,6 +110,17 @@ public class BFRecordItemModel: NSObject {
         dealedDurationRanges.removeAll()
 
         var start: Double = 0
+        
+        if voiceStickers.count == 0 {
+            let model = PQVoiceModel()
+            model.startTime = 0
+            model.endTime = 2
+            model.duration = "2"
+            model.voiceType = VOICETYPT.None.rawValue
+            voiceStickers.append(model)
+            
+            materialDuraion = 2
+        }
 
         var list: [PQVoiceModel]
         list = voiceStickers.sorted { model1, model2 in
@@ -132,4 +144,82 @@ public class BFRecordItemModel: NSObject {
             dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: range, index: -1))
         }
     }
+    
+    /// Split the video into frames
+    /// - parameter fps                          : custom frame rate, i.e. how many frames to take per second
+    /// - parameter firstImagesCount             : number of leading consecutive frames to hand back to the caller early, as a buffer
+    /// - parameter splitCompleteClosure    : completion callback
+    func splitVideoFileUrlFps(fps: Double, firstImagesCount:Int = 0, splitCompleteClosure: @escaping ((Bool, [UIImage]) -> Void)) {
+        guard let localPath = localPath else {
+            return
+        }
+
+        var splitImages = [UIImage]()
+
+        var times = [NSValue]()
+
+        let urlAsset = AVURLAsset(url: URL(fileURLWithPath: localPath))
+        let start = 0
+        let end = Int(urlAsset.duration.seconds * Float64(fps))
+
+        for i in start...end {
+            let timeValue = NSValue(time: CMTimeMake(value: Int64(i * 1000), timescale: Int32(fps * 1000)))
+
+            times.append(timeValue)
+        }
+
+        let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
+        imgGenerator.requestedTimeToleranceBefore = CMTime.zero
+        imgGenerator.requestedTimeToleranceAfter = CMTime.zero
+        imgGenerator.appliesPreferredTrackTransform = true
+        let timesCount = times.count
+        var cocu = 0
+        // Generate an image for each requested frame time
+        imgGenerator.generateCGImagesAsynchronously(forTimes: times) { _, image, _, result, _ in
+            cocu += 1
+            switch result {
+            case AVAssetImageGenerator.Result.cancelled:
+                BFLog(1, message: "splitVideo: cancel")
+
+            case AVAssetImageGenerator.Result.failed:
+                BFLog(1, message: "splitVideo: failed")
+
+            case AVAssetImageGenerator.Result.succeeded:
+                let framImg = UIImage(cgImage: image!)
+                splitImages.append(framImg)
+    //            BFLog(1, message: "aaa: \(requestedTime.seconds) - \(actualTime.seconds)")
+            @unknown default:
+                break
+            }
+            
+            if cocu == firstImagesCount {
+                splitCompleteClosure(false, splitImages)
+            }
+            
+            if cocu == timesCount { // last frame: deliver the complete result
+                splitCompleteClosure(true, splitImages)
+                BFLog(1, message: "splitVideo: complete")
+            }
+        }
+    }
+
+    /// Get a thumbnail image of the video at a given time
+    /// - parameter urlAsset                : video asset to sample
+    /// - parameter time                    : time at which to grab the frame
+    /// - returns                           : the thumbnail, or nil if generation fails even with relaxed tolerances
+    func getThumbImageAtTime(urlAsset: AVURLAsset, time: CMTime) -> UIImage? {
+        let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
+        imgGenerator.requestedTimeToleranceBefore = CMTime.zero
+        imgGenerator.requestedTimeToleranceAfter = CMTime.zero
+
+        var cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
+        if cgImg == nil {
+            imgGenerator.requestedTimeToleranceBefore = CMTime.positiveInfinity
+            imgGenerator.requestedTimeToleranceAfter = CMTime.positiveInfinity
+            cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
+        }
+
+        return cgImg == nil ? nil : UIImage(cgImage: cgImg!)
+    }
+
 }
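A minimal usage sketch of the new splitVideoFileUrlFps(fps:firstImagesCount:splitCompleteClosure:) API, assuming `item` is a prepared BFRecordItemModel whose localPath points at an existing video file: the closure fires once with the first firstImagesCount frames as an early buffer (hadGetAll == false) and once more with the complete set when extraction finishes.

    // Hypothetical caller; `item` is assumed to be a fully prepared BFRecordItemModel.
    item.splitVideoFileUrlFps(fps: 10, firstImagesCount: 8) { hadGetAll, images in
        if hadGetAll {
            // Final callback: every requested frame has been generated.
            print("finished with \(images.count) frames")
        } else {
            // Early batch: enough frames to start drawing the visible thumbnails.
            print("buffered \(images.count) frames")
        }
    }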

+ 41 - 23
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -998,11 +998,12 @@ public class BFRecordScreenController: BFBaseViewController {
         currentPlayRecordIndex = -1
         hadPrepareToPlayRecord = false
         BFLog(1, message: "isDragingProgressSlder : \(isDragingProgressSlder)")
-        searchStopAtRecordRange()
+        searchStopAtRecordRange(needAdsorb: true)
         changeWithDrawBtnLayout(false)
     }
 
-    func searchStopAtRecordRange() {
+    // needAdsorb: whether to snap to the start/end of a recording segment
+    func searchStopAtRecordRange(needAdsorb:Bool = false) {
         pauseTime = currentAssetProgress.seconds
 
         // TODO: when scrubbing, pausing playback, or undoing, check whether playback stopped inside a recording range; if so, delete the related recording, drawing, avatar, and subtitles
@@ -1014,29 +1015,39 @@ public class BFRecordScreenController: BFBaseViewController {
 
         if elems.count > 0 {
             //  TODO: stopped inside a recording range, show the delete button
-            if fabs(elems[0].1.endTime - currentAssetProgress.seconds) < 0.5 {
-                BFLog(1, message: "吸附在录音结尾")
-//                changeWithDrawBtnLayout(false)
-                changeProgress(progress: Float(elems[0].1.endTime / itemModels[currItemModelIndex].materialDuraion))
-                progressThumV.progress = elems[0].1.endTime
+            if needAdsorb {
+                if fabs(elems[0].1.endTime - currentAssetProgress.seconds) < 0.5 {
+                    BFLog(1, message: "吸附在录音结尾")
+    //                changeWithDrawBtnLayout(false)
+                    changeProgress(progress: Float(elems[0].1.endTime / itemModels[currItemModelIndex].materialDuraion))
+                    progressThumV.progress = elems[0].1.endTime
+
+                    deleteRecordBtn.isHidden = true
+                    recordBtn.isHidden = false
+                    isStopAtRecordRange = -1
+                    BFLog(1, message: "停在了录音区间外 \(isStopAtRecordRange)")
+                } else {
+                    if fabs(elems[0].1.startTime - currentAssetProgress.seconds) < 0.5 {
+                        BFLog(1, message: "吸附在录音开始")
+    //                    changeWithDrawBtnLayout(true)
+                        changeProgress(progress: Float(elems[0].1.startTime / itemModels[currItemModelIndex].materialDuraion))
+                        progressThumV.progress = elems[0].1.startTime
+                    }
+                    deleteRecordBtn.isHidden = false
+                    recordBtn.isHidden = true
+                    isStopAtRecordRange = elems.first!.0
 
-                deleteRecordBtn.isHidden = true
-                recordBtn.isHidden = false
-                isStopAtRecordRange = -1
-                BFLog(1, message: "停在了录音区间外 \(isStopAtRecordRange)")
-            } else {
-                if fabs(elems[0].1.startTime - currentAssetProgress.seconds) < 0.5 {
-                    BFLog(1, message: "吸附在录音开始")
-//                    changeWithDrawBtnLayout(true)
-                    changeProgress(progress: Float(elems[0].1.startTime / itemModels[currItemModelIndex].materialDuraion))
-                    progressThumV.progress = elems[0].1.startTime
+                    BFLog(1, message: "停在了录音区间里 \(isStopAtRecordRange)")
                 }
+            }else{
                 deleteRecordBtn.isHidden = false
                 recordBtn.isHidden = true
-                isStopAtRecordRange = elems.first!.0
 
+                isStopAtRecordRange = elems.first!.0
                 BFLog(1, message: "停在了录音区间里 \(isStopAtRecordRange)")
+
             }
+            
         } else {
             deleteRecordBtn.isHidden = true
             recordBtn.isHidden = false
@@ -1136,6 +1147,8 @@ public class BFRecordScreenController: BFBaseViewController {
                 self?.hadPrepareToPlayRecord = false
                 self?.currentPlayRecordIndex = -1
                 self?.assetPlayer?.volume = self?.noSpeakVolume ?? 0
+//                self?.deleteRecordBtn.isHidden = true
+//                self?.recordBtn.isHidden = false
                 didPlayToEndTime(recordedAudio, newItem)
             }
             avplayerTimeObserver?.invalidate()
@@ -1150,10 +1163,10 @@ public class BFRecordScreenController: BFBaseViewController {
             return
         }
         synced(currentPlayRecordIndex) { [weak self] in
-            guard let self = self else {
+            guard let sself = self else {
                 return
             }
-            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(self.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startTime),\(CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2)")
+            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startTime),\(CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2)")
 
             if !hadPrepareToPlayRecord,
                recordPlayer?.currentItem?.duration.timescale != 0,
@@ -1162,14 +1175,16 @@ public class BFRecordScreenController: BFBaseViewController {
             {
                 // 应当开始播放了
                 // 两个逻辑:如果在播,则跳过;如果暂停拖动到中间,则seek
-                if currentPlayRecordIndex == -1, self.isNormalPlaying {
+                if currentPlayRecordIndex == -1, sself.isNormalPlaying {
                     let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
-                    self.assetPlayer?.volume = self.haveSpeakVolume
+                    sself.assetPlayer?.volume = sself.haveSpeakVolume
 
-                    DispatchQueue.main.async { [weak self] in
+                    DispatchQueue.main.async { [weak sself] in
                         self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000), toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
                             if finished, self?.isNormalPlaying ?? false {
                                 self?.recordPlayer?.play()
+//                                self?.deleteRecordBtn.isHidden = false
+//                                self?.recordBtn.isHidden = true
                                 BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds ?? 0)")
                             }
                         })
@@ -1208,6 +1223,9 @@ public class BFRecordScreenController: BFBaseViewController {
             // 处理图片音频播放
             imageRecordPlay()
         }
+        
+        deleteRecordBtn.isHidden = true
+        recordBtn.isHidden = false
     }
 
     func pause() {
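The snapping behaviour added in searchStopAtRecordRange(needAdsorb:) boils down to a boundary check with the 0.5 s threshold used above; a self-contained sketch (function and parameter names here are illustrative, not from the codebase):

    /// Decide where the playhead should land when it stops near a recording
    /// that spans `start`...`end` (all values in seconds).
    func snappedPosition(playhead: Double, start: Double, end: Double,
                         threshold: Double = 0.5) -> Double {
        if abs(end - playhead) < threshold { return end }     // snap to the recording's end
        if abs(start - playhead) < threshold { return start } // snap to the recording's start
        return playhead                                       // close to neither boundary: no snap
    }

    // e.g. a recording spanning 3.0...7.0 s:
    snappedPosition(playhead: 6.7, start: 3.0, end: 7.0)  // 7.0, snapped to the end
    snappedPosition(playhead: 5.0, start: 3.0, end: 7.0)  // 5.0, inside the range, no snap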

+ 13 - 10
BFRecordScreenKit/Classes/RecordScreen/View/BFVideoThumbProgressView.swift

@@ -16,6 +16,7 @@ class BFVideoThumbProgressView: UIView {
     var recordItem: BFRecordItemModel? {
         didSet {
             // Reset the playhead
+            BFLog(1, message: "new recorditem")
             progress = 0
             if recordItem?.mediaType == .VIDEO {
                 dealWithVideoThumb()
@@ -122,21 +123,23 @@ class BFVideoThumbProgressView: UIView {
             }
             
             let fps = Double(count) / dur
-            splitVideoFileUrlFps(urlAsset: videoAsset, fps: fps, firstImagesCount: Int(ceil(width/2.0/thumbImageWidth))) { [weak self] hadGetAll, images in
-                guard let sself = self, let recordItem = sself.recordItem else {
+            recordItem!.splitVideoFileUrlFps(fps: fps, firstImagesCount: Int(ceil(width/2.0/thumbImageWidth))) { [weak self, weak recordItem] hadGetAll, images in
+                guard let sself = self, let sitem = recordItem else {
                     return
                 }
-                BFLog(1, message: "获取缩略图:\(hadGetAll), \(Date().timeIntervalSince(date))")
-                recordItem.thumbImgs.removeAll()
-                recordItem.thumbImgs.append(contentsOf: images)
+
+                BFLog(1, message: "获取缩略图:\(hadGetAll), \(Date().timeIntervalSince(date)), \( sitem.localPath ?? "aa")")
+                sitem.thumbImgs.removeAll()
+                sitem.thumbImgs.append(contentsOf: images)
                 
                 // Pad with the last frame until there are enough thumbnails
-                while (hadGetAll && recordItem.thumbImgs.count < count && images.count > 0) {
-                    recordItem.thumbImgs.append(images.last!)
+                while (hadGetAll && sitem.thumbImgs.count < count && images.count > 0) {
+                    sitem.thumbImgs.append(images.last!)
                 }
-                
-                if recordItem.localPath?.contains(videoAsset.url.lastPathComponent) ?? false {
-                    sself.addThumbImages(images: recordItem.thumbImgs)
+                if sitem.localPath == sself.recordItem!.localPath {
+                    sself.addThumbImages(images: sitem.thumbImgs)
+                }else{
+                    BFLog(1, message: "thumbImgs.count:\(sitem.thumbImgs.count)")
                 }
             }
         }
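For context on the splitVideoFileUrlFps call above, a sketch of how the extraction parameters are typically derived; `count` is computed outside the shown hunk, so treat the names and numbers below as illustrative assumptions rather than the actual values:

    import Foundation

    // Illustrative numbers only.
    let width: Double = 750.0          // total width of the thumbnail strip, in points
    let thumbImageWidth: Double = 70.0 // width of a single thumbnail cell
    let dur: Double = 12.0             // duration of the video asset, in seconds

    let count = Int(ceil(width / thumbImageWidth))                  // thumbnails needed to fill the strip
    let fps = Double(count) / dur                                   // frames to extract per second
    let firstImagesCount = Int(ceil(width / 2.0 / thumbImageWidth)) // early batch covering half the strip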