
1. Show progress while recording  2. Adjust the progress-loading logic

wenweiwei 3 years ago
parent
commit
7ccb28cbdf

+ 58 - 46
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -61,7 +61,7 @@ public class BFRecordScreenController: BFBaseViewController {
     }
 
     var currentAssetProgress: CMTime = .zero // playback progress of the current asset
-
+    var recordStartTime: Double = 0 // recording start time
     //    public var recordList:[PQVoiceModel] = [PQVoiceModel]()
 
     var assetPlayer: AVPlayer? // audio player for the original video
@@ -132,9 +132,11 @@ public class BFRecordScreenController: BFBaseViewController {
                     model.endTime = sself.currentAssetProgress.seconds
                     self?.isEndPlay = true
                 }
-//                sself.drawOrUpdateRecordProgessLable()
-
+                // Note: correct the current position when recording ends, so it does not end up misaligned with the indicator
+                self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
                 sself.currentPlayRecordIndex = -3 // recording just finished; no need to play it back
+                // reset the recording start time
+                sself.recordStartTime = 0
             }
         }
         return manager
@@ -147,7 +149,7 @@ public class BFRecordScreenController: BFBaseViewController {
     var avplayerTimeObserver: NSKeyValueObservation?
 
     var indirectionView: BFIndirectionProgressView?
-    
+
     lazy var progreddL: UILabel = {
         let l = UILabel()
         l.textAlignment = .center
@@ -407,7 +409,7 @@ public class BFRecordScreenController: BFBaseViewController {
         _ = disablePopGesture()
 
         // add by ak: fetch the NLS token
-        BFRecordScreenViewModel.getNlsAccessToken {[weak self] token, appkey in
+        BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
             BFLog(message: "nls appkey is \(appkey), token is \(token)")
 //            self?.speechTranscriberUtil = PQSpeechTranscriberUtil(token, appid: appkey)
         }
@@ -714,11 +716,11 @@ public class BFRecordScreenController: BFBaseViewController {
             let model = itemModels[currItemModelIndex].voiceStickers[isStopAtRecordRange]
             itemModels[currItemModelIndex].voiceStickers.remove(at: isStopAtRecordRange)
             events.append(WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)]))
-            drawOrUpdateRecordProgessLable()
+            indirectionView?.deleteItem(index: isStopAtRecordRange)
+//            drawOrUpdateRecordProgessLable()
             searchStopAtRecordRange()
 
             deleteTitles(voiceModel: model)
-
         }
     }
 
@@ -737,7 +739,9 @@ public class BFRecordScreenController: BFBaseViewController {
         model.volume = 100
         recorderManager.voiceModel = model
         recorderManager.startRecord(index: 1)
-
+        if recordStartTime <= 0 {
+            recordStartTime = currentAssetProgress.seconds
+        }
         movie?.startProcessing()
         assetPlayer?.volume = 0
         assetPlayer?.play()
@@ -766,16 +770,21 @@ public class BFRecordScreenController: BFBaseViewController {
             (collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
         }
         recorderManager.audioRecorder?.recorderProgross = { [weak self] progress in
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
-                self?.imageRecordProgress(isRecord: true, progress: progress)
-            }
-            self?.indirectionView?.setProgress(index: (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.count ?? 0), start: self?.currentAssetProgress.seconds ?? 0, progress: progress)
-        }
-        recorderManager.audioRecorder?.recorderStartHandle = {[weak self] in
+            BFLog(message: "recording progress--\(progress)")
             if self?.indirectionView == nil {
-                self?.indirectionView = BFIndirectionProgressView.init(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
+                self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
                 self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
             }
+            // update the recording progress
+            // Note: the video cannot be driven by the recording progress, because the player has not started playing yet when recording begins, which leads to inconsistent progress
+            // Note: when recording stops, the video player's progress keeps advancing; the error is roughly 80 ms
+            if self?.isRecording ?? false {
+                let ratioX = 0.08
+                self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: progress - ratioX)
+            }
+            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
+                self?.imageRecordProgress(isRecord: true, progress: progress)
+            }
         }
     }
 
@@ -814,7 +823,8 @@ public class BFRecordScreenController: BFBaseViewController {
                 }) {
                     let model = itemModels[currItemModelIndex].voiceStickers[modelIndex]
                     itemModels[currItemModelIndex].voiceStickers.remove(at: modelIndex)
-
+                    // remove the corresponding progress segment
+                    indirectionView?.deleteItem(index: modelIndex)
                     var tuples = action.deletedVoices
                     if tuples != nil, tuples!.count > 0 {
                         tuples!.sort { tuple1, tuple2 in
@@ -824,18 +834,13 @@ public class BFRecordScreenController: BFBaseViewController {
                             itemModels[currItemModelIndex].voiceStickers.insert(tuple.0, at: tuple.1)
                         }
                     }
-                    
-                    //restore the subtitles
+                    // restore the subtitles
                     let titleTuples = action.deletedTittles
                     if titleTuples != nil, titleTuples!.count > 0 {
-                       
                         titleTuples?.forEach { titleTuple in
                             itemModels[currItemModelIndex].titleStickers.insert(titleTuple.0, at: titleTuple.1)
                         }
                     }
-                    
-                    
-                    drawOrUpdateRecordProgessLable()
                     jumpTime = model.startTime
                 }
             } else if action.type == 3 {
@@ -849,18 +854,16 @@ public class BFRecordScreenController: BFBaseViewController {
                         itemModels[currItemModelIndex].voiceStickers.insert(tuple.0, at: tuple.1)
                     }
                 }
-                
-                //restore the subtitles
+
+                // restore the subtitles
                 let titleTuples = action.deletedTittles
                 if titleTuples != nil, titleTuples!.count > 0 {
-                   
                     titleTuples?.forEach { titleTuple in
                         itemModels[currItemModelIndex].titleStickers.insert(titleTuple.0, at: titleTuple.1)
                     }
                 }
-                
-                
-                drawOrUpdateRecordProgessLable()
+
+//                drawOrUpdateRecordProgessLable()
             } else {}
             events.removeLast()
             let dur = itemModels[currItemModelIndex].materialDuraion
@@ -1035,7 +1038,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 self?.currentPlayRecordIndex = -1
                 didPlayToEndTime(recordedAudio, newItem)
             }
-            _ = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { time in
+            _ = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { time in
                 periodicTimeObserver(time, newItem)
             } as? NSKeyValueObservation
         }
@@ -1144,8 +1147,8 @@ public class BFRecordScreenController: BFBaseViewController {
                 itemModels.append(itemModel)
                 if index == 0 {
                     if asset.mediaType == .video {
-                        itemModel.fetchAVUrlAsset = { [weak self, weak itemModel] uralss in
-                            self?.export(avsss:uralss)
+                        itemModel.fetchAVUrlAsset = { [weak self, weak itemModel] _ in
+//                            self?.export(avsss:uralss)
                             DispatchQueue.main.async { [weak self] in
                                 self?.progressThumV.recordItem = itemModel
                                 self?.progressThumV.isHidden = false
@@ -1198,7 +1201,7 @@ public class BFRecordScreenController: BFBaseViewController {
         } else {
             assetPlayer = AVPlayer(playerItem: item)
             assetPlayer?.volume = 0
-            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { [weak self] time in
+            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
                 // progress monitoring
                 self?.periodicTimeObserver(item: item, time: time)
 
@@ -1215,15 +1218,19 @@ public class BFRecordScreenController: BFBaseViewController {
 
         NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer?.currentItem, queue: .main) { [weak self] notify in
             BFLog(1, message: "AVPlayerItemDidPlayToEndTime = \(notify)")
-            self?.isNormalPlaying = false
-            //            self?.assetPlayer?.seek(to: CMTime.zero)
-            //            self?.progressThumV.progress = 0
-            self?.isEndPlay = true
-            //            self?.currentPlayRecordIndex = -1
+            if (self?.currItemModelIndex ?? 0) < ((self?.itemModels.count ?? 0) - 1) {
+                self?.collectionView.setContentOffset(CGPoint(x: CGFloat((self?.currItemModelIndex ?? 0) + 1) * (self?.collectionView.frame.width ?? 0), y: 0), animated: true)
+            } else {
+                self?.isNormalPlaying = false
+                //            self?.assetPlayer?.seek(to: CMTime.zero)
+                //            self?.progressThumV.progress = 0
+                self?.isEndPlay = true
+                //            self?.currentPlayRecordIndex = -1
+                self?.recordBtn.isHidden = true
+            }
             if self?.isRecording ?? false {
                 self?.endRecord()
             }
-            self?.recordBtn.isHidden = true
         }
     }
 
@@ -1232,9 +1239,9 @@ public class BFRecordScreenController: BFBaseViewController {
     func periodicTimeObserver(item: AVPlayerItem?, time: CMTime) {
         // progress monitoring
         if itemModels[currItemModelIndex].mediaType == .VIDEO {
-            currentAssetProgress = CMTime(seconds: time.seconds, preferredTimescale: 1000)
-            BFLog(1, message: "curr:\(CMTimeGetSeconds(currentAssetProgress))")
             if CMTimeGetSeconds(item?.duration ?? CMTime.zero) > 0 {
+                currentAssetProgress = CMTime(seconds: time.seconds, preferredTimescale: 1000)
+                BFLog(1, message: "curr:\(CMTimeGetSeconds(currentAssetProgress))")
                 DispatchQueue.main.async { [weak self] in
                     self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(time).formatDurationToHMS())
                     let su = !(self?.isDragingProgressSlder ?? false) || (self?.isRecording ?? false && self?.isNormalPlaying ?? false)
@@ -1287,7 +1294,7 @@ public class BFRecordScreenController: BFBaseViewController {
             guard let sself = self else {
                 return
             }
-            
+
             sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
                 vv.removeFromSuperview()
             }
@@ -1338,14 +1345,14 @@ extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
 
         let payload = dicResult?["payload"] as? [String: Any]
 
-        BFLog(message: "recognition result:) \((payload?["result"])!) startTime:\((self.recorderManager.voiceModel?.startTime ?? 0.0))")
+        BFLog(message: "recognition result:) \((payload?["result"])!) startTime:\(recorderManager.voiceModel?.startTime ?? 0.0)")
         DispatchQueue.main.async {
-            //1. Save the subtitle data: begin_time is when the text starts to appear and time is when it stops appearing, both in milliseconds and relative to the whole recorded audio. self.recorderManager.voiceModel?.startTime is the time recording started; both begin and end must be offset by it.
-            
+            // 1. Save the subtitle data: begin_time is when the text starts to appear and time is when it stops appearing, both in milliseconds and relative to the whole recorded audio. self.recorderManager.voiceModel?.startTime is the time recording started; both begin and end must be offset by it.
+
             let newSubtitle = PQEditSubTitleModel()
             newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["begin_time"]) as? Int) ?? 0) / 1000.0
             // Unit: milliseconds.
-            newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) +  Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
+            newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
             var showText = ((payload?["result"]) as? String) ?? ""
             if showText.count > subtitleMaxlength {
                 showText = showText.substring(to: subtitleMaxlength)
@@ -1411,10 +1418,15 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             currItemModelIndex = page
             let recordItem = itemModels[currItemModelIndex]
             // redraw the recording area
-            drawOrUpdateRecordProgessLable()
+            indirectionView?.resetAllSubViews(items: recordItem.voiceStickers, percenWidth: recordItem.mediaType == .IMAGE ? progressThumV.thumbImageWidth / 2.0 : 0, totalDuration: recordItem.materialDuraion)
             // update the thumbnails
             progressThumV.recordItem = recordItem
             progressThumV.isHidden = false
+            // reset the indicator
+            currentAssetProgress = CMTime(seconds: 0, preferredTimescale: 1000)
+            // reset the players
+            assetPlayer?.seek(to: CMTime.zero)
+            recordPlayer?.seek(to: CMTime.zero)
             if recordItem.mediaType == .VIDEO {
                 let currCell: BFImageCoverViewCell? = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell
                 setAudioPlay(item: recordItem.playItem)
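
Review note: the comments in the recorder progress callback above explain why the indicator is driven by the recorder's progress, with a fixed ratioX of 0.08 s subtracted to absorb the observed ~80 ms recorder/player gap. A minimal standalone sketch of that mapping, using hypothetical names (ProgressMapper, segmentOriginX, segmentWidth are not part of this kit):

import CoreGraphics

struct ProgressMapper {
    var recordStartTime: Double = 0      // seconds; captured when recording starts
    let recorderLatency: Double = 0.08   // assumed recorder-vs-player gap of roughly 80 ms

    // X origin of the current segment: where recording started, scaled to points.
    func segmentOriginX(pointsPerSecond: CGFloat) -> CGFloat {
        CGFloat(recordStartTime) * pointsPerSecond
    }

    // Width of the current segment: elapsed recording time minus the latency, scaled to points.
    func segmentWidth(recorderProgress: Double, pointsPerSecond: CGFloat) -> CGFloat {
        CGFloat(max(0, recorderProgress - recorderLatency)) * pointsPerSecond
    }
}

// e.g. recording started at 2.0 s and the recorder reports 1.0 s of audio, at 5 pt/s:
// origin = 10 pt, width = (1.0 - 0.08) * 5 = 4.6 pt
let mapper = ProgressMapper(recordStartTime: 2.0)
let originX = mapper.segmentOriginX(pointsPerSecond: 5)
let segWidth = mapper.segmentWidth(recorderProgress: 1.0, pointsPerSecond: 5)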

+ 61 - 21
BFRecordScreenKit/Classes/RecordScreen/View/BFIndirectionProgressView.swift

@@ -16,6 +16,7 @@ class BFIndirectionProgressView: UIView {
     var progressHeight: CGFloat = 6
     var percenWidth: CGFloat = 0
     var totalDuration: Float64 = 0
+    var currentItem: UIView? // the item view currently being drawn
 
     override private init(frame: CGRect) {
         super.init(frame: frame)
@@ -25,6 +26,7 @@ class BFIndirectionProgressView: UIView {
         super.init(frame: frame)
         self.indirecColor = indirecColor
         self.themeColor = themeColor
+        self.totalDuration = totalDuration
         self.progressHeight = progressHeight
         self.percenWidth = percenWidth
         if self.percenWidth <= 0, totalDuration > 0 {
@@ -36,52 +38,90 @@ class BFIndirectionProgressView: UIView {
         fatalError("init(coder:) has not been implemented")
     }
 
-    func updateProgressViews(items: [PQVoiceModel]) {
+    /// Redraw all item views
+    /// - Parameter items: voice models whose time ranges should be drawn
+    func resetAllSubViews(items: [PQVoiceModel]?, percenWidth: CGFloat = 0, totalDuration: Float64) {
+        self.totalDuration = totalDuration
+        self.percenWidth = percenWidth
+        if self.percenWidth <= 0, totalDuration > 0 {
+            self.percenWidth = frame.width / totalDuration
+        }
         subviews.forEach { vv in
             vv.removeFromSuperview()
         }
-        items.forEach { model in
-            createItemView(minX: model.startTime * percenWidth, width: (model.endTime - model.startTime) * percenWidth)
+        items?.forEach { model in
+            _ = createItemView(minX: model.startTime * percenWidth, width: (model.endTime - model.startTime) * percenWidth)
         }
     }
 
-    func setProgress(index: Int, start: CGFloat = 0, progress: Float64) {
-        if subviews.count > index {
-            BFLog(message: "set progress-\(index),\(progress),\(progress * percenWidth)")
-            subviews[index].frame.size.width = progress * percenWidth
-        } else {
-            BFLog(message: "set progress - adding a recording: \(index),\(progress),\(start * percenWidth)")
-            createItemView(minX: start * percenWidth)
+    /// Set the drawing progress of the current item
+    /// - Parameters:
+    ///   - start: start time (in seconds) of the segment being recorded
+    ///   - progress: elapsed recording time (in seconds) measured from that start
+    func setProgress(start: CGFloat = 0, progress: Float64) {
+        BFLog(message: "recording progress -- indicator: progress=\(progress),duration=\(totalDuration),w=\(frame.width),perW=\(percenWidth),totalW:\(progress * percenWidth)")
+        detectionAndCreateItem(start: start, progress: progress)
+        currentItem?.frame.size.width = progress < 0 ? 0 : progress * percenWidth
+        BFLog(message: "current view: \(String(describing: currentItem))")
+    }
+
+    /// Look up the item for the current recording, creating one if none exists
+    /// - Parameter start: start time (in seconds) used to locate or create the item
+    func detectionAndCreateItem(start: CGFloat = 0, progress: Float64) {
+        if currentItem == nil {
+            currentItem = detectionItem(start: start, progress: progress)
         }
-        
     }
 
-    func currentItem(start: CGFloat = 0, progress: Float64) -> UIView {
+    /// Find the item view whose time range overlaps the given one
+    /// - Parameters:
+    ///   - start: start time (in seconds) of the range
+    ///   - progress: elapsed time (in seconds) measured from that start
+    /// - Returns: the overlapping item view, or a newly created one if none overlaps
+    func detectionItem(start: CGFloat = 0, progress: Float64) -> UIView {
         let newRange = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), end: CMTime(seconds: start + progress, preferredTimescale: 1000))
         var currentIndex: Int?
-        for (index,item) in subviews.enumerated() {
+        for (index, item) in subviews.enumerated() {
             let originRange = CMTimeRange(start: CMTime(seconds: item.frame.minX / percenWidth, preferredTimescale: 1000), end: CMTime(seconds: item.frame.width / percenWidth, preferredTimescale: 1000))
             if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
                 currentIndex = index
-//                if
                 break
             }
         }
         if currentIndex != nil {
+            BFLog(message: "found an existing item; reusing it")
             return subviews[currentIndex!]
         } else {
-           return createItemView(minX: start * percenWidth)
+            BFLog(message: "no existing item found; creating one at \(start)")
+            return createItemView(minX: start * percenWidth)
         }
     }
-    
-    func deleteItem(index: Int) {
-        subviews[index].removeFromSuperview()
+
+    /// Reset the current item after recording ends
+    func resetCurrentItem(start: CGFloat, end: CGFloat) {
+        currentItem?.frame.origin.x = start * percenWidth
+        currentItem?.frame.size.width = (end - start) * percenWidth
+        currentItem = nil
+    }
+
+    /// Remove an item view
+    /// - Parameter index: index of the subview to remove
+    func deleteItem(index: Int = 0) {
+        if index >= 0, index < subviews.count {
+            subviews[index].removeFromSuperview()
+        }
     }
 
-    func createItemView(minX: CGFloat, width: CGFloat = 0, indirec: Bool = false)  -> UIView{
-        let lineV = UIView(frame: CGRect(x: minX, y: 0, width: width, height: progressHeight))
+    /// Create a new item view
+    /// - Parameters:
+    ///   - minX: x origin of the view in points (clamped to 0 if negative)
+    ///   - width: initial width in points
+    ///   - indirec: whether to fill with the indirect color instead of the theme color
+    /// - Returns: the newly added view
+    func createItemView(minX: CGFloat, width: CGFloat = 0, indirec: Bool = false) -> UIView {
+        let lineV = UIView(frame: CGRect(x: minX < 0 ? 0 : minX, y: 0, width: width, height: progressHeight))
         lineV.backgroundColor = indirec ? indirecColor : themeColor
-        lineV.tag = indirec ? 1 : 2
         addSubview(lineV)
         return lineV
     }
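
Review note: BFIndirectionProgressView maps time to pixels with a single linear scale (percenWidth is points-per-second, falling back to frame.width / totalDuration). A minimal sketch of that mapping in plain Swift; the helper names below are illustrative only and do not exist in the kit:

import CoreGraphics

// percenWidth is points-per-second: the view's width divided by the total duration.
func pointsPerSecond(viewWidth: CGFloat, totalDuration: Double) -> CGFloat {
    guard totalDuration > 0 else { return 0 }
    return viewWidth / CGFloat(totalDuration)
}

// A recorded segment [startTime, endTime] becomes a frame inside the progress view.
func segmentFrame(startTime: Double, endTime: Double, pointsPerSecond: CGFloat, height: CGFloat = 6) -> CGRect {
    let x = max(0, CGFloat(startTime) * pointsPerSecond)
    let width = CGFloat(max(0, endTime - startTime)) * pointsPerSecond
    return CGRect(x: x, y: 0, width: width, height: height)
}

// e.g. a 0.0–2.5 s segment inside a 300 pt view over a 60 s asset is 12.5 pt wide.
let pps = pointsPerSecond(viewWidth: 300, totalDuration: 60)              // 5 pt/s
let segment = segmentFrame(startTime: 0, endTime: 2.5, pointsPerSecond: pps)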

+ 5 - 4
BFRecordScreenKit/Classes/RecordScreen/View/BFVideoThumbProgressView.swift

@@ -182,12 +182,13 @@ class BFVideoThumbProgressView: UIView {
     /// Update the progress
     /// - Parameter progress: current playback progress in seconds
     func updateProgress(progress: Double = 0) {
-        if  progressView.contentSize.width <= 0 {
+        if progressView.contentSize.width <= 0 {
             return
         }
         if recordItem?.mediaType == .VIDEO {
             if let second = recordItem?.videoAsset?.duration.seconds, second > 0 {
                 let w = progressView.contentSize.width - width
+                BFLog(message: "recording progress -- thumb indicator: progress=\(progress),duration=\(second),w=\(w),perW=\(Double(w) / second),totalW:\(progress * Double(w) / second)")
                 progressView.contentOffset = CGPoint(x: progress * Double(w) / second, y: 0)
             }
         } else if recordItem?.mediaType == .IMAGE {
@@ -208,10 +209,10 @@ extension BFVideoThumbProgressView: UIScrollViewDelegate {
                 dragScrollProgressHandle?(false, Float(dur))
             }
         } else if recordItem?.mediaType == .IMAGE {
-            if scrollView.contentOffset.x > CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0 {
-                scrollView.contentOffset = CGPoint(x: CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0, y: 0)
-            }
             if isDrag {
+                if scrollView.contentOffset.x > CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0 {
+                    scrollView.contentOffset = CGPoint(x: CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0, y: 0)
+                }
                 let dur = scrollView.contentOffset.x / (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0)
                 dragScrollProgressHandle?(false, Float(dur))
             }
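
Review note: updateProgress above keeps the same offset formula and only adds a log line. As a quick sanity check, a hedged sketch of that mapping with hypothetical names and values (none of them from this kit):

import CoreGraphics

// contentOffset.x = progress * scrollableWidth / duration, where scrollableWidth is
// the thumbnail strip's content width minus the visible width.
func thumbOffsetX(progress: Double, contentWidth: CGFloat, visibleWidth: CGFloat, duration: Double) -> CGFloat {
    guard duration > 0 else { return 0 }
    let scrollable = contentWidth - visibleWidth
    return CGFloat(progress / duration) * scrollable
}

// e.g. 15 s into a 60 s video, with a 1200 pt strip shown in a 375 pt wide view,
// scrolls the strip to x = 0.25 * 825 = 206.25 pt.
let offsetX = thumbOffsetX(progress: 15, contentWidth: 1200, visibleWidth: 375, duration: 60)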