
1.image record play

wenweiwei 3 years ago
parent
commit
64d20928f6

+ 38 - 24
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -32,8 +32,6 @@ public class BFRecordScreenController: BFBaseViewController {
 
     public var assets = [PHAsset]()
     var currItemModelIndex = 0
-    // Playback position of the current image recording
-    var currImagePlayIndex = 0
     public var itemModels = [BFRecordItemModel]()
     // add by ak: index of the currently displayed subtitle
     var showSubtitleIndex = 0
@@ -66,6 +64,8 @@ public class BFRecordScreenController: BFBaseViewController {
     }
 
     var currentAssetProgress: CMTime = .zero // Playback progress of the current material
+    // Time at which the player starts playing
+    var recordStartPlayTime: CMTime = .zero
     var recordStartTime: Double = 0 // Recording start time
     var pauseTime: Double = 0 // Time point when stopped with no further operation
 
@@ -105,6 +105,17 @@ public class BFRecordScreenController: BFBaseViewController {
         manager.endRecordHandle = { [weak self] model, _ in
             if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
                 // Append to the voice array
+                // Note: for images, correct the current progress after recording ends
+                if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
+                    var duration: Double = 0
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
+                        if tempModel.endTime < sself.currentAssetProgress.seconds {
+                            duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
+                        }
+                    }
+                    duration = duration + (Double(model.duration ?? "0") ?? 0)
+                    sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
+                }
                 model.endTime = sself.currentAssetProgress.seconds
 
                 let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
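
Note: a minimal sketch of the image-progress correction above, assuming segment durations are stored as optional strings (as the Double(model.duration ?? "0") handling suggests). VoiceSticker and correctedProgress are illustrative names, not the actual BFRecordScreenKit API.

import CoreMedia

// Illustrative stand-in for PQVoiceModel: duration kept as a string, times in seconds.
struct VoiceSticker {
    var startTime: Double
    var endTime: Double
    var duration: String?
}

// For an image item, the corrected progress is the summed duration of every segment
// that ended before the current progress, plus the segment that just finished recording.
func correctedProgress(current: CMTime, stickers: [VoiceSticker], justEnded: VoiceSticker) -> CMTime {
    var total = stickers
        .filter { $0.endTime < current.seconds }
        .reduce(0.0) { $0 + (Double($1.duration ?? "0") ?? 0) }
    total += Double(justEnded.duration ?? "0") ?? 0
    return CMTime(seconds: total, preferredTimescale: 1000)
}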
@@ -134,12 +145,10 @@ public class BFRecordScreenController: BFBaseViewController {
                 sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
                 if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
                     var duration: Double = 0
-                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { _ in
-                        duration = duration + (Double(model.duration ?? "0") ?? 0)
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
+                        duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
                     }
                     sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
-                    sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
-                    model.endTime = sself.currentAssetProgress.seconds
                     self?.isEndPlay = true
                 }
                 DispatchQueue.main.async { [weak self] in
@@ -147,6 +156,7 @@ public class BFRecordScreenController: BFBaseViewController {
                     self?.changeWithDrawBtnLayout(true)
                     // Note: correct the current position when recording ends so it stays aligned with the indicator
                     self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
+                    self?.progressThumV.progress = self?.currentAssetProgress.seconds ?? 0
                     self?.deleteRecordBtn.isHidden = true
                     self?.recordBtn.isHidden = false
                 }
@@ -762,12 +772,12 @@ public class BFRecordScreenController: BFBaseViewController {
 
         let model = PQVoiceModel()
         // Start time
-        model.startTime = itemModels[currItemModelIndex].mediaType == .IMAGE ? itemModels[currItemModelIndex].materialDuraion : currentAssetProgress.seconds
+        model.startTime = currentAssetProgress.seconds
         model.volume = 100
         recorderManager.voiceModel = model
         recorderManager.startRecord(index: 1)
         if recordStartTime <= 0 {
-            recordStartTime = itemModels[currItemModelIndex].mediaType == .IMAGE ? itemModels[currItemModelIndex].materialDuraion : currentAssetProgress.seconds
+            recordStartTime = currentAssetProgress.seconds
         }
         // Add an undo checkpoint
         events.append(WithDrawModel(type: 2, timestamp: model.startTime))
@@ -1061,15 +1071,18 @@ public class BFRecordScreenController: BFBaseViewController {
         if currentPlayRecordIndex == -3 { // Recording just finished, no need to play
             return
         }
-
+        let type = itemModels[currItemModelIndex].mediaType
         let (shouldPlayRecordIndex, recordedAudio) = itemModels[currItemModelIndex].voiceStickers.enumerated().first { model in
-            model.1.endTime > CMTimeGetSeconds(currentT)
+            if type == .IMAGE {
+                return model.1.startTime >= CMTimeGetSeconds(currentT)
+            } else {
+                return model.1.endTime > CMTimeGetSeconds(currentT)
+            }
         } ?? (-1, nil)
 
         guard let recordedAudio = recordedAudio else {
             return
         }
-
         BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startTime), \(recordedAudio.endTime), \(recordedAudio.wavFilePath ?? "")")
 
         // Create the player
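
Note: the lookup above switches its predicate by media type: for images it picks the first sticker whose startTime is at or after the current time, for video the first whose endTime is still ahead. A hedged, self-contained sketch of that selection; the names are illustrative, not the controller's real API.

enum MediaType { case image, video }

// Mirrors the enumerated().first { ... } lookup in the diff: images match on startTime,
// video matches on endTime, so a segment that just finished recording is not replayed.
func segmentToPlay(at currentSeconds: Double,
                   segments: [(start: Double, end: Double)],
                   type: MediaType) -> (index: Int, segment: (start: Double, end: Double))? {
    for (index, segment) in segments.enumerated() {
        let matches = type == .image
            ? segment.start >= currentSeconds
            : segment.end > currentSeconds
        if matches { return (index, segment) }
    }
    return nil
}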
@@ -1183,7 +1196,7 @@ public class BFRecordScreenController: BFBaseViewController {
         //        movie?.cancelProcessing()
         assetPlayer?.pause()
         recordPlayer?.pause()
-
+        recordStartPlayTime = CMTime.zero
         pauseTime = currentAssetProgress.seconds
 
         assetPlayer?.seek(to: currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { _ in
@@ -1537,17 +1550,18 @@ public extension BFRecordScreenController {
                 return
             }
             isNormalPlaying = true
-            BFLog(message: "开始播放第\(currImagePlayIndex)段:\(itemModels[currItemModelIndex].voiceStickers[currImagePlayIndex].startTime)")
-            playRecord(at: CMTime(seconds: itemModels[currItemModelIndex].voiceStickers[currImagePlayIndex].startTime, preferredTimescale: 1000), periodicTimeObserver: { [weak self] currentT, currentItem in
-                BFLog(message: "播放第\(self?.currImagePlayIndex ?? 0)段进度:\(currentT),\(currentItem)")
+            playRecord(at: currentAssetProgress, periodicTimeObserver: { [weak self] currentT, currentItem in
+                BFLog(message: "播放第段进度:\(currentT),\(currentItem)")
                 self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))
-            }, didPlayToEndTime: { [weak self] recordedAudio, currentItem in
-                BFLog(message: "播放第\(self?.currImagePlayIndex ?? 0)段结束:\(String(describing: recordedAudio)),\(String(describing: currentItem))")
-                if (recordedAudio?.endTime ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endTime ?? 0) {
+            }, didPlayToEndTime: { [weak self] recordItem, currentItem in
+                BFLog(message: "播放第段结束:\(String(describing: recordItem)),\(String(describing: currentItem))")
+                if (recordItem?.endTime ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endTime ?? 0) {
                     self?.isEndPlay = true
                     self?.pause()
                 } else {
-                    self?.currImagePlayIndex += 1
+                    self?.currentAssetProgress = CMTime(seconds: recordItem?.endTime ?? 0, preferredTimescale: 1000)
+                    // Reset the recording playback start time when playback starts
+                    self?.recordStartPlayTime = self?.currentAssetProgress ?? CMTime.zero
                     self?.imageRecordPlay()
                 }
             }) { [weak self] _, _ in
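
Note: a rough sketch of the chained image playback introduced above, under the assumption that segments play back to back and the last sticker's endTime marks the end of the item; the function and parameter names are hypothetical.

import CoreMedia

// When one recorded segment finishes: either everything has played (pause), or the
// progress jumps to that segment's end, the playback baseline is reset, and the next
// segment starts.
func handleSegmentEnd(endedAt endTime: Double,
                      lastSegmentEnd: Double,
                      currentProgress: inout CMTime,
                      recordStartPlayTime: inout CMTime,
                      playNext: () -> Void,
                      pause: () -> Void) {
    if endTime >= lastSegmentEnd {
        pause()
    } else {
        currentProgress = CMTime(seconds: endTime, preferredTimescale: 1000)
        recordStartPlayTime = currentProgress
        playNext()
    }
}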
@@ -1563,14 +1577,14 @@ public extension BFRecordScreenController {
         if isRecord {
             currentAssetProgress = CMTime(seconds: itemModels[currItemModelIndex].materialDuraion + progress, preferredTimescale: 1000)
         } else {
-            currentAssetProgress = CMTime(seconds: progress, preferredTimescale: 1000)
+            currentAssetProgress = CMTime(seconds: recordStartPlayTime.seconds + progress, preferredTimescale: 1000)
         }
-        BFLog(message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress)")
+        BFLog(message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress),\(itemModels[currItemModelIndex].materialDuraion)")
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             DispatchQueue.main.async { [weak self] in
-                self?.progreddL.text = String(format: "%@", ((self?.currentAssetProgress.seconds ?? 0) + (isRecord ? 0 : progress)).formatDurationToHMS())
-                self?.progressThumV.progress = ((self?.currentAssetProgress.seconds ?? 0) + (isRecord ? 0 : progress))
-                self?.updateSubtitle(time: CMTime(value: CMTimeValue((self?.currentAssetProgress.seconds ?? 0) + (isRecord ? 0 : progress)), timescale: 1))
+                self?.progreddL.text = String(format: "%@", (self?.currentAssetProgress.seconds ?? 0).formatDurationToHMS())
+                self?.progressThumV.progress = (self?.currentAssetProgress.seconds ?? 0)
+                self?.updateSubtitle(time: self?.currentAssetProgress ?? CMTime.zero)
             }
         }
     }
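
Note: the progress callback above now offsets the reported elapsed time by a baseline: materialDuraion while recording, recordStartPlayTime while playing back. A small sketch of that mapping, with hypothetical names:

import CoreMedia

// elapsed is the time reported for the current segment only; the absolute position
// adds it to the relevant baseline.
func absoluteProgress(elapsed: Double,
                      isRecording: Bool,
                      materialDuration: Double,
                      recordStartPlayTime: CMTime) -> CMTime {
    let base = isRecording ? materialDuration : recordStartPlayTime.seconds
    return CMTime(seconds: base + elapsed, preferredTimescale: 1000)
}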

+ 4 - 1
BFRecordScreenKit/Classes/RecordScreen/View/BFIndirectionProgressView.swift

@@ -41,7 +41,7 @@ class BFIndirectionProgressView: UIView {
     /// Redraw the view
     /// - Parameter items: <#items description#>
     func resetAllSubViews(items: [PQVoiceModel]?, percenWidth: CGFloat = 0, totalDuration: Float64) {
-        frame = superview?.bounds ?? CGRect.zero
+        frame.size.width = superview?.bounds.width ?? 0
         self.totalDuration = totalDuration
         self.percenWidth = percenWidth
         if self.percenWidth <= 0, totalDuration > 0 {
@@ -62,6 +62,9 @@ class BFIndirectionProgressView: UIView {
     ///   - progress: <#progress description#>
     func setProgress(start: CGFloat = 0, progress: Float64) {
         BFLog(message: "录音进度--指示器Indir:progress=\(progress),duration=\(totalDuration),w=\(frame.width),perW=\(percenWidth),totalW:\(progress * percenWidth)")
+        if start * percenWidth >= frame.width {
+            frame.size.width = superview?.bounds.width ?? 0
+        }
         detectionAndCreateItem(start: start, progress: progress)
         currentItem?.frame.size.width = progress < 0 ? 0 : progress * percenWidth
         BFLog(message: "当前view:\(String(describing: currentItem))")

+ 2 - 1
BFRecordScreenKit/Classes/RecordScreen/View/BFVideoThumbProgressView.swift

@@ -162,7 +162,8 @@ class BFVideoThumbProgressView: UIView {
 
     func appendThumb(progress: Double = 0) {
         let count: Int = Int(progress / 2)
-        if recordItem?.mediaType == .IMAGE, thumbImgs.count < (count - 5) {
+        BFLog(message: "需要的图片个数:progress=\(progress),count=\(count)")
+        if recordItem?.mediaType == .IMAGE, (thumbImgs.count - 5) < count {
             guard let image = recordItem?.coverImg else {
                 return
             }