
Set the current playback progress

harry · 3 years ago
commit 2387598f1b

+ 1 - 2
BFRecordScreenKit/Classes/BFRecordExport.swift.swift → BFRecordScreenKit/Classes/BFRecordExport.swift

@@ -1,5 +1,5 @@
 //
-//  BFRecordExport.swift.swift
+//  BFRecordExport.swift
 //  BFRecordScreenKit
 //
 //  Created by 胡志强 on 2021/11/25.
@@ -135,7 +135,6 @@ public class BFRecordExport {
                         let semaphore = DispatchSemaphore(value: 0)
                         PQPlayerViewModel.mergeAudios(urls: list) { completURL in
                             audioUrl = completURL
-                            BFLog(1, message: "waiting synchronously for the async merge")
                             semaphore.signal()
                         }
                         _ = semaphore.wait(timeout: .now() + 5)
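
Note on the hunk above: the export waits for the asynchronous PQPlayerViewModel.mergeAudios callback by signalling a DispatchSemaphore, capped at 5 seconds so a stalled merge cannot hang the export. A minimal sketch of that pattern, with a hypothetical mergeAudios stand-in (it blocks the caller, so it should run off the main thread):

import Foundation

// Hypothetical async API standing in for PQPlayerViewModel.mergeAudios(urls:completion:).
func mergeAudios(urls: [URL], completion: @escaping (URL?) -> Void) {
    DispatchQueue.global().async {
        // ... merging work would happen here ...
        completion(urls.first)
    }
}

// Bridge the async callback into a synchronous step with a bounded wait.
func mergeSynchronously(urls: [URL]) -> URL? {
    var merged: URL?
    let semaphore = DispatchSemaphore(value: 0)
    mergeAudios(urls: urls) { url in
        merged = url
        semaphore.signal()                    // wake the waiting caller
    }
    _ = semaphore.wait(timeout: .now() + 5)   // give up after 5 s instead of hanging
    return merged
}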

+ 17 - 15
BFRecordScreenKit/Classes/BFRecordScreenController.swift

@@ -29,6 +29,7 @@ public class BFRecordScreenController: BFBaseViewController {
             playBtn.isSelected = isNormalPlaying
         }
     }
+    var currentAssetProgress : CMTime = .zero   // playback progress of the current asset
     // video asset
     public var avasset:AVURLAsset?
     public var recordList:[PQVoiceModel] = [PQVoiceModel]()
@@ -56,20 +57,22 @@ public class BFRecordScreenController: BFBaseViewController {
             
         }
         manager.endRecordHandle = {[weak self] (isTimeout, model) in
-            if FileManager.default.fileExists(atPath: model?.wavFilePath ?? ""){
+            if let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? ""){
                 // append it to the recording list
-                model?.endTime = CMTimeGetSeconds(self?.assetPlayer?.currentItem?.currentTime() ?? CMTime.zero)
+                let ass = AVURLAsset(url: URL(fileURLWithPath: model.wavFilePath))
+                
+                model.endTime = model.startTime + CMTimeGetSeconds(ass.duration)
                 while let m = self?.recordList.last{
-                    if model!.startTime < m.startTime {
+                    if model.startTime < m.startTime {
                         self?.recordList.removeLast()
-                    }else if m.endTime > model!.startTime {
-                        m.endTime = model!.startTime
+                    }else if m.endTime > model.startTime {
+                        m.endTime = model.startTime
                     }else{
                         break
                     }
                 }
-                BFLog(1, message: "added recording file: \(model?.startTime) -- \(model?.endTime)")
-                self?.recordList.append(model!)
+                BFLog(1, message: "added recording file: \(model.startTime) -- \(model.endTime)")
+                self?.recordList.append(model)
                 self?.drawOrUpdateRecordProgessLable()
             }
             
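
Note on the hunk above: a segment's endTime is now derived from the recorded file's own duration (startTime + AVURLAsset duration) rather than from the player's current time, and earlier segments that the new recording overlaps are dropped or trimmed. A self-contained sketch of that bookkeeping, with VoiceSegment as a simplified stand-in for PQVoiceModel:

import AVFoundation

// Simplified stand-in for PQVoiceModel: only the fields used here.
final class VoiceSegment {
    var startTime: Double = 0
    var endTime: Double = 0
    var wavFilePath: String = ""
}

// Derive endTime from the recorded file itself, then drop or trim older
// segments that the new recording overwrites.
func append(_ segment: VoiceSegment, to list: inout [VoiceSegment]) {
    let asset = AVURLAsset(url: URL(fileURLWithPath: segment.wavFilePath))
    segment.endTime = segment.startTime + CMTimeGetSeconds(asset.duration)

    while let last = list.last {
        if segment.startTime < last.startTime {
            list.removeLast()                 // fully overwritten: discard it
        } else if last.endTime > segment.startTime {
            last.endTime = segment.startTime  // partial overlap: trim its tail
        } else {
            break                             // no overlap with older segments
        }
    }
    list.append(segment)
}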
@@ -243,16 +246,14 @@ public class BFRecordScreenController: BFBaseViewController {
         }
         
         progessSildeBackV.snp.makeConstraints { make in
-            make.left.equalTo(closeBtn.snp.right).offset(10)
-            make.right.equalTo(nextBtn.snp.left).offset(-10)
+            make.left.equalTo(closeBtn.snp.right).offset(16)
+            make.right.equalTo(nextBtn.snp.left).offset(-16)
             make.centerY.equalTo(closeBtn)
             make.height.equalTo(8)
         }
         
         progessSilde.snp.makeConstraints { make in
-            make.left.equalTo(closeBtn.snp.right).offset(16)
-            make.right.equalTo(nextBtn.snp.left).offset(-16)
-            make.centerY.equalTo(progessSildeBackV)
+            make.left.right.centerY.equalTo(progessSildeBackV)
             make.height.equalTo(20)
         }
         
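
Note on the hunk above: instead of repeating the 16 pt offsets on both views, the background track now carries the insets and the slider simply mirrors its edges and centre line. A small SnapKit sketch of the same idea (view names are illustrative):

import UIKit
import SnapKit

// trackView and slider are assumed to already be subviews of the same parent.
func layoutProgressBar(trackView: UIView, slider: UIView, leftAnchor: UIView, rightAnchor: UIView) {
    trackView.snp.makeConstraints { make in
        make.left.equalTo(leftAnchor.snp.right).offset(16)
        make.right.equalTo(rightAnchor.snp.left).offset(-16)
        make.centerY.equalTo(leftAnchor)
        make.height.equalTo(8)
    }
    slider.snp.makeConstraints { make in
        make.left.right.centerY.equalTo(trackView)  // follow the track; no duplicated offsets
        make.height.equalTo(20)
    }
}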
@@ -307,7 +308,7 @@ public class BFRecordScreenController: BFBaseViewController {
         pause()
 
         let model = PQVoiceModel()
-        model.startTime = CMTimeGetSeconds(assetPlayer?.currentItem?.currentTime() ?? CMTime.zero)
+        model.startTime = CMTimeGetSeconds(self.currentAssetProgress)
         recorderManager.voiceModel? = model
         recorderManager.startRecord(index: recordList.count)
         movie?.startProcessing()
@@ -434,8 +435,8 @@ public class BFRecordScreenController: BFBaseViewController {
                 // two cases: if it is already playing, skip; if it was paused and dragged to a point, seek
                 if currentPlayRecordIndex == -1 {
                     let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
-                    recordPlayer?.seek(to: CMTime(value: CMTimeValue(second), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
-                        if finished {
+                    recordPlayer?.seek(to: CMTime(value: CMTimeValue(second), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: {[weak self] finished in
+                        if finished && (self?.isNormalPlaying ?? false) {
                             DispatchQueue.main.async {[weak self] in
                                 self?.recordPlayer?.play()
                             }
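
Note on the hunk above: the completion handler now captures self weakly and re-checks isNormalPlaying before resuming, because the user may have paused while the asynchronous seek was still in flight. A sketch of that pattern (player and the isNormalPlaying check are stand-ins for the controller's properties; the target time is expressed in the 1/100-second timescale):

import AVFoundation

func seekRecordPlayer(_ player: AVPlayer, toSecond second: Double, isNormalPlaying: @escaping () -> Bool) {
    let target = CMTime(value: CMTimeValue(second * 100), timescale: 100)
    let tolerance = CMTime(value: 1, timescale: 1000)
    player.seek(to: target, toleranceBefore: tolerance, toleranceAfter: tolerance) { finished in
        // The playback state may have changed while the seek was pending; check again.
        if finished && isNormalPlaying() {
            DispatchQueue.main.async { player.play() }
        }
    }
}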
@@ -580,6 +581,7 @@ public class BFRecordScreenController: BFBaseViewController {
             assetPlayer = AVPlayer(playerItem: item)
             avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) {[weak self] time in
              //    progress monitoring
+                self?.currentAssetProgress = time
                 BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
                 if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
                     DispatchQueue.main.async { [weak self] in
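
Note on the hunk above (the core of this commit): a periodic time observer caches the latest playback time in currentAssetProgress, so other code paths, such as startRecord setting a segment's startTime, read the cached value instead of querying assetPlayer?.currentItem?.currentTime() again. A standalone sketch of that caching, with illustrative names:

import AVFoundation

final class PlaybackProgressCache {
    private(set) var currentAssetProgress: CMTime = .zero
    private var observer: Any?

    func attach(to player: AVPlayer) {
        let interval = CMTime(value: 1, timescale: 100)   // 10 ms reporting interval
        observer = player.addPeriodicTimeObserver(forInterval: interval, queue: .global()) { [weak self] time in
            self?.currentAssetProgress = time             // remember the latest progress
        }
    }

    // e.g. the startTime for a newly started voice segment
    var currentSeconds: Double { CMTimeGetSeconds(currentAssetProgress) }

    func detach(from player: AVPlayer) {
        if let observer = observer { player.removeTimeObserver(observer) }
        observer = nil
    }
}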