
fix: player play triggered twice when starting recording;
withdraw (undo) state update

harry 3 years ago
Parent
Commit
ff4012d0bb

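The core of the fix: the unconditional movie?.startProcessing() / assetPlayer?.play() at the top of the record-start path is removed, and a new movieIsProcessing flag guards startProcessing() so it is issued at most once until cleanMovieTarget() clears it. Below is a minimal standalone sketch of that guard; the class and closure names are hypothetical, only the flag name and the GPUImage startProcessing/cancelProcessing calls come from this diff.

import Foundation

// Sketch of the guard-flag pattern introduced by this commit.
// The closures stand in for the controller's real calls, e.g.
// { movie?.startProcessing() } and { movie?.cancelProcessing() } (GPUImageMovie).
final class MovieProcessingGuard {
    private(set) var movieIsProcessing = false
    private let start: () -> Void
    private let cancel: () -> Void

    init(start: @escaping () -> Void, cancel: @escaping () -> Void) {
        self.start = start
        self.cancel = cancel
    }

    /// Begin frame processing only if it is not already running.
    func startIfNeeded() {
        guard !movieIsProcessing else { return }
        start()
        movieIsProcessing = true
    }

    /// Stop processing and clear the flag so a later start is allowed again.
    func cancelProcessing() {
        cancel()
        movieIsProcessing = false
    }
}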
+ 0 - 1
BFRecordScreenKit/Classes/BFVoiceRecordManager.swift

@@ -21,7 +21,6 @@ class BFVoiceRecordManager {
     var voiceModel: PQVoiceModel?
 
     /// Record audio. index was originally intended to record the order of recordings; now deprecated
-    ///
     func startRecord(index: Int) {
         recorderFilePath = exportAudiosDirectory
 

+ 49 - 36
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -75,7 +75,7 @@ public class BFRecordScreenController: BFBaseViewController {
     var playView : GPUImageView? // video display view
 
     // MARK: Behavior parameters
-
+    var movieIsProcessing = false
     var events = [WithDrawModel]() // action records, kept to make undo easy
     var isDragingProgressSlder : Bool = false // whether the progress slider is being dragged
     var isStopAtRecordRange = -1
@@ -769,35 +769,56 @@ public class BFRecordScreenController: BFBaseViewController {
         if recordStartTime <= 0 {
             recordStartTime = currentAssetProgress.seconds
         }
-        movie?.startProcessing()
-        assetPlayer?.volume = self.haveSpeakVolume
-        assetPlayer?.play()
+        // add an undo checkpoint
+        events.append(WithDrawModel(type: 2, timestamp: model.startTime))
+        
+//        DispatchQueue.main.async {[weak self] in
+//            let model = PQVoiceModel()
+//            model.startTime = self?.currentAssetProgress.seconds ?? 0
+//            model.volume = 100
+//            self?.recorderManager.voiceModel = model
+//            self?.recorderManager.startRecord(index: 1)
+//            if self?.recordStartTime ?? 0 <= 0 {
+//                self?.recordStartTime = self?.currentAssetProgress.seconds ?? 0
+//            }
+//            // add an undo checkpoint
+//            self?.events.append(WithDrawModel(type: 2, timestamp: model.startTime))
+//
+//        }
+        
+
         isRecording = true
 
         if !avatarView.isHidden {
             avatarView.beginRecord()
         }
 
-        // add an undo checkpoint
-        events.append(WithDrawModel(type: 2, timestamp: model.startTime))
 
 //        movie?.startProcessing()
 //        assetPlayer?.volume = 0
-        DispatchQueue.global().async { [weak self] in
-            guard let sself = self else {
-                return
-            }
-            sself.speechTranscriberUtil?.delegate = sself
-            sself.speechTranscriberUtil?.startTranscriber()
-            sself.speechTranscriberUtil?.currItemModelIndex = Int32(sself.currItemModelIndex)
-        }
+//        DispatchQueue.global().async { [weak self] in
+//            guard let sself = self else {
+//                return
+//            }
+//            sself.speechTranscriberUtil?.delegate = sself
+//            sself.speechTranscriberUtil?.startTranscriber()
+//            sself.speechTranscriberUtil?.currItemModelIndex = Int32(sself.currItemModelIndex)
+//        }
         if itemModels[currItemModelIndex].mediaType == .VIDEO {
+            
+            if !movieIsProcessing {
+                movie?.startProcessing()
+                movieIsProcessing = true
+            }
+            assetPlayer?.volume = 0
             assetPlayer?.play()
+            
             // paused state
             (collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
         }
+        
         recorderManager.audioRecorder?.recorderProgross = { [weak self] progress in
-            BFLog(message: "录音进度--\(progress)")
+            BFLog(1, message: "curr:录音进度--\(progress)")
             if self?.indirectionView == nil {
                 self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
                 self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
@@ -965,7 +986,7 @@ public class BFRecordScreenController: BFBaseViewController {
             //  TODO: stopped inside a recording range, show the delete button
             if fabs(elems[0].1.endTime - self.currentAssetProgress.seconds) < 0.5 {
                 BFLog(1, message: "吸附在录音结尾")
-                changeWithDrawBtnLayout(false)
+//                changeWithDrawBtnLayout(false)
                 changeProgress(progress: Float(elems[0].1.endTime / itemModels[currItemModelIndex].materialDuraion))
                 progressThumV.progress = elems[0].1.endTime
                 
@@ -976,7 +997,7 @@ public class BFRecordScreenController: BFBaseViewController {
             }else {
                 if fabs(elems[0].1.startTime - self.currentAssetProgress.seconds) < 0.5 {
                     BFLog(1, message: "吸附在录音开始")
-                    changeWithDrawBtnLayout(true)
+//                    changeWithDrawBtnLayout(true)
                     changeProgress(progress: Float(elems[0].1.startTime / itemModels[currItemModelIndex].materialDuraion))
                     progressThumV.progress = elems[0].1.startTime
                 }
@@ -1068,7 +1089,6 @@ public class BFRecordScreenController: BFBaseViewController {
                 if let playItem = player.currentItem {
                     NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
                     recordPlayer?.replaceCurrentItem(with: newItem)
-                    assetPlayer?.volume = self.noSpeakVolume
                 }
             } else {
                 recordPlayer = AVPlayer(playerItem: newItem)
@@ -1082,6 +1102,7 @@ public class BFRecordScreenController: BFBaseViewController {
             NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self, weak recordedAudio] _ in
                 self?.hadPrepareToPlayRecord = false
                 self?.currentPlayRecordIndex = -1
+                self?.assetPlayer?.volume = self?.noSpeakVolume ?? 0
                 didPlayToEndTime(recordedAudio, newItem)
             }
             _ = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { time in
@@ -1108,11 +1129,12 @@ public class BFRecordScreenController: BFBaseViewController {
                 // Two cases: if already playing, skip; if paused and dragged into the middle, seek
                 if currentPlayRecordIndex == -1, self.isNormalPlaying {
                     let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
+                    self.assetPlayer?.volume = self.haveSpeakVolume
+
                     DispatchQueue.main.async { [weak self] in
                         self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000), toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
                             if finished, self?.isNormalPlaying ?? false {
                                 self?.recordPlayer?.play()
-                                self?.assetPlayer?.volume = self?.haveSpeakVolume ?? 0
                                 BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds ?? 0)")
                             }
                         })
@@ -1124,21 +1146,7 @@ public class BFRecordScreenController: BFBaseViewController {
             }
         }
         BFLog(1, message: "应当播放:\(shouldPlayRecordIndex), 当前播放:\(currentPlayRecordIndex)")
-        //        if let recordedAudio = recordedAudio {
-        //
-        //
-        //            if shouldPlayRecordIndex != currentPlayRecordIndex {
-        //                // 设置新的播放资源
-        //                // set the new playback resource
-        ////                self.recordPlayer.delegate = self
-        //                self.recordPlayer.play()
-        //
-        //            } else {
-        //                // 更新播放进度
-        //                // update playback progress
-        //                self.recordPlayer.seek(to: CMTime(seconds: second, preferredTimescale: 25), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000))
-        //            }
-        //        }
+
     }
 
     func play() {
@@ -1161,8 +1169,11 @@ public class BFRecordScreenController: BFBaseViewController {
             currentAssetProgress = CMTime.zero
         }
         if itemModels[currItemModelIndex].mediaType == .VIDEO {
-            assetPlayer?.volume = 1
-            movie?.startProcessing()
+            assetPlayer?.volume = self.noSpeakVolume
+            if !movieIsProcessing {
+                movie?.startProcessing()
+                movieIsProcessing = true
+            }
             assetPlayer?.play()
         } else {
             // handle image audio playback
@@ -1236,6 +1247,7 @@ public class BFRecordScreenController: BFBaseViewController {
         filter.addTarget(preView)
 
         movie?.startProcessing()
+        movieIsProcessing = true
     }
 
     func setAudioPlay(item: AVPlayerItem?) {
@@ -1306,6 +1318,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
     func cleanMovieTarget() {
         movie?.cancelProcessing()
+        movieIsProcessing = false
         movie?.targets().forEach { target in
             if let objc = target as? GPUImageOutput {
                 objc.removeAllTargets()