
1. Split image/video recording playback

wenweiwei 3 years ago
parent commit 605e1d56c0

+ 93 - 49
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

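The net effect of this commit: playRecord(at:periodicTimeObserver:didPlayToEndTime:playFailed:) no longer carries the material-specific playback logic inline; it dispatches on mediaType to the new videoMaterialRecordPlay(at:shouldPlayRecordIndex:recordedAudio:) and imageMaterialRecordPlay(at:shouldPlayRecordIndex:recordedAudio:) helpers. A minimal sketch of that shape, with hypothetical stand-in types (MediaType, VoiceModel, RecordPlaybackSketch are illustrative only, not the BFRecordScreenKit API):

import CoreMedia
import Foundation

// Hypothetical stand-ins for the real models; only the dispatch shape matters here.
enum MediaType { case VIDEO, IMAGE }
struct VoiceModel { var startTime: Double; var endTime: Double }

final class RecordPlaybackSketch {
    var mediaType: MediaType = .VIDEO

    // Shared lookup/validation stays here; the material-specific work is split out,
    // mirroring the videoMaterialRecordPlay / imageMaterialRecordPlay split in the diff.
    func playRecord(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: VoiceModel) {
        if mediaType == .VIDEO {
            videoMaterialRecordPlay(at: currentT, shouldPlayRecordIndex: shouldPlayRecordIndex, recordedAudio: recordedAudio)
        } else {
            imageMaterialRecordPlay(at: currentT, shouldPlayRecordIndex: shouldPlayRecordIndex, recordedAudio: recordedAudio)
        }
    }

    private func videoMaterialRecordPlay(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: VoiceModel) {
        // Video material: guard against an unready player item (duration.timescale == 0) before seeking.
        print("video path @ \(CMTimeGetSeconds(currentT))s, record index \(shouldPlayRecordIndex)")
    }

    private func imageMaterialRecordPlay(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: VoiceModel) {
        // Image material: no video track to wait on, so seek the record player and play directly.
        print("image path @ \(CMTimeGetSeconds(currentT))s, record index \(shouldPlayRecordIndex)")
    }
}
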
@@ -121,8 +121,9 @@ public class BFRecordScreenController: BFBaseViewController {
     // 录音管理器
     var recorderManager: BFVoiceRecordManager?
 
-    var NeoNuiAPPID:String?
-    var NeoNuiToken:String?
+    var NeoNuiAPPID: String?
+    var NeoNuiToken: String?
+
     // MARK: - 视图参数
 
     var beginOnStartBtn: Bool = false
@@ -307,7 +308,7 @@ public class BFRecordScreenController: BFBaseViewController {
             BFLog(1, message: "haveSpeak is:\(haveSpeak),noHaveSpeak is:\(noHaveSpeak)")
             self?.haveSpeakVolume = haveSpeak / 100.0
             self?.noSpeakVolume = noHaveSpeak / 100.0
-            
+
 //            self?.assetPlayer?.volume = self?.noSpeakVolume
         }
 
@@ -407,7 +408,6 @@ public class BFRecordScreenController: BFBaseViewController {
             BFLog(message: "nls appkey is \(appkey), token is \(token)")
             self?.NeoNuiAPPID = appkey
             self?.NeoNuiToken = token
-        
         }
         recorderManager = BFVoiceRecordManager()
 
@@ -466,7 +466,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
 
                 var deletedVoices = [(PQVoiceModel, Int)]()
-                //要删除的字幕
+                // 要删除的字幕
                 var deletedTitlesTemp = [(PQEditSubTitleModel, Int)]()
                 for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
                     let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
@@ -512,9 +512,8 @@ public class BFRecordScreenController: BFBaseViewController {
                     // 注:在录制结束时矫正当前位置,避免跟指针无法对其
                     if deletedVoices.count == 0 {
                         self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
-                    }else {
+                    } else {
                         self?.indirectionView?.resetAllSubViews(items: recordItem.voiceStickers, percenWidth: recordItem.mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2.0 : 0, totalDuration: recordItem.materialDuraion)
-
                     }
                     // 矫正进度
                     self?.resetCurrentProgress()
@@ -522,7 +521,7 @@ public class BFRecordScreenController: BFBaseViewController {
                     self?.recordBtn.isHidden = (self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE && (self?.isEndPlay ?? false)) ? false : (self?.isEndPlay ?? false)
                 }
                 sself.currentPlayRecordIndex = -3 // 刚录音完,不需要播放录音
-                BFLog(3, message: "重置播放index = \(sself.currentPlayRecordIndex)")
+                BFLog(3, message: "重置播放index-\(#function) = \(sself.currentPlayRecordIndex)")
                 // 重置录制开始时间
                 sself.recordStartTime = 0
             }
@@ -826,27 +825,25 @@ public class BFRecordScreenController: BFBaseViewController {
 
     /// 删除指定段落的所有字幕 数据
     /// - Parameter voiceModel: 删除的音频数据
-    func deleteTitles(voiceModel: PQVoiceModel) -> [(PQEditSubTitleModel, Int)]{
+    func deleteTitles(voiceModel: PQVoiceModel) -> [(PQEditSubTitleModel, Int)] {
         BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除前:\(itemModels[currItemModelIndex].titleStickers.count) model.startTime: \(voiceModel.startTime) model.end: \(voiceModel.endTime)")
-        var deleteTemp =  [(PQEditSubTitleModel, Int)]()
-     
-        for (index,title) in itemModels[currItemModelIndex].titleStickers.enumerated() {
+        var deleteTemp = [(PQEditSubTitleModel, Int)]()
+
+        for (index, title) in itemModels[currItemModelIndex].titleStickers.enumerated() {
             if title.timelineIn >= voiceModel.startTime, title.timelineOut <= voiceModel.endTime {
-                deleteTemp.append((title,index))
+                deleteTemp.append((title, index))
             }
-            //从原数组中删除
-            if let index = itemModels[currItemModelIndex].titleStickers.firstIndex(of: title){
+            // 从原数组中删除
+            if let index = itemModels[currItemModelIndex].titleStickers.firstIndex(of: title) {
                 itemModels[currItemModelIndex].titleStickers.remove(at: index)
-                
             }
-
         }
         BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除后:\(itemModels[currItemModelIndex].titleStickers.count)")
 
         // 清空字幕UI
         subtitleLabel.text = ""
         subtitleLabel.backgroundColor = UIColor.clear
-        
+
         return deleteTemp
     }
 
@@ -877,7 +874,6 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             searchStopAtRecordRange()
 
-           
             var event = WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)])
             event.deletedTittles = deleteTitles(voiceModel: model)
             events.append(event)
@@ -930,8 +926,6 @@ public class BFRecordScreenController: BFBaseViewController {
         }
         // 录制中不显示播放按钮
         (collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
-        
-        
     }
 
     @objc func endRecord() {
@@ -1027,7 +1021,7 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             isDragingProgressSlder = false
             currentPlayRecordIndex = -1
-            BFLog(3, message: "重置播放index = \(currentPlayRecordIndex)")
+            BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
             hadPrepareToPlayRecord = false
             progressThumV.progress = jumpTime
 
@@ -1038,9 +1032,9 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             searchStopAtRecordRange()
             let itemModel = itemModels[currItemModelIndex]
-            
+
             // 重置进度条
-            indirectionView?.resetAllSubViews(items: itemModels[currItemModelIndex].voiceStickers, percenWidth: itemModels[currItemModelIndex].mediaType == .IMAGE ? (progressThumV.thumbImageWidth ) / 2.0 : 0, totalDuration: itemModels[currItemModelIndex].materialDuraion)
+            indirectionView?.resetAllSubViews(items: itemModels[currItemModelIndex].voiceStickers, percenWidth: itemModels[currItemModelIndex].mediaType == .IMAGE ? progressThumV.thumbImageWidth / 2.0 : 0, totalDuration: itemModels[currItemModelIndex].materialDuraion)
 
             // 如果是图片需重置播放按钮
             if itemModel.mediaType == .IMAGE {
@@ -1074,7 +1068,7 @@ public class BFRecordScreenController: BFBaseViewController {
         changeProgress(progress: sender.value)
         isDragingProgressSlder = false
         currentPlayRecordIndex = -1
-        BFLog(3, message: "重置播放index = \(currentPlayRecordIndex)")
+        BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
         hadPrepareToPlayRecord = false
     }
 
@@ -1088,6 +1082,7 @@ public class BFRecordScreenController: BFBaseViewController {
         currentPlayRecordIndex = -1
         hadPrepareToPlayRecord = false
         BFLog(1, message: "isDragingProgressSlder : \(isDragingProgressSlder)")
+        BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
         searchStopAtRecordRange(needAdsorb: true)
         changeWithDrawBtnLayout(false)
         isEndPlay = (progress == 1)
@@ -1197,7 +1192,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
     // MARK: - 音视频处理
 
-    func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
+    func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed _: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
         if currentPlayRecordIndex == -3 { // 刚录音完,不需要播放
             return
         }
@@ -1232,7 +1227,7 @@ public class BFRecordScreenController: BFBaseViewController {
             currentPlayRecordIndex = -1
             hadPrepareToPlayRecord = false
             BFLog(1, message: "录音播放器初始化(有时候不准)")
-
+            BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
             NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self, weak recordedAudio] _ in
                 self?.hadPrepareToPlayRecord = false
                 self?.currentPlayRecordIndex = -1
@@ -1245,7 +1240,7 @@ public class BFRecordScreenController: BFBaseViewController {
             avplayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
                 BFLog(3, message: "当前播放---\(time),\(time.seconds)")
                 if CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero) <= 0 {
-                    BFLog(message: "重新更新开始播放进度\(#function)-\((self?.currenStartPlayTime.seconds ?? 0))")
+                    BFLog(message: "重新更新开始播放进度\(#function)-\(self?.currenStartPlayTime.seconds ?? 0)")
                     self?.currenStartPlayTime = time
                 }
                 let progressTime = CMTime(seconds: CMTimeGetSeconds(time) - CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero), preferredTimescale: 1000)
@@ -1253,12 +1248,25 @@ public class BFRecordScreenController: BFBaseViewController {
                 periodicTimeObserver(progressTime, newItem)
             } as? NSKeyValueObservation
         }
-        
-//        if recordPlayer?.currentItem?.duration.timescale == 0 {
-//            BFLog(1, message: "时间timescale  == 0")
-//            playFailed((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), recordPlayer?.currentItem)
-//            return
-//        }
+        if itemModels[currItemModelIndex].mediaType == .VIDEO {
+            videoMaterialRecordPlay(at: currentT, shouldPlayRecordIndex: shouldPlayRecordIndex, recordedAudio: recordedAudio)
+        } else {
+            imageMaterialRecordPlay(at: currentT, shouldPlayRecordIndex: shouldPlayRecordIndex, recordedAudio: recordedAudio)
+        }
+        BFLog(1, message: "应当播放:\(shouldPlayRecordIndex), 当前播放:\(currentPlayRecordIndex)")
+    }
+
+    /// 视频素材录音的播放
+    /// - Parameters:
+    ///   - currentT: <#currentT description#>
+    ///   - shouldPlayRecordIndex: <#shouldPlayRecordIndex description#>
+    ///   - recordedAudio: <#recordedAudio description#>
+    func videoMaterialRecordPlay(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: PQVoiceModel) {
+        //        if recordPlayer?.currentItem?.duration.timescale == 0 {
+        //            BFLog(1, message: "时间timescale  == 0")
+        //            playFailed((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), recordPlayer?.currentItem)
+        //            return
+        //        }
         synced(currentPlayRecordIndex) { [weak self] in
             guard let sself = self else {
                 return
@@ -1269,9 +1277,8 @@ public class BFRecordScreenController: BFBaseViewController {
                CMTimeGetSeconds(currentT) >= (recordedAudio.startTime - 0.1),
                CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2 // 这个条件是避免录音结束后有小幅度回退导致播放最新录音
             {
-                if itemModels[currItemModelIndex].mediaType == .VIDEO && recordPlayer?.currentItem?.duration.timescale == 0{
+                if itemModels[currItemModelIndex].mediaType == .VIDEO, recordPlayer?.currentItem?.duration.timescale == 0 {
                     return
-
                 }
                 // 应当开始播放了
                 // 两个逻辑:如果在播,则跳过;如果暂停拖动到中间,则seek
@@ -1291,13 +1298,51 @@ public class BFRecordScreenController: BFBaseViewController {
                     }
                     currentPlayRecordIndex = shouldPlayRecordIndex
                     hadPrepareToPlayRecord = true
+                    BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
                     BFLog(1, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
                 }
             }
         }
-        BFLog(1, message: "应当播放:\(shouldPlayRecordIndex), 当前播放:\(currentPlayRecordIndex)")
     }
 
+    
+    /// 图片素材录音的播放
+    /// - Parameters:
+    ///   - currentT: <#currentT description#>
+    ///   - shouldPlayRecordIndex: <#shouldPlayRecordIndex description#>
+    ///   - recordedAudio: <#recordedAudio description#>
+    func imageMaterialRecordPlay(at currentT: CMTime, shouldPlayRecordIndex: Int, recordedAudio: PQVoiceModel) {
+        synced(currentPlayRecordIndex) { [weak self] in
+            guard let sself = self else {
+                return
+            }
+            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startTime),\(CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2)")
+
+            if CMTimeGetSeconds(currentT) >= (recordedAudio.startTime - 0.1),
+               CMTimeGetSeconds(currentT) <= recordedAudio.endTime // 这个条件是避免录音结束后有小幅度回退导致播放最新录音
+            {
+                // 应当开始播放了
+                // 两个逻辑:如果在播,则跳过;如果暂停拖动到中间,则seek
+                if sself.isNormalPlaying {
+                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
+                    sself.assetPlayer?.volume = sself.haveSpeakVolume
+
+                    DispatchQueue.main.async { [weak sself] in
+                        self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000), toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
+                            if finished, self?.isNormalPlaying ?? false {
+                                self?.recordPlayer?.play()
+                                //                                self?.deleteRecordBtn.isHidden = false
+                                //                                self?.recordBtn.isHidden = true
+                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds ?? 0)")
+                            }
+                        })
+                    }
+                    BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
+                    BFLog(1, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
+                }
+            }
+        }
+    }
     func play() {
         BFLog(1, message: "开始播放 \(currentAssetProgress.seconds)")
 
@@ -1307,7 +1352,7 @@ public class BFRecordScreenController: BFBaseViewController {
             assetPlayer?.seek(to: CMTime.zero)
             progressThumV.progress = 0
             currentPlayRecordIndex = -1
-            BFLog(3, message: "重置播放index = \(currentPlayRecordIndex)")
+            BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
             if itemModels[currItemModelIndex].mediaType == .VIDEO {
                 recordBtn.isHidden = false
             }
@@ -1458,7 +1503,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 currentAssetProgress = CMTime(seconds: time.seconds, preferredTimescale: 1000)
                 BFLog(1, message: "curr:\(CMTimeGetSeconds(currentAssetProgress))")
                 DispatchQueue.main.async { [weak self] in
-                    BFLog(message: "更新录音进度\(#function)-\((self?.currentAssetProgress.seconds ?? 0))")
+                    BFLog(message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
                     self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(time).formatDurationToHMS())
                     let su = !(self?.isDragingProgressSlder ?? false) || (self?.isRecording ?? false) || (self?.isNormalPlaying ?? false)
                     if su {
@@ -1510,7 +1555,7 @@ public class BFRecordScreenController: BFBaseViewController {
             if let duration = assetPlayer?.currentItem?.duration {
                 currentAssetProgress = CMTime(value: CMTimeValue(newProgress * Float(CMTimeGetSeconds(duration)) * 1000), timescale: 1000)
                 DispatchQueue.main.async { [weak self] in
-                    BFLog(message: "更新录音进度\(#function)-\((self?.currentAssetProgress.seconds ?? 0))")
+                    BFLog(message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
                     self!.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
                 }
                 assetPlayer!.seek(to: currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000)) { _ in
@@ -1519,12 +1564,12 @@ public class BFRecordScreenController: BFBaseViewController {
         } else {
             currentAssetProgress = isBack ? CMTime(value: CMTimeValue(progress * 1000), timescale: 1000) : CMTime(value: CMTimeValue(progress * Float(itemModels[currItemModelIndex].materialDuraion) * 1000), timescale: 1000)
             DispatchQueue.main.async { [weak self] in
-                BFLog(message: "更新录音进度\(#function)-\((self?.currentAssetProgress.seconds ?? 0))")
+                BFLog(message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
                 self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
             }
             BFLog(message: "progress = \(progress),currentAssetProgress = \(currentAssetProgress.seconds),materialDuraion = \(itemModels[currItemModelIndex].materialDuraion)")
-            
-            //add by ak 图片素材拖动指针时同时更新字幕显现数据 
+
+            // add by ak 图片素材拖动指针时同时更新字幕显现数据
             updateSubtitle(time: currentAssetProgress)
         }
     }
@@ -1574,7 +1619,7 @@ extension BFRecordScreenController: GPUImageMovieDelegate {
     public func didCompletePlayingMovie() {
         BFLog(1, message: "播放结束")
         currentPlayRecordIndex = -1
-        BFLog(3, message: "重置播放index = \(currentPlayRecordIndex)")
+        BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
     }
 }
 
@@ -1702,10 +1747,9 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
 
 /// 处理图片素材
 public extension BFRecordScreenController {
-    
     /// 播放录音
     /// - Parameter time: <#time description#>
-    func playRecord(time:CMTime) {
+    func playRecord(time: CMTime) {
         // 播放对应的录音音频
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             if itemModels[currItemModelIndex].materialDuraion <= 0 || currentAssetProgress.seconds >= itemModels[currItemModelIndex].materialDuraion {
@@ -1719,7 +1763,7 @@ public extension BFRecordScreenController {
         }
         playRecord(at: time, periodicTimeObserver: { [weak self] currentT, currentItem in
             BFLog(1, message: "播放录音进度:\(currentT),\(currentItem)")
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE && (self?.isNormalPlaying ?? false) {
+            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE, self?.isNormalPlaying ?? false {
                 self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))
             }
         }, didPlayToEndTime: { [weak self] recordInfo, currentItem in
@@ -1740,7 +1784,7 @@ public extension BFRecordScreenController {
                 }
             }
         }) { [weak self] _, _ in
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE  {
+            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
                 DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 3) {
                     self?.playRecord(time: self?.currentAssetProgress ?? CMTime.zero)
                 }
@@ -1765,7 +1809,7 @@ public extension BFRecordScreenController {
     /// 重置进度
     func resetCurrentProgress() {
         DispatchQueue.main.async { [weak self] in
-            BFLog(message: "更新录音进度\(#function)-\((self?.currentAssetProgress.seconds ?? 0))")
+            BFLog(message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
             self?.progreddL.text = String(format: "%@", (self?.currentAssetProgress.seconds ?? 0).formatDurationToHMS())
             self?.progressThumV.progress = (self?.currentAssetProgress.seconds ?? 0)
             self?.updateSubtitle(time: self?.currentAssetProgress ?? CMTime.zero)
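
One smaller pattern repeated throughout this commit: the "reset playback index" BFLog calls now interpolate \(#function), so the same message can be traced back to whichever method reset currentPlayRecordIndex. A minimal, self-contained illustration of that Swift behavior (a plain print stands in for BFLog):

import Foundation

struct PlayerStateSketch {
    var currentPlayRecordIndex = -1

    mutating func didCompletePlayingMovie() {
        currentPlayRecordIndex = -1
        // #function expands to the enclosing method's name at compile time,
        // so the log line records which code path performed the reset.
        print("reset playback index-\(#function) = \(currentPlayRecordIndex)")
    }
}

var state = PlayerStateSketch()
state.didCompletePlayingMovie()
// Prints: reset playback index-didCompletePlayingMovie() = -1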