Merge branch 'master' of https://git.yishihui.com/iOS/BFRecordScreenKit
Merge code

jsonwang 3 years ago
parent
commit
8d791729f8

+ 37 - 26
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -454,7 +454,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
         // Recording progress
         recorderManager?.recorderProgrossHandle = { [weak self] progress in
-            BFLog(1, message: "curr:录音进度--\(progress),\(self?.isRecording ?? false)")
+//            BFLog(1, message: "curr:录音进度--\(progress),\(self?.isRecording ?? false)")
             self?.drawProgressIndication(progress: (progress.isNaN || progress.isInfinite) ? 0 : progress)
         }
 
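The change in this hunk only silences the verbose progress log; the NaN/Infinite guard it keeps around drawProgressIndication is the part worth noting. A minimal, illustrative sketch of that guard (the helper name is not part of the commit):

func sanitizedProgress<T: FloatingPoint>(_ progress: T) -> T {
    // NaN or +/-infinity (e.g. from a 0/0 duration) would break the progress
    // drawing, so clamp those cases to 0 before handing the value on.
    return (progress.isNaN || progress.isInfinite) ? 0 : progress
}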
@@ -469,18 +469,20 @@ public class BFRecordScreenController: BFBaseViewController {
             let header = dicResult?["header"] as? [String: Any]
             let payload = dicResult?["payload"] as? [String: Any]
 
-            BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
+            BFLog(1, message: "识别结果:) \((payload?["result"])!),taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\((((payload?["begin_time"]) as? Int) ?? 0)) ~ \((((payload?["time"]) as? Int) ?? 0)) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
             DispatchQueue.main.async {
                 // 1. Save the subtitle data: begin_time is when the text starts appearing and time is when it stops, both in milliseconds and both relative to the whole recorded audio clip. self.recorderManager.voiceModel?.startCMTime.seconds is when recording started, so both the start and the end must be offset by it.
 
                 let newSubtitle = PQEditSubTitleModel()
                 // Globally unique task ID; keep this value to make troubleshooting easier. task_id does not change between a startRecorder call and the matching stopRecoder call.
                 newSubtitle.taskID = (header?["task_id"] as? String) ?? ""
-
+                BFLog(1, message: "url:\(URL(fileURLWithPath: audioFilePath ?? "b").deletingPathExtension().lastPathComponent), wavpath:\(URL(fileURLWithPath:self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent)")
                 // Add 300 ms here because the recognizer trims the result to the first word, so it starts slightly early; the exact amount is undocumented and depends on the source audio. For now we push it back by 300 ms (unit: milliseconds).
-                if URL(fileURLWithPath: audioFilePath ?? "b").deletingPathExtension().lastPathComponent.contains(URL(fileURLWithPath: self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent) {
+                if let audioUrl = audioFilePath, URL(fileURLWithPath: audioUrl).deletingPathExtension().lastPathComponent.contains(URL(fileURLWithPath: self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent) {
                     newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startCMTime ?? .zero) + CMTime(seconds: Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0, preferredTimescale: 1000)
                     newSubtitle.timelineOut = (self?.recorderManager?.voiceModel?.startCMTime ?? .zero) + CMTime(seconds: Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0, preferredTimescale: 1000)
+                    newSubtitle.audioFilePath = audioUrl
+                    BFLog(1, message: "字幕按时回来")
 
                     if (newSubtitle.timelineIn - (self?.recorderManager?.voiceModel?.endCMTime ?? .zero)).seconds > 0.1 {
                         BFLog(1, message: "卡在录音尾巴上了1")
@@ -492,7 +494,8 @@ public class BFRecordScreenController: BFBaseViewController {
                     }) {
                         newSubtitle.timelineIn = voice.startCMTime + CMTime(seconds: Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0, preferredTimescale: 1000)
                         newSubtitle.timelineOut = voice.startCMTime + CMTime(seconds: Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0, preferredTimescale: 1000)
-
+                        newSubtitle.audioFilePath = voice.wavFilePath
+                        BFLog(1, message: "字幕没及时回来")
                         if (newSubtitle.timelineIn - voice.endCMTime).seconds > 0.1 {
                             BFLog(1, message: "卡在录音尾巴上了1")
                             newSubtitle.timelineIn = newSubtitle.timelineIn - CMTime(seconds: 0.1, preferredTimescale: 1000)
@@ -505,9 +508,9 @@ public class BFRecordScreenController: BFBaseViewController {
                     showText += "..."
                 }
                 newSubtitle.text = showText
-                newSubtitle.audioFilePath = audioFilePath ?? ""
+//                newSubtitle.audioFilePath = audioFilePath ?? ""
 
-                BFLog(message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text: \(newSubtitle.text) 音频路径为:\(audioFilePath ?? "") 传入的地址:\(self?.recorderManager?.voiceModel?.wavFilePath ?? "aa")")
+                BFLog(1, message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn.seconds) timelineOut \(newSubtitle.timelineOut.seconds) text: \(newSubtitle.text) 音频路径为:\(audioFilePath ?? "bb") 传入的地址:\(self?.recorderManager?.voiceModel?.wavFilePath ?? "aa")")
 
                 newSubtitle.setting = self?.subtitleSettingView.subtitle.setting ?? BFSubTitileSettingModel()
 
@@ -907,9 +910,9 @@ public class BFRecordScreenController: BFBaseViewController {
         BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除前:\(itemModels[currItemModelIndex].titleStickers.count) model.startCMTime.seconds: \(voiceModel.startCMTime.seconds) model.end: \(voiceModel.endCMTime.seconds)")
         var deleteTemp = [(PQEditSubTitleModel, Int)]()
 
-        for (index, title) in itemModels[currItemModelIndex].titleStickers.enumerated() {
-            if title.audioFilePath == voiceModel.wavFilePath {
-                deleteTemp.append((title, index))
+        for (ind, sticker) in itemModels[currItemModelIndex].titleStickers.enumerated() {
+            if sticker.audioFilePath == voiceModel.wavFilePath {
+                deleteTemp.append((sticker, ind))
             }
         }
         // Remove them from the original array
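This hunk only renames the loop variables; the lookup itself could just as well be written as a filter. A short, illustrative sketch assuming the same PQVoiceModel/PQEditSubTitleModel fields (the helper is not part of the commit):

func subtitlesBound(to voice: PQVoiceModel, in stickers: [PQEditSubTitleModel]) -> [(PQEditSubTitleModel, Int)] {
    // A subtitle belongs to a recording when its audioFilePath matches the
    // recording's wavFilePath, the association that the per-subtitle
    // audioFilePath assignment in the earlier hunk makes possible.
    return stickers.enumerated()
        .filter { $0.element.audioFilePath == voice.wavFilePath }
        .map { ($0.element, $0.offset) }
}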
@@ -936,6 +939,7 @@ public class BFRecordScreenController: BFBaseViewController {
             var event = WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)])
             event.deletedTittles = deleteTitles(voiceModel: model)
             events.append(event)
+            
             // Note: after deleting a recording over image material, roll the playhead back and shift the later recordings forward.
             if itemModels[currItemModelIndex].mediaType == .IMAGE {
                 let currDuration = model.endCMTime.seconds - model.startCMTime.seconds
@@ -1404,26 +1408,33 @@ public class BFRecordScreenController: BFBaseViewController {
             hadPrepareToPlayRecord = false
             BFLog(1, message: "录音播放器初始化(有时候不准)")
             BFLog(3, message: "重置播放index-\(#function) = \(currentPlayRecordIndex)")
-            NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self, weak recordedAudio] _ in
-                self?.hadPrepareToPlayRecord = false
-                self?.currentPlayRecordIndex = -1
-                self?.recordPlayer?.volume = 0
-                self?.assetPlayer?.volume = self?.noSpeakVolume ?? 0
-                didPlayToEndTime((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), newItem)
+            NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self] _ in
+                guard let sself = self else {
+                    return
+                }
+                sself.hadPrepareToPlayRecord = false
+                sself.currentPlayRecordIndex = -1
+                sself.recordPlayer?.volume = 0
+                sself.assetPlayer?.volume = sself.noSpeakVolume
+                didPlayToEndTime((shouldPlayRecordIndex, recordedAudio), newItem)
             }
             avplayerTimeObserver?.invalidate()
-            avplayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer] time in
-                BFLog(3, message: "当前播放---\(time),\(time.seconds),\(recordPlayer?.currentItem?.currentTime().seconds ?? 0),\(recordPlayer?.currentItem?.duration.seconds ?? 0)")
-                if CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero) <= 0 {
-                    BFLog(message: "重新更新开始播放进度\(#function)-\(self?.currenStartPlayTime.seconds ?? 0)")
-                    self?.currenStartPlayTime = time
+            avplayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer,weak assetPlayer] time in
+                guard let sself = self,let rPlay = recordPlayer,let aPlay = assetPlayer else {
+                    return
+                }
+                BFLog(3, message: "当前播放---\(time),\(time.seconds),\(rPlay.currentItem?.currentTime().seconds ?? 0),\(rPlay.currentItem?.duration.seconds ?? 0)")
+                if CMTimeGetSeconds(sself.currenStartPlayTime) <= 0 {
+                    BFLog(message: "重新更新开始播放进度\(#function)-\(sself.currenStartPlayTime.seconds)")
+                    sself.currenStartPlayTime = time
                 }
-                let progressTime = CMTime(seconds: CMTimeGetSeconds(time) - CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero), preferredTimescale: 1000)
+                let progressTime = CMTime(seconds: CMTimeGetSeconds(time) - CMTimeGetSeconds(sself.currenStartPlayTime), preferredTimescale: 1000)
                 BFLog(message: "progressTime进度\(#function)-\(progressTime.seconds)")
                 periodicTimeObserver(progressTime, newItem)
-                if (recordPlayer?.currentItem?.currentTime().seconds ?? 0) > (recordPlayer?.currentItem?.duration.seconds ?? 0) - 0.1 {
-                    recordPlayer?.volume = 0
-                    self?.assetPlayer?.volume = self?.noSpeakVolume ?? 0
+                if (rPlay.currentItem?.currentTime().seconds ?? 0) > (rPlay.currentItem?.duration.seconds ?? 0) - 0.1 {
+                    rPlay.volume = 0
+                    aPlay.volume = sself.noSpeakVolume
+                    didPlayToEndTime((shouldPlayRecordIndex, recordedAudio), newItem)
                 }
             } as? NSKeyValueObservation
         }
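The rewritten observer replaces the repeated self?/optional-chaining fallbacks with a single guard-let promotion of the weakly captured players, and also fires didPlayToEndTime from the periodic callback. A self-contained sketch of that capture pattern, with an illustrative class name and the progress handling reduced to a callback (not the project's actual API):

import AVFoundation

final class RecordPlaybackObserver {
    private var timeObserverToken: Any?
    var noSpeakVolume: Float = 1.0

    func startObserving(recordPlayer: AVPlayer, assetPlayer: AVPlayer, onProgress: @escaping (CMTime) -> Void) {
        let interval = CMTime(value: 1, timescale: 1000)
        timeObserverToken = recordPlayer.addPeriodicTimeObserver(forInterval: interval, queue: DispatchQueue.global()) { [weak self, weak recordPlayer, weak assetPlayer] time in
            // Promote the weak captures once; if anything is already gone, return
            // instead of silently falling back to defaults on every access.
            guard let self = self, let rPlayer = recordPlayer, let aPlayer = assetPlayer else { return }
            onProgress(time)
            // Within 0.1 s of the end of the recorded clip, mute the record player
            // and hand the volume back to the asset player.
            if let item = rPlayer.currentItem, item.currentTime().seconds > item.duration.seconds - 0.1 {
                rPlayer.volume = 0
                aPlayer.volume = self.noSpeakVolume
            }
        }
    }

    func stopObserving(recordPlayer: AVPlayer) {
        if let token = timeObserverToken {
            recordPlayer.removeTimeObserver(token)
            timeObserverToken = nil
        }
    }
}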
@@ -2046,7 +2057,7 @@ public extension BFRecordScreenController {
         } else {
             currentAssetProgress = CMTime(seconds: recordStartPlayTime.seconds + progress, preferredTimescale: 1000)
         }
-        BFLog(1, message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress),\(itemModels[currItemModelIndex].materialDuraion)")
+        BFLog(message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress.seconds),\(itemModels[currItemModelIndex].materialDuraion)")
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             /// Reset the progress
             resetCurrentProgress()