wenweiwei 3 years ago
parent
commit
ead4b33407

+ 34 - 47
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -19,7 +19,7 @@ struct WithDrawModel {
     var timestamp: Double
     var deletedVoices: [(PQVoiceModel, Int)]?
     // add by ak 保存删除的字幕数据用于恢复
-    var deletedTittles:[(PQEditSubTitleModel,Int)]?
+    var deletedTittles: [(PQEditSubTitleModel, Int)]?
 }
 
 public class BFRecordScreenController: BFBaseViewController {
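The new `deletedTittles` field keeps the removed subtitle models (together with a position flag) so a type-4 undo event can put them back. The restore side is not part of this commit; a minimal sketch of what it could look like on the controller, assuming `action` is the popped `WithDrawModel` and `titleStickers` is the live subtitle array:

    // Hedged sketch, not in this commit: unwind a type-4 event by re-inserting
    // every subtitle that deleteTitles(voiceModel:) removed.
    func restoreDeletedTitles(from action: WithDrawModel) {
        guard action.type == 4, let deleted = action.deletedTittles else { return }
        for (subtitle, _) in deleted {
            itemModels[currItemModelIndex].titleStickers.append(subtitle)
        }
        // keep the stickers ordered by their timeline-in position
        itemModels[currItemModelIndex].titleStickers.sort { $0.timelineIn < $1.timelineIn }
    }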
@@ -32,7 +32,7 @@ public class BFRecordScreenController: BFBaseViewController {
     public var assets = [PHAsset]()
     var currItemModelIndex = 0
     public var itemModels = [BFRecordItemModel]()
-    //add by ak 当前的显示的字幕位置
+    // add by ak 当前的显示的字幕位置
     var showSubtitleIndex = 0
     //    var shouldPlayRecordIndex:Int = -1          // 当前应该播放的录音资源序号
     var currentPlayRecordIndex: Int = -1 // >= 0 :当前正在播放的录音资源序号; -3: 刚录音完,不需要播放录音; -1:初始化阶段
@@ -404,7 +404,7 @@ public class BFRecordScreenController: BFBaseViewController {
         _ = disablePopGesture()
 
         // add by ak 取 nsl token
-        BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
+        BFRecordScreenViewModel.getNlsAccessToken { token, appkey in
             BFLog(message: "nls appkey is \(appkey), token is \(token)")
 //            self?.speechTranscriberUtil = PQSpeechTranscriberUtil(token, appid: appkey)
         }
@@ -438,11 +438,11 @@ public class BFRecordScreenController: BFBaseViewController {
             try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
         }
 
-        avatarView.recordEndCallBack = { [weak self] _, materialsModel in
+        avatarView.recordEndCallBack = { _, materialsModel in
             BFLog(message: "新录制完成::::\(materialsModel?.locationPath ?? "")")
         }
 
-        audioSettingView.callBack = { [weak self] haveSpeak, noHaveSpeak in
+        audioSettingView.callBack = { haveSpeak, noHaveSpeak in
 
             BFLog(message: "haveSpeak is\(haveSpeak),noHaveSpeak is\(noHaveSpeak)")
         }
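Dropping `[weak self]` here (and in the `getNlsAccessToken` callback above) is safe only because these closure bodies no longer reference `self`. If a later change brings `self` back into one of them, the weak capture should return with it so the controller and the view holding the closure do not retain each other; a general sketch of that pattern (the `applyAudioSetting` call is purely hypothetical):

    audioSettingView.callBack = { [weak self] haveSpeak, noHaveSpeak in
        guard let self = self else { return }
        BFLog(message: "haveSpeak is \(haveSpeak), noHaveSpeak is \(noHaveSpeak)")
        // hypothetical use of self that would require the weak capture
        self.applyAudioSetting(haveSpeak: haveSpeak, noHaveSpeak: noHaveSpeak)
    }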
@@ -680,33 +680,26 @@ public class BFRecordScreenController: BFBaseViewController {
 
         audioSettingView.isHidden = false
     }
- 
+
     /// 删除指定段落的所有字幕 数据
     /// - Parameter voiceModel: 删除的音频数据
-    func deleteTitles(voiceModel:PQVoiceModel){
-          BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除前:\(itemModels[currItemModelIndex].titleStickers.count) model.startTime: \(voiceModel.startTime) model.end: \(voiceModel.endTime)")
-          for title in itemModels[currItemModelIndex].titleStickers {
-              if let index = itemModels[currItemModelIndex].titleStickers.firstIndex(of: title) {
-                  
-                  if title.timelineIn >= voiceModel.startTime && title.timelineOut <= voiceModel.endTime{
-                    
-                      //删除前数据存入缓存
-                      let sutbitle  = itemModels[currItemModelIndex].titleStickers[index]
-                      events.append(WithDrawModel(type: 4, timestamp: currentAssetProgress.seconds, deletedTittles: [(sutbitle,isStopAtRecordRange)]))
-                      
-                      itemModels[currItemModelIndex].titleStickers.remove(at: index)
-                      
-                  
- 
-                  }
-         
-              }
-          }
-         BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除后:\(itemModels[currItemModelIndex].titleStickers.count)")
- 
-          //清空字幕UI
-         subtitleLabel.text = ""
-        
+    func deleteTitles(voiceModel: PQVoiceModel) {
+        BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除前:\(itemModels[currItemModelIndex].titleStickers.count) model.startTime: \(voiceModel.startTime) model.end: \(voiceModel.endTime)")
+        for title in itemModels[currItemModelIndex].titleStickers {
+            if let index = itemModels[currItemModelIndex].titleStickers.firstIndex(of: title) {
+                if title.timelineIn >= voiceModel.startTime, title.timelineOut <= voiceModel.endTime {
+                    // 删除前数据存入缓存
+                    let sutbitle = itemModels[currItemModelIndex].titleStickers[index]
+                    events.append(WithDrawModel(type: 4, timestamp: currentAssetProgress.seconds, deletedTittles: [(sutbitle, isStopAtRecordRange)]))
+
+                    itemModels[currItemModelIndex].titleStickers.remove(at: index)
+                }
+            }
+        }
+        BFLog(message: "itemModels[currItemModelIndex].titleStickers  删除后:\(itemModels[currItemModelIndex].titleStickers.count)")
+
+        // 清空字幕UI
+        subtitleLabel.text = ""
     }
 
     @objc func deleteRecorded() {
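Because Swift arrays are value types, the `for title in ... titleStickers` loop above iterates over a copy, so removing elements from the live array inside the loop is safe here, if a little indirect (each pass re-resolves the index with `firstIndex(of:)`). An equivalent, arguably simpler shape, sketched under the same assumptions (same types, one type-4 event per removed subtitle):

    // Sketch only: collect the affected subtitles first, record the undo
    // events, then drop them from the live array in one pass.
    let affected = itemModels[currItemModelIndex].titleStickers.filter {
        $0.timelineIn >= voiceModel.startTime && $0.timelineOut <= voiceModel.endTime
    }
    for subtitle in affected {
        events.append(WithDrawModel(type: 4, timestamp: currentAssetProgress.seconds, deletedTittles: [(subtitle, isStopAtRecordRange)]))
    }
    itemModels[currItemModelIndex].titleStickers.removeAll { affected.contains($0) }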
@@ -716,9 +709,8 @@ public class BFRecordScreenController: BFBaseViewController {
             drawOrUpdateRecordProgessLable()
             searchStopAtRecordRange()
             events.append(WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)]))
-     
+
             deleteTitles(voiceModel: model)
-            
         }
     }
 
@@ -795,7 +787,6 @@ public class BFRecordScreenController: BFBaseViewController {
     @objc func withdrawAction() {
         pause()
         if let action = events.last {
- 
             var jumpTime = action.timestamp
             if action.type == 2 {
                 // 撤销录制
@@ -897,7 +888,7 @@ public class BFRecordScreenController: BFBaseViewController {
         let elems = itemModels[currItemModelIndex].voiceStickers.enumerated().filter { elem in
             elem.1.startTime <= self.currentAssetProgress.seconds && elem.1.endTime > self.currentAssetProgress.seconds
         }
-        
+
         isEndPlay = false
 
         if elems.count > 0 {
@@ -977,7 +968,7 @@ public class BFRecordScreenController: BFBaseViewController {
             return
         }
 
-        BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startTime), \(recordedAudio.endTime),, \(recordedAudio.wavFilePath)")
+        BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startTime), \(recordedAudio.endTime), \(recordedAudio.wavFilePath ?? "")")
 
         // 创建播放器
         if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString).lastPathComponent {
@@ -1004,7 +995,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 self?.currentPlayRecordIndex = -1
                 didPlayToEndTime(recordedAudio, newItem)
             }
-            _ = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { [weak self] time in
+            _ = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { time in
                 periodicTimeObserver(time, newItem)
             } as? NSKeyValueObservation
         }
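One thing this hunk leaves in place: `addPeriodicTimeObserver(forInterval:queue:using:)` returns an opaque token typed `Any`, not an `NSKeyValueObservation`, so the `as? NSKeyValueObservation` cast always evaluates to nil, and since the result is discarded with `_ =` the observer can never be handed back to `removeTimeObserver(_:)`. A hedged sketch of keeping it removable, assuming a stored property such as `recordPlayerTimeObserver: Any?` were added to the controller:

    // retain the opaque token returned by AVPlayer
    recordPlayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(
        forInterval: CMTime(value: 1, timescale: 100),
        queue: DispatchQueue.global()
    ) { time in
        periodicTimeObserver(time, newItem)
    }

    // later, when the player or its item is torn down
    if let observer = recordPlayerTimeObserver {
        recordPlayer?.removeTimeObserver(observer)
        recordPlayerTimeObserver = nil
    }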
@@ -1017,7 +1008,7 @@ public class BFRecordScreenController: BFBaseViewController {
             guard let self = self else {
                 return
             }
-            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(self.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale),\(CMTimeGetSeconds(currentT) >= recordedAudio.startTime),\(CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2)")
+            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(self.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startTime),\(CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2)")
 
             if !hadPrepareToPlayRecord,
                recordPlayer?.currentItem?.duration.timescale != 0,
@@ -1033,7 +1024,7 @@ public class BFRecordScreenController: BFBaseViewController {
                             if finished, self?.isNormalPlaying ?? false {
                                 self?.recordPlayer?.play()
 
-                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds)")
+                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds ?? 0)")
                             }
                         })
                     }
@@ -1062,17 +1053,13 @@ public class BFRecordScreenController: BFBaseViewController {
     }
 
     func play() {
-        
-        BFLog(1, message: "开始播放 \(currentAssetProgress.seconds) \(   AVAudioSession.sharedInstance().category)")
-        //add by ak 播放前设置 AVAudioSession 为播放状态
+        BFLog(1, message: "开始播放 \(currentAssetProgress.seconds) \(AVAudioSession.sharedInstance().category)")
+        // add by ak 播放前设置 AVAudioSession 为播放状态
         do {
             try AVAudioSession.sharedInstance().setCategory(.playback)
             try AVAudioSession.sharedInstance().setActive(true)
-        } catch {
+        } catch {}
 
-        }
-        
-        
         isNormalPlaying = true
         if isEndPlay {
             isEndPlay = false
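The empty `catch {}` silently drops any AVAudioSession error, which can make the .playAndRecord to .playback switch hard to debug if activation fails. One low-cost option, sketched with the same calls the commit already uses:

    do {
        try AVAudioSession.sharedInstance().setCategory(.playback)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        // surface the failure instead of swallowing it
        BFLog(message: "AVAudioSession playback setup failed: \(error)")
    }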
@@ -1177,8 +1164,8 @@ public class BFRecordScreenController: BFBaseViewController {
                     // 播放对应的录音音频
                     self?.playRecord(at: time, periodicTimeObserver: { currentT, currentItem in
                         BFLog(message: "播放一段进度:\(currentT),\(currentItem)")
-                    }, didPlayToEndTime: { startT, currentItem in
-                        BFLog(message: "播放一段结束:\(startT),\(currentItem)")
+                    }, didPlayToEndTime: { startT, _ in
+                        BFLog(message: "播放一段结束:\(startT?.endTime ?? 0)")
                     }, playFailed: { _, _ in })
                 }
             } as? NSKeyValueObservation