
1. Listen for headphones being unplugged

wenweiwei · 3 years ago · commit aedf752759
1 file changed, 46 insertions(+), 31 deletions(-)

+46 -31   BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -6,6 +6,8 @@
 //  Copyright © 2021 CocoaPods. All rights reserved.
 //
 
+import AVFAudio
+import AVFoundation
 import BFCommonKit
 import BFMediaKit
 import BFNetRequestKit
@@ -67,7 +69,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
     var currentAssetProgress: CMTime = .zero {
         didSet {
-            BFLog(3,message: "currentAssetProgress=\(currentAssetProgress.seconds)")
+            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
         }
     } // 当前素材播放的进度
     // 播放器开始播放时间
@@ -423,6 +425,19 @@ public class BFRecordScreenController: BFBaseViewController {
         endRecord()
     }
 
+    /// 线路切换
+    /// - Parameter nofify: the AVAudioSession route change notification
+    @objc func routeChangeNofify(nofify: Notification) {
+        let routeChangeDic = nofify.userInfo
+        let routeChangeReason: AVAudioSession.RouteChangeReason? = AVAudioSession.RouteChangeReason(rawValue: UInt((routeChangeDic?[AVAudioSessionRouteChangeReasonKey] as? Int) ?? 0))
+        if routeChangeReason == .oldDeviceUnavailable {
+            let previousRoute = routeChangeDic?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription
+            if previousRoute?.outputs.first?.portType == .headphones {
+                playVideo(btn: playBtn)
+            }
+        }
+    }
+
     override public func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
         navigationController?.isNavigationBarHidden = true
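
The new `routeChangeNofify(nofify:)` handler above follows the standard AVAudioSession route-change pattern: when the change reason is `.oldDeviceUnavailable` and the previous route's output was the wired `.headphones` port, playback is toggled off via `playVideo(btn:)`. Below is a minimal, self-contained sketch of the same pattern outside this controller; `HeadphoneObserver` and `onHeadphonesUnplugged` are illustrative names, not part of BFRecordScreenKit.

```swift
import AVFoundation

/// Illustrative observer: fires a callback when wired headphones are unplugged.
final class HeadphoneObserver {
    var onHeadphonesUnplugged: (() -> Void)?

    init() {
        NotificationCenter.default.addObserver(self,
                                               selector: #selector(routeChanged(_:)),
                                               name: AVAudioSession.routeChangeNotification,
                                               object: nil)
    }

    @objc private func routeChanged(_ notification: Notification) {
        guard let info = notification.userInfo,
              let rawReason = info[AVAudioSessionRouteChangeReasonKey] as? UInt,
              let reason = AVAudioSession.RouteChangeReason(rawValue: rawReason),
              reason == .oldDeviceUnavailable,
              let previousRoute = info[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription
        else { return }

        // Only react when the device that just disappeared was a wired headphone output.
        if previousRoute.outputs.contains(where: { $0.portType == .headphones }) {
            // Route-change notifications can arrive off the main thread; hop over for UI work.
            DispatchQueue.main.async { self.onHeadphonesUnplugged?() }
        }
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
    }
}
```

Note that matching only `.headphones` covers wired headphones; Bluetooth outputs report other port types (for example `.bluetoothA2DP`), so an AirPods disconnect would not take this branch.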
@@ -440,7 +455,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
         // 进入活跃状态
         PQNotification.addObserver(self, selector: #selector(didBecomeActive), name: UIApplication.didBecomeActiveNotification, object: nil)
-
+        PQNotification.addObserver(self, selector: #selector(routeChangeNofify(nofify:)), name: AVAudioSession.routeChangeNotification, object: nil)
         // 进入非活跃状态
         PQNotification.addObserver(self, selector: #selector(willResignActive), name: UIApplication.willResignActiveNotification, object: nil)
 
@@ -476,14 +491,14 @@ public class BFRecordScreenController: BFBaseViewController {
             let header = dicResult?["header"] as? [String: Any]
             let payload = dicResult?["payload"] as? [String: Any]
 
-            BFLog(1, message: "识别结果:) \((payload?["result"])!),taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\((((payload?["begin_time"]) as? Int) ?? 0)) ~ \((((payload?["time"]) as? Int) ?? 0)) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
+            BFLog(1, message: "识别结果:) \((payload?["result"])!),taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\(((payload?["begin_time"]) as? Int) ?? 0) ~ \(((payload?["time"]) as? Int) ?? 0) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
             DispatchQueue.main.async {
                 // 1,保存字幕数据 begin_time是开始出现文字的时间,time 是结束文字出现的时间 单位都为毫秒,都是相对于录制音频数据整段时间。self.recorderManager.voiceModel?.startCMTime.seconds 为开始的录制的时间,开始和结束都要加上这个时差
 
                 let newSubtitle = PQEditSubTitleModel()
                 // 任务全局唯一ID,请记录该值,便于排查问题。 每次 startRecorder 和 stopRecoder 之间  task_Id都不会变化
                 newSubtitle.taskID = (header?["task_id"] as? String) ?? ""
-                BFLog(1, message: "url:\(URL(fileURLWithPath: audioFilePath ?? "b").deletingPathExtension().lastPathComponent), wavpath:\(URL(fileURLWithPath:self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent)")
+                BFLog(1, message: "url:\(URL(fileURLWithPath: audioFilePath ?? "b").deletingPathExtension().lastPathComponent), wavpath:\(URL(fileURLWithPath: self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent)")
                 // 这里加300ms 是因为返回结果为了切到字,时长提前一些时间,具体时间官方没说和原音频有关系。这里我们先延后300ms 单位:毫秒。
                 if let audioUrl = audioFilePath, URL(fileURLWithPath: audioUrl).deletingPathExtension().lastPathComponent.contains(URL(fileURLWithPath: self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent) {
                     newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startCMTime ?? .zero) + CMTime(seconds: Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0, preferredTimescale: 1000)
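
The timing code in this hunk converts the recognizer's millisecond offsets (`begin_time` / `time`, measured from the start of the recorded clip) into timeline positions by adding the clip's `startCMTime`, plus the 300 ms shift described in the comment. A hedged sketch of just that conversion; the function and parameter names are illustrative:

```swift
import CoreMedia

/// Maps a recognizer offset (milliseconds from the start of the recorded clip) onto the
/// edit timeline by adding the clip's start time. `correctionMs` mirrors the 300 ms shift
/// the controller applies to the in-point because the service reports begin_time slightly early.
func timelinePosition(offsetMs: Int, clipStart: CMTime, correctionMs: Int = 0) -> CMTime {
    return clipStart + CMTime(seconds: Double(offsetMs + correctionMs) / 1000.0,
                              preferredTimescale: 1000)
}

// A word recognized 1200-1800 ms into a clip that starts at 5 s on the timeline:
let clipStart   = CMTime(seconds: 5, preferredTimescale: 1000)
let timelineIn  = timelinePosition(offsetMs: 1200, clipStart: clipStart, correctionMs: 300) // 6.5 s
let timelineOut = timelinePosition(offsetMs: 1800, clipStart: clipStart)                    // 6.8 s
```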
@@ -507,7 +522,7 @@ public class BFRecordScreenController: BFBaseViewController {
                             BFLog(1, message: "卡在录音尾巴上了1")
                             newSubtitle.timelineIn = newSubtitle.timelineIn - CMTime(seconds: 0.1, preferredTimescale: 1000)
                         }
-                    }else {
+                    } else {
                         BFLog(1, message: "没有对应音频播放记录,出现错误!!!!!!")
                         return
                     }
@@ -949,7 +964,7 @@ public class BFRecordScreenController: BFBaseViewController {
             var event = WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)])
             event.deletedTittles = deleteTitles(voiceModel: model)
             events.append(event)
-            
+
             // 注:删除录音后图片素材需要回撤指针进度,同时后面录音往前挪
             if itemModels[currItemModelIndex].mediaType == .IMAGE {
                 let currDuration = model.endCMTime.seconds - model.startCMTime.seconds
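
The comment in this hunk notes that deleting a recording on an image segment rewinds the progress pointer and pulls the later recordings forward by the deleted duration. The actual bookkeeping is not shown in this diff; a hedged sketch of that shift, with a simplified `Recording` model standing in for `PQVoiceModel`:

```swift
import CoreMedia

struct Recording {
    var start: CMTime
    var end: CMTime
}

/// After removing a clip of length `deletedDuration`, every recording that started at or
/// after the deleted clip's end moves earlier by that amount, keeping the segment gapless.
func shiftRecordings(after deletedEnd: CMTime,
                     by deletedDuration: CMTime,
                     in recordings: inout [Recording]) {
    for i in recordings.indices where CMTimeCompare(recordings[i].start, deletedEnd) >= 0 {
        recordings[i].start = recordings[i].start - deletedDuration
        recordings[i].end = recordings[i].end - deletedDuration
    }
}
```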
@@ -1011,7 +1026,7 @@ public class BFRecordScreenController: BFBaseViewController {
         model.currIndex = currItemModelIndex
         model.volume = 100
         recorderManager?.voiceModel = model
-        BFLog(3,message: "开始录制-开始:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),\(model)")
+        BFLog(3, message: "开始录制-开始:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),\(model)")
         recorderManager?.startRecord()
         recorderManager?.audioRecorder?.startNeoNui(NeoNuiToken ?? "", appid: NeoNuiAPPID ?? "")
         isRecording = true
@@ -1045,10 +1060,10 @@ public class BFRecordScreenController: BFBaseViewController {
         beginOnStartBtn = false
 
         recordBtn.isEnabled = false
-        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) {[weak self] in
+        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) { [weak self] in
             self?.recordBtn.isEnabled = true
         }
-        
+
         isRecording = false
 //        progressThumV.progressView.isUserInteractionEnabled = true
 //        collectionView.isScrollEnabled = true
@@ -1074,7 +1089,6 @@ public class BFRecordScreenController: BFBaseViewController {
         if !avatarView.isHidden {
             avatarView.endRecord()
         }
-        
     }
 
     @objc func cancleRecord() {
@@ -1089,7 +1103,7 @@ public class BFRecordScreenController: BFBaseViewController {
     /// 不足一秒,主动取消录制
     /// - Parameter voiceModel: <#voiceModel description#>
     @objc func recordManagerCancelRecord(voiceModel: PQVoiceModel?) {
-        BFLog(3,message: "开始录制-取消:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),currIndex=\(voiceModel?.currIndex ?? 0),\(String(describing: voiceModel)),\(String(describing:recorderManager?.voiceModel))")
+        BFLog(3, message: "开始录制-取消:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),currIndex=\(voiceModel?.currIndex ?? 0),\(String(describing: voiceModel)),\(String(describing: recorderManager?.voiceModel))")
         if voiceModel != nil, currentAssetProgress.seconds - (recorderManager?.voiceModel?.startCMTime.seconds ?? 0) < 1.0 {
             cShowHUB(superView: nil, msg: "最短录制1秒")
         }
@@ -1243,6 +1257,7 @@ public class BFRecordScreenController: BFBaseViewController {
     @objc func playVideo(btn: UIButton) {
         if itemModels[currItemModelIndex].mediaType == .IMAGE && itemModels[currItemModelIndex].voiceStickers.count <= 0 {
             BFLog(message: "图片没有录音无法播放")
+            btn.isSelected = true
             return
         }
         btn.isSelected = !btn.isSelected
@@ -1396,7 +1411,6 @@ public class BFRecordScreenController: BFBaseViewController {
     // MARK: - 音视频处理
 
     func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed _: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
-        
 //        if currentPlayRecordIndex >= 0 {
 //            if assetPlayer?.volume != haveSpeakVolume{
 //                assetPlayer?.volume = haveSpeakVolume
@@ -1406,23 +1420,23 @@ public class BFRecordScreenController: BFBaseViewController {
 //                assetPlayer?.volume = noSpeakVolume
 //            }
 //        }
-        
+
         if itemModels[currItemModelIndex].voiceStickers.first(where: { m in
-            return CMTimeCompare(m.startCMTime, currentT) <= 0 && CMTimeCompare(currentT, m.endCMTime) <= 0
-        }) != nil{
-            if assetPlayer?.volume != haveSpeakVolume{
+            CMTimeCompare(m.startCMTime, currentT) <= 0 && CMTimeCompare(currentT, m.endCMTime) <= 0
+        }) != nil {
+            if assetPlayer?.volume != haveSpeakVolume {
                 assetPlayer?.volume = haveSpeakVolume
             }
-        }else{
+        } else {
             if assetPlayer?.volume != noSpeakVolume {
                 assetPlayer?.volume = noSpeakVolume
             }
         }
-        
+
         if currentPlayRecordIndex == -3 { // 刚录音完,不需要播放
             return
         }
-        
+
         // 先排序,再查找下一个需要播放的录音
         let list = itemModels[currItemModelIndex].voiceStickers.sorted { m1, m2 in
             m1.startCMTime.seconds < m2.startCMTime.seconds
@@ -1430,14 +1444,14 @@ public class BFRecordScreenController: BFBaseViewController {
         let (shouldPlayRecordIndex, recordedAudio) = list.enumerated().first { model in
             model.1.endCMTime.seconds > CMTimeGetSeconds(currentT)
         } ?? (-1, nil)
-        
+
         // 没找到,代表后边没有录音需求了
         guard let recordedAudio = recordedAudio, recordedAudio.wavFilePath.count > 0 else {
             BFLog(3, message: "未找到可播放录音")
             return
         }
         BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startCMTime.seconds) ~ \(recordedAudio.endCMTime.seconds), \(recordedAudio.wavFilePath ?? "")")
-        
+
         // 创建播放器
         if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString?)?.lastPathComponent {
             let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
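
The surrounding code reuses a single record player and only builds a new `AVPlayerItem` when the next recording's file differs from the one currently loaded (it compares the `lastPathComponent` of the current `AVURLAsset` against the new `wavFilePath`). A minimal sketch of that reuse pattern with plain AVFoundation; the `play(recordingAt:on:)` helper is illustrative:

```swift
import AVFoundation

/// Reuses one AVPlayer and only swaps the item when the source file changes,
/// avoiding a full player teardown between consecutive recordings.
func play(recordingAt path: String, on player: inout AVPlayer?) {
    let url = URL(fileURLWithPath: path)
    let currentURL = (player?.currentItem?.asset as? AVURLAsset)?.url

    if player == nil {
        player = AVPlayer(playerItem: AVPlayerItem(url: url))
    } else if currentURL?.lastPathComponent != url.lastPathComponent {
        // Different file: replace the item instead of recreating the player.
        player?.replaceCurrentItem(with: AVPlayerItem(url: url))
    }
    player?.play()
}
```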
@@ -1471,7 +1485,7 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             recordPlayerTimeObserver?.invalidate()
             recordPlayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer] time in
-                guard let sself = self,let rPlay = recordPlayer else {
+                guard let sself = self, let rPlay = recordPlayer else {
                     BFLog(3, message: "sself为空")
                     return
                 }
@@ -1607,7 +1621,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 movie?.startProcessing()
                 movieIsProcessing = true
             }
-            //add by ak 切换段落时会有一小段原素材的声音播放时先把视频原音量设置为0
+            // add by ak 切换段落时会有一小段原素材的声音播放时先把视频原音量设置为0
             assetPlayer?.volume = 0
             assetPlayer?.play()
         } else {
@@ -1626,14 +1640,14 @@ public class BFRecordScreenController: BFBaseViewController {
     func pause() {
         BFLog(1, message: "暂停播放")
         isNormalPlaying = false
-        
+
         // ---- 修复暂停播放回退问题
         avplayerTimeObserver?.invalidate()
         avplayerTimeObserver = nil
         recordPlayerTimeObserver?.invalidate()
         recordPlayerTimeObserver = nil
         // ----
-        
+
         subtitleBtn.isHidden = false
         soundSettingBtn.isHidden = false
         withDrawBtn.isHidden = false
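
The "修复暂停播放回退问题" (fix for playback jumping back on pause) block above removes both periodic time observers before the pause bookkeeping runs, so a late progress callback cannot drag the UI backwards. The observers here expose an `invalidate()` helper, presumably a project wrapper; with plain AVFoundation the add/remove lifecycle looks like this sketch (`PlaybackProgressObserver` is an illustrative name):

```swift
import AVFoundation
import CoreMedia

final class PlaybackProgressObserver {
    private let player: AVPlayer
    private var token: Any?

    init(player: AVPlayer) { self.player = player }

    /// Starts reporting progress every millisecond of media time.
    func start(_ onProgress: @escaping (CMTime) -> Void) {
        stop() // never stack observers on the same player
        token = player.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000),
                                               queue: .main) { time in
            onProgress(time)
        }
    }

    /// Removing the token is what prevents a stale callback from moving the
    /// progress UI after the user has paused or sought elsewhere.
    func stop() {
        if let token = token {
            player.removeTimeObserver(token)
            self.token = nil
        }
    }

    deinit { stop() }
}
```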
@@ -1645,7 +1659,7 @@ public class BFRecordScreenController: BFBaseViewController {
         pauseTime = currentAssetProgress.seconds
         currentPlayRecordIndex = -1
         hadPrepareToPlayRecord = false
-        
+
         // 暂停状态
         playBtn.isSelected = (itemModels[currItemModelIndex].mediaType == .IMAGE && itemModels[currItemModelIndex].voiceStickers.count <= 0)
     }
@@ -1976,10 +1990,11 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
 
     public func collectionView(_: UICollectionView, didSelectItemAt _: IndexPath) {}
 
-    public func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
+    public func scrollViewWillBeginDragging(_: UIScrollView) {
         BFLog(1, message: "开始滚动")
         recordBtn.isEnabled = false
     }
+
     public func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
         endScrollItem(scrollView)
     }
@@ -1993,7 +2008,7 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
     func endScrollItem(_ scrollView: UIScrollView) {
 //    public func scrollViewDidScroll(_ scrollView: UIScrollView) {
         BFLog(1, message: "滚动结束")
-        
+
         let page = Int((scrollView.contentOffset.x + scrollView.frame.width / 2) / scrollView.frame.width)
         if page != currItemModelIndex {
             // 切换素材时先把录制状态切为不可用,延迟可点,避免在缩略图未加载出来时即可录制
@@ -2034,10 +2049,10 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             // 重置播放器
             assetPlayer?.seek(to: CMTime.zero)
             recordPlayer?.seek(to: CMTime.zero)
-            
+
             if let voice = itemModels[page].voiceStickers.enumerated().first(where: { m in
                 m.1.startTime == 0
-            }){
+            }) {
                 currentPlayRecordIndex = voice.0
             }
 
@@ -2087,7 +2102,7 @@ public extension BFRecordScreenController {
             recordStartPlayTime = currentAssetProgress
             currenStartPlayTime = CMTime.zero
         }
-        playRecord(at: time, periodicTimeObserver: { [weak self] currentT, currentItem in
+        playRecord(at: time, periodicTimeObserver: { [weak self] currentT, _ in
 //            BFLog(1, message: "播放录音进度:\(currentT.seconds),\(currentItem)")
             if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE, self?.isNormalPlaying ?? false {
                 self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))