
Merge branch 'master' of https://git.yishihui.com/iOS/BFRecordScreenKit

* 'master' of https://git.yishihui.com/iOS/BFRecordScreenKit:
  1.event report change
  Use a single recording utility

# Conflicts:
#	BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift
harry, 3 years ago
parent
current commit
9528e08e35

+ 0 - 1
BFRecordScreenKit.podspec

@@ -46,7 +46,6 @@ TODO: Add long description of the pod here.
   s.dependency 'BFCommonKit'
   s.dependency 'BFNetRequestKit'
   s.dependency 'BFMaterialKit'
-  s.dependency 'BFAnalyzeKit'
   s.dependency 'BFMediaKit'
   s.dependency 'BFUIKit'
   s.dependency 'GPUImage'

+ 63 - 80
BFRecordScreenKit/Classes/BFVoiceRecordManager.swift

@@ -9,16 +9,33 @@ import BFCommonKit
 import BFMediaKit
 import Foundation
 
-class BFVoiceRecordManager {
+class BFVoiceRecordManager:NSObject {
     // Recording related
-    var audioRecorder: NXAudioRecorder?
-    var limitedDuration: Double = 600 // 限制录制时长
+    var audioRecorder: BFRecorderManager?
+    // Recording result callback
     var endRecordHandle: ((PQVoiceModel?, Error?) -> Void)?
+    
     var cancelRecordHandle: ((Error?) -> Void)?
 
     var recorderFilePath: String = ""
     var beginRecordTime: Date = Date()
     var voiceModel: PQVoiceModel?
+    
+    // Subtitle callback
+    var subtitleRecordHandle: ((String?) -> Void)?
+    // Progress callback
+    var recorderProgrossHandle: ((Float64?) -> Void)?
+    
+    
+    /// Initializer
+    /// - Parameters:
+    ///   - token: NLS access token
+    ///   - appid: NLS app key
+    public init(token:String,appid:String){
+        super.init()
+        audioRecorder = BFRecorderManager.init(token, appid: appid)
+        audioRecorder?.delegate = self
+    }
 
     /// Record audio. The index was originally meant to track recording order; it is now unused.
     func startRecord(index: Int) {
@@ -30,29 +47,11 @@ class BFVoiceRecordManager {
         }
         recorderFilePath.append("recorder_\(index)_\(Date().timeIntervalSince1970).wav")
         BFLog(1, message: "开始录音 \(recorderFilePath)")
-        do {
-            try audioRecorder = NXAudioRecorder(path: recorderFilePath)
-
-        } catch {
-            BFLog(message: "录音准备失败,当前无法录音 \(error))")
-            cancelRecordHandle?(error)
-            return
-        }
 
-//        audioRecorder?.recorderProgross = { [weak self] timer in
-//            // 所有小段加在一起 > 10 min 自动取消
-//            let sumTime = timer + (self?.getAudioFileslDuration() ?? 0)
-//            if sumTime >= (self?.limitedDuration ?? 600) {
-//                cShowHUB(superView: nil, msg: "最长可录音10分钟")
-//                self?.stopRecord(isCancel: false)
-//                return
-//            }
-//        }
         BFLog(1, message: "开始录制")
-        audioRecorder?.startRecord()
+        audioRecorder?.startRecord(recorderFilePath)
         beginRecordTime = Date()
 
-//        beginRecordTime = Date().timeIntervalSince1970
     }
 
     /// Cancel the audio recording
@@ -65,72 +64,56 @@ class BFVoiceRecordManager {
     func endRecord() {
         stopRecord(isCancel: false)
     }
-
-    // 取所有声音文件的时长
-    func getAudioFileslDuration() -> Float64 {
-        let duration: Float64 = 0.0
-//        if recorderPart.cacheRecorderCount == 0 { return duration }
-//        for fileURL in recorderPart.cacheRecorderFiles {
-//            duration = duration + AVURLAsset(url: fileURL, options: avAssertOptions).duration.seconds
-//            BFLog(message: "duration === \(duration)")
-//        }
-        return duration
-    }
-
     /// Stop recording: 1) normal stop, 2) cancel
     /// - Parameter isCancel: whether this stop is a cancellation
     func stopRecord(isCancel: Bool) {
-        if !(audioRecorder?.recorder.isRecording ?? false) {
-            BFLog(message: "不是录制状态")
-            return
+//        if !(audioRecorder?.voiceRecorder.isStarted() ?? false) {
+//            BFLog(message: "不是录制状态")
+//            return
+//        }
+        audioRecorder?.stopRecord()
+    }
+ 
+}
+// MARK: - Recorder callbacks
+extension BFVoiceRecordManager: BFRecorderManagerDelegate{
+    
+    public func recorderProgress(_: BFRecorderManager, recoderTime:Double){
+        BFLog(message: "录音机进度:\(recoderTime)")
+        recorderProgrossHandle?(recoderTime)
+    }
+    public func recorderDidStop(_ outfile: String) {
+        if Date().timeIntervalSince(beginRecordTime) < 1 {
+            cShowHUB(superView: nil, msg: "说话时间太短")
         }
 
-        audioRecorder?.stopRecord { [weak self] isSuccess, url in
-            guard let strongSelf = self else { return }
-
-            if strongSelf.getAudioFileslDuration() < 599, Date().timeIntervalSince(strongSelf.beginRecordTime) < 1 {
-                cShowHUB(superView: nil, msg: "说话时间太短")
+        let duration = Date().timeIntervalSince(beginRecordTime)
+        if  duration > 1 {
+       
+            // Apply noise reduction
+            let noiseFilePath = outfile.replacingOccurrences(of: ".wav", with: "_noise_\(1)_.wav")
+            BFLog(1, message: "降噪后地址:\(noiseFilePath)")
+            NXNoiseReduction().denoise(outfile, outFile: noiseFilePath)
+            if let model = voiceModel {
+                model.wavFilePath = noiseFilePath
+                model.duration = "\(duration)"
+                endRecordHandle?(model, nil)
             }
-
-            let duration = Date().timeIntervalSince(strongSelf.beginRecordTime)
-            if isSuccess && !isCancel && duration > 1 {
-                BFLog(1, message: "结束录音  结果:\(isSuccess) \n url is \(url)")
-
-                // 处理降噪
-                let noiseFilePath = url.replacingOccurrences(of: ".wav", with: "_noise_\(1)_.wav")
-                BFLog(1, message: "降噪后地址:\(noiseFilePath)")
-                NXNoiseReduction().denoise(url, outFile: noiseFilePath)
-                if let model = self?.voiceModel {
-                    model.wavFilePath = noiseFilePath
-                    model.duration = "\(duration)"
-                    self?.endRecordHandle?(model, nil)
-                }
-
-//
-//                strongSelf.recorderPart.cacheRecorderFiles.append(URL(fileURLWithPath: noiseFilePath))
-
-                // 删除录制的原文件
-                do {
-                    try FileManager.default.removeItem(atPath: url)
-                    print("Success to remove recorder file. \(url)")
-                } catch {
-                    print("Failed to remove recorder file. \(url)")
-                }
+  
+            // Delete the original recorded file
+            do {
+                try FileManager.default.removeItem(atPath: outfile)
+                print("Success to remove recorder file. \(outfile)")
+            } catch {
+                print("Failed to remove recorder file. \(outfile)")
             }
         }
     }
-
-    // 合并 成 MP3文件
-    func mergeToMP3file() {
-//        isMergeIng = true
-//        PQPlayerViewModel.mergeAudios(urls: recorderPart.cacheRecorderFiles) { [weak self] completURL in
-//
-//            BFLog(message: "completURL is \(String(describing: completURL))")
-//
-//            if completURL?.absoluteString.count ?? 0 > 0 {
-//                self?.recorderPart.compliteMP3AudioFile = completURL?.relativePath ?? ""
-//                self?.isMergeIng = false
-//            }
-//        }
+    
+    public func eventCallback(_: BFRecorderManager, asrResult: String) {
+        
+        subtitleRecordHandle?(asrResult)
     }
+
 }
+ 
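For reference, a minimal sketch of how the reworked BFVoiceRecordManager is driven, based only on the API visible in this diff. The placeholder token/appkey strings and the call site are stand-ins (in the controller below they come from BFRecordScreenViewModel.getNlsAccessToken); PQVoiceModel and BFLog come from the BFMediaKit/BFCommonKit imports already shown above.

import BFCommonKit // BFLog
import BFMediaKit  // PQVoiceModel

let manager = BFVoiceRecordManager(token: "<nls-token>", appid: "<nls-appkey>")

// Progress in seconds since recording started
manager.recorderProgrossHandle = { progress in
    BFLog(message: "recording progress: \(progress ?? 0)")
}
// Raw ASR JSON from the recognizer, used to build subtitles
manager.subtitleRecordHandle = { asrResult in
    BFLog(message: "asr result: \(asrResult ?? "")")
}
// The denoised wav path and duration arrive on the voice model
manager.endRecordHandle = { model, error in
    BFLog(message: "finished: \(model?.wavFilePath ?? "") error: \(String(describing: error))")
}

let voice = PQVoiceModel()
voice.startTime = 0 // the controller sets this to the current asset progress
manager.voiceModel = voice
manager.startRecord(index: 1) // index is kept for ordering but effectively unused
// ... later
manager.endRecord() // or manager.stopRecord(isCancel: true) to discard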

+ 190 - 178
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -27,7 +27,10 @@ public class BFRecordScreenController: BFBaseViewController {
     public var nextActionHandle: (() -> Void)?
     public var closeActionHandle: (() -> Void)?
     public var changeItemHandle: ((_ index: Int) -> Void)?
-
+    // Report callback when the current recording ends
+    public var recordRndHandle: ((_ currentRecord: PQVoiceModel?) -> Void)?
+    // Report callback for subtitle button taps
+    public var subTitleBtnClickHandle: ((_ isOn: Bool) -> Void)?
     // MARK: - Recording parameters
 
     public var assets = [PHAsset]()
@@ -95,78 +98,12 @@ public class BFRecordScreenController: BFBaseViewController {
                                          AVEncoderBitDepthHintKey: 16, // Bit depth
                                          AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue] // Audio quality
 
-    public var haveSpeakVolume: Float = 0.0
-    public var noSpeakVolume: Float = 1.0
-
-    lazy var recorderManager: BFVoiceRecordManager = {
-        let manager = BFVoiceRecordManager()
-        manager.cancelRecordHandle = { _ in
-        }
-        manager.endRecordHandle = { [weak self] model, _ in
-            if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
-                // 加入到语音数组里
-                // 注:如果是图片则在录制结束后纠正当前进度
-                if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
-                    var duration: Double = 0
-                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
-                        if tempModel.endTime < sself.currentAssetProgress.seconds {
-                            duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
-                        }
-                    }
-                    duration = duration + (Double(model.duration ?? "0") ?? 0)
-                    sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
-                }
-                model.endTime = sself.currentAssetProgress.seconds
-
-                let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
-
-                var deletedVoices = [(PQVoiceModel, Int)]()
-
-                for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
-                    let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
-
-                    if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
-                        deletedVoices.append((m, i))
-                        continue
-                    }
-                }
-                sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
-                    let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
-                    return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
-                }
-                BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
-
-                var event = sself.events.last
-                if event != nil {
-                    event!.deletedVoices = deletedVoices
-                    sself.events.removeLast()
-                    sself.events.append(event!)
-                }
-                sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
-                if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
-                    var duration: Double = 0
-                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
-                        duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
-                    }
-                    sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
-                    self?.isEndPlay = true
-                }
-                DispatchQueue.main.async { [weak self] in
-                    // 录音完,重绘撤销按钮,更新录音按钮,
-                    self?.changeWithDrawBtnLayout(true)
-                    // 注:在录制结束时矫正当前位置,避免跟指针无法对其
-                    self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
-                    self?.progressThumV.progress = self?.currentAssetProgress.seconds ?? 0
-                    self?.deleteRecordBtn.isHidden = true
-                    self?.recordBtn.isHidden = false
-                }
-                sself.currentPlayRecordIndex = -3 // 刚录音完,不需要播放录音
-                // 重置录制开始时间
-                sself.recordStartTime = 0
-            }
-        }
-        return manager
-    }()
+    public var haveSpeakVolume : Float = 0.0
+    public var noSpeakVolume : Float = 1.0
+    
+    // Recording manager
+    var recorderManager : BFVoiceRecordManager?
+ 
 
     // MARK: - View parameters
 
@@ -356,8 +293,6 @@ public class BFRecordScreenController: BFBaseViewController {
         return audioSettingView
     }()
 
-    // 录音识别文字
-    var speechTranscriberUtil: PQSpeechTranscriberUtil?
 
     lazy var progressThumV: BFVideoThumbProgressView = {
         let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
@@ -426,7 +361,7 @@ public class BFRecordScreenController: BFBaseViewController {
         NotificationCenter.default.removeObserver(self)
         avplayerTimeObserver?.invalidate()
         if isRecording {
-            recorderManager.stopRecord(isCancel: true)
+            recorderManager?.stopRecord(isCancel: true)
         }
         assetPlayer?.pause()
         recordPlayer?.pause()
@@ -450,7 +385,119 @@ public class BFRecordScreenController: BFBaseViewController {
         // add by ak: fetch the NLS token
         BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
             BFLog(message: "nls appkey is \(appkey), token is \(token)")
-//            self?.speechTranscriberUtil = PQSpeechTranscriberUtil(token, appid: appkey)
+
+            self?.recorderManager = BFVoiceRecordManager.init(token: token, appid: appkey)
+        
+            // Cancel callback
+            self?.recorderManager?.cancelRecordHandle = { _ in
+            }
+            
+            self?.recorderManager?.recorderProgrossHandle = {[weak self] progress in
+                BFLog(1, message: "curr:录音进度--\(progress) \(self?.recordStartTime ) \(self?.isRecording)")
+                if self?.indirectionView == nil {
+                    self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
+                    self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
+                }
+                // Update the recording progress
+                // Note: video cannot be driven by recording progress, because the player has not started playing yet when recording begins, so the two get out of sync
+                // Note: when recording stops, the video player keeps advancing; the error is roughly 80 ms
+                if self?.isRecording ?? false {
+                    let ratioX = 0.08
+                    self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: (progress ?? 0.0) - ratioX)
+                }
+                if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
+                    self?.imageRecordProgress(isRecord: true, progress: progress ?? 0)
+                }
+
+                
+            }
+            
+            self?.recorderManager?.subtitleRecordHandle = {[weak self] asrResult in
+                if(asrResult == nil){
+                    BFLog(message: "识别结果为空????不能生成字幕数据")
+                    return
+                }
+                let dicResult: [String: Any]? = jsonStringToDictionary(asrResult!)
+
+                let payload = dicResult?["payload"] as? [String: Any]
+
+                BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(self?.recorderManager?.voiceModel?.startTime ?? 0.0)")
+                DispatchQueue.main.async {
+                    // 1. Save the subtitle data. begin_time is when the text starts appearing and time is when it stops, both in milliseconds relative to the whole recorded audio. self.recorderManager.voiceModel?.startTime is when recording started, so both values must be offset by it.
+
+                    let newSubtitle = PQEditSubTitleModel()
+                    
+                    // 300 ms is added here because the returned timings are shifted slightly earlier to cut right onto the words; the exact offset is undocumented and depends on the source audio. For now we delay by 300 ms (unit: milliseconds).
+                    newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
+                  
+                    newSubtitle.timelineOut = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
+                    var showText = ((payload?["result"]) as? String) ?? ""
+                    if showText.count > subtitleMaxlength {
+                        showText = showText.substring(to: subtitleMaxlength)
+                        showText += "..."
+                    }
+                    newSubtitle.text = showText
+
+                    BFLog(message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
+                    newSubtitle.setting = self?.subtitleSettingView.subtitle.setting ?? BFSubTitileSettingModel()
+
+                    self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.append(newSubtitle)
+                }
+                
+            }
+            self?.recorderManager?.endRecordHandle = { [weak self] voiceModel, _ in
+                if let sself = self, let model = voiceModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
+                    // Append to the voice sticker array
+                    model.endTime = sself.currentAssetProgress.seconds
+
+                    let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
+
+                    var deletedVoices = [(PQVoiceModel, Int)]()
+
+                    for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
+                        let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+
+                        if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
+                            deletedVoices.append((m, i))
+                            continue
+                        }
+                    }
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
+                        let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+                        return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
+                    }
+                    BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
+                    
+                    var event = sself.events.last
+                    if event != nil {
+                        event!.deletedVoices = deletedVoices
+                        sself.events.removeLast()
+                        sself.events.append(event!)
+                    }
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
+                    if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
+                        var duration: Double = 0
+                        sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
+                            duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
+                        }
+                        sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
+                        sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
+                        model.endTime = sself.currentAssetProgress.seconds
+                        self?.isEndPlay = true
+                    }
+                    DispatchQueue.main.async {[weak self] in
+                        // After recording, redraw the undo button and update the record button
+                        self?.changeWithDrawBtnLayout(true)
+                        // Note: correct the current position when recording ends, so it stays aligned with the indicator
+                        self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
+                        self?.deleteRecordBtn.isHidden = true
+                        self?.recordBtn.isHidden = false
+                    }
+                    sself.currentPlayRecordIndex = -3 // Just finished recording; no need to play it back
+                    // Reset the recording start time
+                    sself.recordStartTime = 0
+                }
+            }
         }
 
         view.backgroundColor = .black
@@ -536,6 +583,8 @@ public class BFRecordScreenController: BFBaseViewController {
     /// Update subtitles; used during playback
     /// - Parameter time: current playback progress
     func updateSubtitle(time: CMTime) {
+        
+        BFLog(message: "currTime is \(CMTimeGetSeconds(time))")
         var findShowSubtitle: PQEditSubTitleModel?
         for (index, subtitle) in itemModels[currItemModelIndex].titleStickers.enumerated() {
             if subtitle.timelineIn <= CMTimeGetSeconds(time), subtitle.timelineOut >= CMTimeGetSeconds(time) {
@@ -717,6 +766,9 @@ public class BFRecordScreenController: BFBaseViewController {
     @objc func subTitleClick() {
         BFLog(message: "subTitle Click ")
         subtitleSettingView.isHidden = !subtitleSettingView.isHidden
+        if subTitleBtnClickHandle != nil {
+            subTitleBtnClickHandle!(subtitleSettingView.subtitle.setting.subtitleIsShow)
+        }
     }
 
     // Audio settings
@@ -770,46 +822,31 @@ public class BFRecordScreenController: BFBaseViewController {
         pause()
         isRecording = true
 
+        if(self.recorderManager == nil){
+            BFLog(message: "录音机初始化错误!!!")
+            return
+            
+        }
+        
         let model = PQVoiceModel()
         // Start time
         model.startTime = currentAssetProgress.seconds
         model.volume = 100
-        recorderManager.voiceModel = model
-        recorderManager.startRecord(index: 1)
+        recorderManager?.voiceModel = model
+        recorderManager?.startRecord(index: 1)
         if recordStartTime <= 0 {
             recordStartTime = currentAssetProgress.seconds
         }
         // Add an undo checkpoint
         events.append(WithDrawModel(type: 2, timestamp: model.startTime))
 
-//        DispatchQueue.main.async {[weak self] in
-//            let model = PQVoiceModel()
-//            model.startTime = self?.currentAssetProgress.seconds ?? 0
-//            model.volume = 100
-//            self?.recorderManager.voiceModel = model
-//            self?.recorderManager.startRecord(index: 1)
-//            if self?.recordStartTime ?? 0 <= 0 {
-//                self?.recordStartTime = self?.currentAssetProgress.seconds ?? 0
-//            }
-//            // 添加撤销记录点
-//            self?.events.append(WithDrawModel(type: 2, timestamp: model.startTime))
-//
-//        }
-
         isRecording = true
 
+
         if !avatarView.isHidden {
             avatarView.beginRecord()
         }
 
-        DispatchQueue.global().async { [weak self] in
-            guard let sself = self else {
-                return
-            }
-            sself.speechTranscriberUtil?.delegate = sself
-            sself.speechTranscriberUtil?.startTranscriber()
-        }
-
         if itemModels[currItemModelIndex].mediaType == .VIDEO {
             if !movieIsProcessing {
                 movie?.startProcessing()
@@ -821,40 +858,15 @@ public class BFRecordScreenController: BFBaseViewController {
             // Paused state
             (collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
         }
-
-        recorderManager.audioRecorder?.recorderProgross = { [weak self] progress in
-            BFLog(1, message: "curr:录音进度--\(progress)")
-            guard let sself = self else {
-                return
-            }
-            if sself.indirectionView == nil {
-                sself.indirectionView = BFIndirectionProgressView(frame: sself.progressThumV.progessIndicateBackV.bounds , percenWidth: sself.itemModels[sself.currItemModelIndex ].mediaType == .IMAGE ? (sself.progressThumV.thumbImageWidth ) / 2 : 0, totalDuration: sself.itemModels[sself.currItemModelIndex].materialDuraion )
-                sself.progressThumV.progessIndicateBackV.addSubview((sself.indirectionView)!)
-            }
-            // 更新录制进度
-            // 注:视频无法以录制进度驱动,因当录音开始录音时播放器还未播放,导致进度不一致
-            // 注:在录制停止时,视频播放器进度依然在走,误差在80毫秒左右
-
-            if sself.isRecording {
-                let progress = sself.itemModels[sself.currItemModelIndex ?? 0].mediaType == .IMAGE ? progress : progress - 0.08
-                sself.indirectionView?.setProgress(start: sself.recordStartTime ?? 0, progress: progress)
-                if sself.itemModels[sself.currItemModelIndex ].mediaType == .IMAGE {
-                    sself.imageRecordProgress(isRecord: true, progress: progress)
-                }
-            }
-        }
     }
 
     @objc func endRecord() {
-        DispatchQueue.global().async {
-            self.speechTranscriberUtil?.endTranscriber()
-        }
-//        playBtn.isSelected = true
+ 
         // Save the recording
         isRecording = false
         pause()
 
-        recorderManager.endRecord()
+        recorderManager?.endRecord()
 
         if !avatarView.isHidden {
             avatarView.endRecord()
@@ -863,7 +875,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
     func cancleRecord() {
         isRecording = false
-        recorderManager.cancleRecord()
+        recorderManager?.cancleRecord()
 
         pause()
     }
@@ -1383,60 +1395,37 @@ public class BFRecordScreenController: BFBaseViewController {
             }
         }
     }
-}
-
-extension BFRecordScreenController: GPUImageMovieDelegate {
-    public func didCompletePlayingMovie() {
-        BFLog(1, message: "播放结束")
-        currentPlayRecordIndex = -1
-    }
-}
-
-extension BFRecordScreenController: AVAudioRecorderDelegate {
-    public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
-        BFLog(1, message: "录音结束")
-    }
-}
-
-extension BFRecordScreenController: AVAudioPlayerDelegate {
-    public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
-        BFLog(1, message: "录音播放结束")
-    }
-}
-
-// MARK: - 语音转文字代理
-
-extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
-    public func eventCallback(_: PQSpeechTranscriberUtil, asrResult: String) {
-        let dicResult: [String: Any]? = jsonStringToDictionary(asrResult)
-
-        let payload = dicResult?["payload"] as? [String: Any]
-
-        BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(recorderManager.voiceModel?.startTime ?? 0.0)")
-        DispatchQueue.main.async {
-            // 1,保存字幕数据 begin_time是开始出现文字的时间,time 是结束文字出现的时间 单位都为毫秒,都是相对于录制音频数据整段时间。self.recorderManager.voiceModel?.startTime 为开始的录制的时间,开始和结束都要加上这个时差
-
-            let newSubtitle = PQEditSubTitleModel()
-
-            // 这里加300ms 是因为返回结果为了切到字,时长提前一些时间,具体时间官方没说和原音频有关系。这里我们先延后300ms 单位:毫秒。
-            newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
-
-            newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
-            var showText = ((payload?["result"]) as? String) ?? ""
-            if showText.count > subtitleMaxlength {
-                showText = showText.substring(to: subtitleMaxlength)
-                showText += "..."
+ 
+    func drawOrUpdateRecordProgessLable() {
+        DispatchQueue.main.async { [weak self] in
+            guard let sself = self else {
+                return
             }
-            newSubtitle.text = showText
-
-            BFLog(message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
-            newSubtitle.setting = self.subtitleSettingView.subtitle.setting
 
-            self.itemModels[self.currItemModelIndex].titleStickers.append(newSubtitle)
+            sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
+                vv.removeFromSuperview()
+            }
+            let totalDur = sself.itemModels[sself.currItemModelIndex].materialDuraion
+            let height = sself.progressThumV.progessIndicateBackV.height
+            if sself.itemModels[sself.currItemModelIndex].mediaType == .VIDEO {
+                if totalDur > 0, sself.itemModels[sself.currItemModelIndex].voiceStickers.count > 0 {
+                    let width = sself.progressThumV.progessIndicateBackV.width
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { model in
+                        let lineV = UIView(frame: CGRect(x: model.startTime * Double(width) / totalDur, y: 0, width: (model.endTime - model.startTime) * Double(width) / totalDur, height: Double(height)))
+                        lineV.backgroundColor = ThemeStyleColor
+                        sself.progressThumV.progessIndicateBackV.addSubview(lineV)
+                    }
+                }
+            } else {
+                let lineV = UIView(frame: CGRect(x: 0, y: 0, width: totalDur * sself.progressThumV.thumbImageWidth / 2, height: Double(height)))
+                lineV.backgroundColor = ThemeStyleColor
+                sself.progressThumV.progessIndicateBackV.addSubview(lineV)
+            }
         }
     }
-
+  
     func reloadMaterial(recordItem: BFRecordItemModel) {
+
         if let path = recordItem.localPath, let lastCell: BFImageCoverViewCell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
             setVideoPlay(item: recordItem.playItem, imageView: lastCell.playView)
             setAudioPlay(item: recordItem.playItem)
@@ -1454,8 +1443,31 @@ extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
             }
         }
     }
+
+}
+
+extension BFRecordScreenController: GPUImageMovieDelegate {
+    public func didCompletePlayingMovie() {
+        BFLog(1, message: "播放结束")
+        currentPlayRecordIndex = -1
+    }
+}
+
+extension BFRecordScreenController: AVAudioRecorderDelegate {
+    public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
+        BFLog(1, message: "录音结束")
+    }
+}
+
+extension BFRecordScreenController: AVAudioPlayerDelegate {
+    public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
+        BFLog(1, message: "录音播放结束")
+    }
 }
 
+
+
+
 // MARK: - UICollectionViewDelegate
 
 /// UICollectionViewDelegate
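
For clarity, the subtitle handler added above reduces to the following timing arithmetic, restated as a standalone sketch. The helper name makeSubtitle is hypothetical; jsonStringToDictionary, subtitleMaxlength, substring(to:) and PQEditSubTitleModel are used as they appear in this diff.

// begin_time / time arrive in milliseconds relative to the recorded audio;
// recordStart is recorderManager.voiceModel?.startTime in seconds.
func makeSubtitle(from asrResult: String, recordStart: Float64) -> PQEditSubTitleModel? {
    guard let dict = jsonStringToDictionary(asrResult),
          let payload = dict["payload"] as? [String: Any] else { return nil }

    let subtitle = PQEditSubTitleModel()
    // The recognizer reports word boundaries slightly early, so push the in-point back 300 ms
    subtitle.timelineIn = recordStart + Float64(((payload["begin_time"] as? Int) ?? 0) + 300) / 1000.0
    subtitle.timelineOut = recordStart + Float64((payload["time"] as? Int) ?? 0) / 1000.0

    // Truncate overly long recognition results, as the handler does
    var text = (payload["result"] as? String) ?? ""
    if text.count > subtitleMaxlength {
        text = text.substring(to: subtitleMaxlength) + "..."
    }
    subtitle.text = text
    return subtitle
}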