@@ -27,7 +27,10 @@ public class BFRecordScreenController: BFBaseViewController {
public var nextActionHandle: (() -> Void)?
public var closeActionHandle: (() -> Void)?
public var changeItemHandle: ((_ index: Int) -> Void)?
-
+ // Reports that the current recording has ended
+ public var recordRndHandle: ((_ currentRecord: PQVoiceModel?) -> Void)?
+ // Reports subtitle button taps
+ public var subTitleBtnClickHandle: ((_ isOn: Bool) -> Void)?
// MARK: - Recording parameters

public var assets = [PHAsset]()
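A minimal sketch of how a hosting controller might wire the two callbacks added above; the host setup and the print calls are illustrative assumptions, not part of this change.

// Illustrative only: wiring the closures introduced in this hunk.
let recordVC = BFRecordScreenController()
recordVC.recordRndHandle = { currentRecord in
    // Per its declaration, currentRecord is the PQVoiceModel of the recording that just ended (may be nil).
    print("record ended, duration:", currentRecord?.duration ?? "0")
}
recordVC.subTitleBtnClickHandle = { isOn in
    // isOn mirrors subtitleSettingView.subtitle.setting.subtitleIsShow at tap time.
    print("subtitle button tapped, subtitles shown:", isOn)
}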
@@ -95,78 +98,12 @@ public class BFRecordScreenController: BFBaseViewController {
AVEncoderBitDepthHintKey: 16, // Bit depth
AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue] // Audio quality

- public var haveSpeakVolume: Float = 0.0
- public var noSpeakVolume: Float = 1.0
-
- lazy var recorderManager: BFVoiceRecordManager = {
- let manager = BFVoiceRecordManager()
- manager.cancelRecordHandle = { _ in
- }
- manager.endRecordHandle = { [weak self] model, _ in
- if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
- // Append to the voice array
- // Note: for images, correct the current progress after recording ends
- if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
- var duration: Double = 0
- sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
- if tempModel.endTime < sself.currentAssetProgress.seconds {
- duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
- }
- }
- duration = duration + (Double(model.duration ?? "0") ?? 0)
- sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
- }
- model.endTime = sself.currentAssetProgress.seconds
-
- let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
-
- var deletedVoices = [(PQVoiceModel, Int)]()
-
- for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
- let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
-
- if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
- deletedVoices.append((m, i))
- continue
- }
- }
- sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
- let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
- return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
- }
- BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
-
- var event = sself.events.last
- if event != nil {
- event!.deletedVoices = deletedVoices
- sself.events.removeLast()
- sself.events.append(event!)
- }
- sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
- if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
- var duration: Double = 0
- sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
- duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
- }
- sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
- self?.isEndPlay = true
- }
- DispatchQueue.main.async { [weak self] in
- // Recording finished: redraw the undo button and update the record button
- self?.changeWithDrawBtnLayout(true)
- // Note: correct the current position when recording ends to keep it aligned with the indicator
- self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
- self?.progressThumV.progress = self?.currentAssetProgress.seconds ?? 0
- self?.deleteRecordBtn.isHidden = true
- self?.recordBtn.isHidden = false
- }
- sself.currentPlayRecordIndex = -3 // just finished recording; no need to play it back
- // Reset the recording start time
- sself.recordStartTime = 0
- }
- }
- return manager
- }()
+ public var haveSpeakVolume: Float = 0.0
+ public var noSpeakVolume: Float = 1.0
+
+ // Voice recording manager
+ var recorderManager: BFVoiceRecordManager?
+

// MARK: - View parameters
@@ -356,8 +293,6 @@ public class BFRecordScreenController: BFBaseViewController {
return audioSettingView
}()

- // Speech-to-text for recordings
- var speechTranscriberUtil: PQSpeechTranscriberUtil?

lazy var progressThumV: BFVideoThumbProgressView = {
let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
@@ -426,7 +361,7 @@ public class BFRecordScreenController: BFBaseViewController {
NotificationCenter.default.removeObserver(self)
avplayerTimeObserver?.invalidate()
if isRecording {
- recorderManager.stopRecord(isCancel: true)
+ recorderManager?.stopRecord(isCancel: true)
}
assetPlayer?.pause()
recordPlayer?.pause()
@@ -450,7 +385,119 @@ public class BFRecordScreenController: BFBaseViewController {
// add by ak: fetch the NLS token
BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
BFLog(message: "nls appkey is \(appkey), token is \(token)")
-// self?.speechTranscriberUtil = PQSpeechTranscriberUtil(token, appid: appkey)
+
+ self?.recorderManager = BFVoiceRecordManager(token: token, appid: appkey)
+
+ // Cancel callback
+ self?.recorderManager?.cancelRecordHandle = { _ in
+ }
+
+ self?.recorderManager?.recorderProgrossHandle = { [weak self] progress in
+ BFLog(1, message: "curr:录音进度--\(progress) \(self?.recordStartTime) \(self?.isRecording)")
+ if self?.indirectionView == nil {
+ self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
+ self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
+ }
+ // Update the recording progress
+ // Note: video cannot be driven by the recording progress, because the player has not started playing when recording begins, which leaves the two out of sync
+ // Note: when recording stops, the video player keeps advancing; the error is roughly 80 ms
+ if self?.isRecording ?? false {
+ let ratioX = 0.08
+ self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: (progress ?? 0.0) - ratioX)
+ }
+ if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
+ self?.imageRecordProgress(isRecord: true, progress: progress ?? 0)
+ }
+
+
+ }
+
+ self?.recorderManager?.subtitleRecordHandle = { [weak self] asrResult in
+ if asrResult == nil {
+ BFLog(message: "识别结果为空????不能生成字幕数据")
+ return
+ }
+ let dicResult: [String: Any]? = jsonStringToDictionary(asrResult!)
+
+ let payload = dicResult?["payload"] as? [String: Any]
+
+ BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(self?.recorderManager?.voiceModel?.startTime ?? 0.0)")
+ DispatchQueue.main.async {
+ // 1. Save the subtitle data. begin_time is when the text starts to appear and time is when it stops, both in milliseconds relative to the whole recorded audio. self.recorderManager.voiceModel?.startTime is the time recording started, so both values must have this offset added.
+
+ let newSubtitle = PQEditSubTitleModel()
+
+ // Add 300 ms here because the returned timings are shifted slightly earlier to cut onto the word; the exact amount is undocumented and depends on the source audio, so for now we delay by 300 ms (unit: milliseconds).
+ newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
+
+ newSubtitle.timelineOut = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
+ var showText = ((payload?["result"]) as? String) ?? ""
+ if showText.count > subtitleMaxlength {
+ showText = showText.substring(to: subtitleMaxlength)
+ showText += "..."
+ }
+ newSubtitle.text = showText
+
+ BFLog(message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
+ newSubtitle.setting = self?.subtitleSettingView.subtitle.setting ?? BFSubTitileSettingModel()
+
+ self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.append(newSubtitle)
+ }
+
+ }
+ self?.recorderManager?.endRecordHandle = { [weak self] voideModel, _ in
+ if let sself = self, let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
+ // Append to the voice array
+ model.endTime = sself.currentAssetProgress.seconds
+
+ let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
+
+ var deletedVoices = [(PQVoiceModel, Int)]()
+
+ for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
+ let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+
+ if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
+ deletedVoices.append((m, i))
+ continue
+ }
+ }
+ sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
+ let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+ return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
+ }
+ BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
+
+ var event = sself.events.last
+ if event != nil {
+ event!.deletedVoices = deletedVoices
+ sself.events.removeLast()
+ sself.events.append(event!)
+ }
+ sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
+ if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
+ var duration: Double = 0
+ sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { tempModel in
+ duration = duration + (Double(tempModel.duration ?? "0") ?? 0)
+ }
+ sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
+ sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
+ model.endTime = sself.currentAssetProgress.seconds
+ self?.isEndPlay = true
+ }
+ DispatchQueue.main.async { [weak self] in
+ // Recording finished: redraw the undo button and update the record button
+ self?.changeWithDrawBtnLayout(true)
+ // Note: correct the current position when recording ends to keep it aligned with the indicator
+ self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
+ self?.deleteRecordBtn.isHidden = true
+ self?.recordBtn.isHidden = false
+ }
+ sself.currentPlayRecordIndex = -3 // just finished recording; no need to play it back
+ // Reset the recording start time
+ sself.recordStartTime = 0
+ }
+ }
}

view.backgroundColor = .black
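A minimal sketch of the timestamp conversion performed by the subtitleRecordHandle above, assuming begin_time and time arrive in milliseconds relative to the recorded audio and startTime is the recording's offset on the asset timeline; the function name and the worked numbers are illustrative, not part of the diff.

// Illustrative helper mirroring the timelineIn/timelineOut math used above.
// beginTimeMs / timeMs: ASR payload values in milliseconds, relative to the recorded audio.
// recordStartTime: seconds at which this recording begins on the asset timeline.
func subtitleTimeline(recordStartTime: Float64, beginTimeMs: Int, timeMs: Int) -> (timelineIn: Float64, timelineOut: Float64) {
    let timelineIn = recordStartTime + Float64(beginTimeMs + 300) / 1000.0 // 300 ms pad, as in the handler
    let timelineOut = recordStartTime + Float64(timeMs) / 1000.0
    return (timelineIn, timelineOut)
}
// Example: recordStartTime = 2.0, begin_time = 500, time = 1800
// gives timelineIn = 2.8 s and timelineOut = 3.8 s.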
@@ -536,6 +583,8 @@ public class BFRecordScreenController: BFBaseViewController {
/// Update subtitles; used during playback
/// - Parameter time: the current playback progress
func updateSubtitle(time: CMTime) {
+
+ BFLog(message: "currTime is \(CMTimeGetSeconds(time))")
var findShowSubtitle: PQEditSubTitleModel?
for (index, subtitle) in itemModels[currItemModelIndex].titleStickers.enumerated() {
if subtitle.timelineIn <= CMTimeGetSeconds(time), subtitle.timelineOut >= CMTimeGetSeconds(time) {
@@ -717,6 +766,9 @@ public class BFRecordScreenController: BFBaseViewController {
@objc func subTitleClick() {
BFLog(message: "subTitle Click ")
subtitleSettingView.isHidden = !subtitleSettingView.isHidden
+ if subTitleBtnClickHandle != nil {
+ subTitleBtnClickHandle!(subtitleSettingView.subtitle.setting.subtitleIsShow)
+ }
}

// Audio settings
@@ -770,46 +822,31 @@ public class BFRecordScreenController: BFBaseViewController {
pause()
isRecording = true

+ if self.recorderManager == nil {
+ BFLog(message: "录音机初始化错误!!!")
+ return
+
+ }
+
let model = PQVoiceModel()
// Start time
model.startTime = currentAssetProgress.seconds
model.volume = 100
- recorderManager.voiceModel = model
- recorderManager.startRecord(index: 1)
+ recorderManager?.voiceModel = model
+ recorderManager?.startRecord(index: 1)
if recordStartTime <= 0 {
recordStartTime = currentAssetProgress.seconds
}
// Add an undo checkpoint
events.append(WithDrawModel(type: 2, timestamp: model.startTime))

-// DispatchQueue.main.async {[weak self] in
-// let model = PQVoiceModel()
-// model.startTime = self?.currentAssetProgress.seconds ?? 0
-// model.volume = 100
-// self?.recorderManager.voiceModel = model
-// self?.recorderManager.startRecord(index: 1)
-// if self?.recordStartTime ?? 0 <= 0 {
-// self?.recordStartTime = self?.currentAssetProgress.seconds ?? 0
-// }
-// // Add an undo checkpoint
-// self?.events.append(WithDrawModel(type: 2, timestamp: model.startTime))
-//
-// }
-
isRecording = true

+
if !avatarView.isHidden {
avatarView.beginRecord()
}

- DispatchQueue.global().async { [weak self] in
- guard let sself = self else {
- return
- }
- sself.speechTranscriberUtil?.delegate = sself
- sself.speechTranscriberUtil?.startTranscriber()
- }
-
if itemModels[currItemModelIndex].mediaType == .VIDEO {
if !movieIsProcessing {
movie?.startProcessing()
@@ -821,40 +858,15 @@ public class BFRecordScreenController: BFBaseViewController {
// Paused state
(collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
}
-
- recorderManager.audioRecorder?.recorderProgross = { [weak self] progress in
- BFLog(1, message: "curr:录音进度--\(progress)")
- guard let sself = self else {
- return
- }
- if sself.indirectionView == nil {
- sself.indirectionView = BFIndirectionProgressView(frame: sself.progressThumV.progessIndicateBackV.bounds , percenWidth: sself.itemModels[sself.currItemModelIndex ].mediaType == .IMAGE ? (sself.progressThumV.thumbImageWidth ) / 2 : 0, totalDuration: sself.itemModels[sself.currItemModelIndex].materialDuraion )
- sself.progressThumV.progessIndicateBackV.addSubview((sself.indirectionView)!)
- }
- // Update the recording progress
- // Note: video cannot be driven by the recording progress, because the player has not started playing when recording begins, which leaves the two out of sync
- // Note: when recording stops, the video player keeps advancing; the error is roughly 80 ms
-
- if sself.isRecording {
- let progress = sself.itemModels[sself.currItemModelIndex ?? 0].mediaType == .IMAGE ? progress : progress - 0.08
- sself.indirectionView?.setProgress(start: sself.recordStartTime ?? 0, progress: progress)
- if sself.itemModels[sself.currItemModelIndex ].mediaType == .IMAGE {
- sself.imageRecordProgress(isRecord: true, progress: progress)
- }
- }
- }
}

@objc func endRecord() {
- DispatchQueue.global().async {
- self.speechTranscriberUtil?.endTranscriber()
- }
-// playBtn.isSelected = true
+
// Save the recording
isRecording = false
pause()

- recorderManager.endRecord()
+ recorderManager?.endRecord()

if !avatarView.isHidden {
avatarView.endRecord()
@@ -863,7 +875,7 @@ public class BFRecordScreenController: BFBaseViewController {

func cancleRecord() {
isRecording = false
- recorderManager.cancleRecord()
+ recorderManager?.cancleRecord()

pause()
}
@@ -1383,60 +1395,37 @@ public class BFRecordScreenController: BFBaseViewController {
}
}
}
-}
-
-extension BFRecordScreenController: GPUImageMovieDelegate {
- public func didCompletePlayingMovie() {
- BFLog(1, message: "播放结束")
- currentPlayRecordIndex = -1
- }
-}
-
-extension BFRecordScreenController: AVAudioRecorderDelegate {
- public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
- BFLog(1, message: "录音结束")
- }
-}
-
-extension BFRecordScreenController: AVAudioPlayerDelegate {
- public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
- BFLog(1, message: "录音播放结束")
- }
-}
-
-// MARK: - Speech-to-text delegate
-
-extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
- public func eventCallback(_: PQSpeechTranscriberUtil, asrResult: String) {
- let dicResult: [String: Any]? = jsonStringToDictionary(asrResult)
-
- let payload = dicResult?["payload"] as? [String: Any]
-
- BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(recorderManager.voiceModel?.startTime ?? 0.0)")
- DispatchQueue.main.async {
- // 1. Save the subtitle data. begin_time is when the text starts to appear and time is when it stops, both in milliseconds relative to the whole recorded audio. self.recorderManager.voiceModel?.startTime is the time recording started, so both values must have this offset added.
-
- let newSubtitle = PQEditSubTitleModel()
-
- // Add 300 ms here because the returned timings are shifted slightly earlier to cut onto the word; the exact amount is undocumented and depends on the source audio, so for now we delay by 300 ms (unit: milliseconds).
- newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
-
- newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
- var showText = ((payload?["result"]) as? String) ?? ""
- if showText.count > subtitleMaxlength {
- showText = showText.substring(to: subtitleMaxlength)
- showText += "..."
+
+ func drawOrUpdateRecordProgessLable() {
+ DispatchQueue.main.async { [weak self] in
+ guard let sself = self else {
+ return
}
- newSubtitle.text = showText
-
- BFLog(message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
- newSubtitle.setting = self.subtitleSettingView.subtitle.setting

- self.itemModels[self.currItemModelIndex].titleStickers.append(newSubtitle)
+ sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
+ vv.removeFromSuperview()
+ }
+ let totalDur = sself.itemModels[sself.currItemModelIndex].materialDuraion
+ let height = sself.progressThumV.progessIndicateBackV.height
+ if sself.itemModels[sself.currItemModelIndex].mediaType == .VIDEO {
+ if totalDur > 0, sself.itemModels[sself.currItemModelIndex].voiceStickers.count > 0 {
+ let width = sself.progressThumV.progessIndicateBackV.width
+ sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { model in
+ let lineV = UIView(frame: CGRect(x: model.startTime * Double(width) / totalDur, y: 0, width: (model.endTime - model.startTime) * Double(width) / totalDur, height: Double(height)))
+ lineV.backgroundColor = ThemeStyleColor
+ sself.progressThumV.progessIndicateBackV.addSubview(lineV)
+ }
+ }
+ } else {
+ let lineV = UIView(frame: CGRect(x: 0, y: 0, width: totalDur * sself.progressThumV.thumbImageWidth / 2, height: Double(height)))
+ lineV.backgroundColor = ThemeStyleColor
+ sself.progressThumV.progessIndicateBackV.addSubview(lineV)
+ }
}
}
-
+
func reloadMaterial(recordItem: BFRecordItemModel) {
+
if let path = recordItem.localPath, let lastCell: BFImageCoverViewCell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
setVideoPlay(item: recordItem.playItem, imageView: lastCell.playView)
setAudioPlay(item: recordItem.playItem)
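A hedged sketch of the time-to-pixel mapping that drawOrUpdateRecordProgessLable applies to each voice sticker in the video branch above; the helper name and the worked numbers are illustrative assumptions, not part of the diff.

import UIKit

// Illustrative helper: maps a sticker's [startTime, endTime] (seconds) onto the
// progress bar, using the same proportional mapping as the lineV frames above.
// Assumes totalDuration > 0.
func voiceStickerFrame(startTime: Double, endTime: Double, totalDuration: Double, barWidth: Double, barHeight: Double) -> CGRect {
    let x = startTime * barWidth / totalDuration
    let w = (endTime - startTime) * barWidth / totalDuration
    return CGRect(x: x, y: 0, width: w, height: barHeight)
}
// Example: a sticker from 3 s to 5 s on a 10 s item with a 300 pt wide bar
// occupies CGRect(x: 90, y: 0, width: 60, height: barHeight).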
@@ -1454,8 +1443,31 @@ extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
}
}
}
+
+}
+
+extension BFRecordScreenController: GPUImageMovieDelegate {
+ public func didCompletePlayingMovie() {
+ BFLog(1, message: "播放结束")
+ currentPlayRecordIndex = -1
+ }
+}
+
+extension BFRecordScreenController: AVAudioRecorderDelegate {
+ public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
+ BFLog(1, message: "录音结束")
+ }
+}
+
+extension BFRecordScreenController: AVAudioPlayerDelegate {
+ public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
+ BFLog(1, message: "录音播放结束")
+ }
}

+
+
+
// MARK: - UICollectionViewDelegate

/// UICollectionViewDelegate