@@ -793,17 +793,14 @@ public class BFRecordScreenController: BFBaseViewController {
avatarView.beginRecord()
}
-
-// movie?.startProcessing()
-// assetPlayer?.volume = 0
-// DispatchQueue.global().async { [weak self] in
-// guard let sself = self else {
-// return
-// }
-// sself.speechTranscriberUtil?.delegate = sself
-// sself.speechTranscriberUtil?.startTranscriber()
-// sself.speechTranscriberUtil?.currItemModelIndex = Int32(sself.currItemModelIndex)
-// }
+ DispatchQueue.global().async { [weak self] in
+ guard let sself = self else {
+ return
+ }
+ sself.speechTranscriberUtil?.delegate = sself
+ sself.speechTranscriberUtil?.startTranscriber()
+ }
+
if itemModels[currItemModelIndex].mediaType == .VIDEO {
if !movieIsProcessing {
@@ -1429,8 +1426,10 @@ extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
        // 1. Save the subtitle data: begin_time is when the text starts to appear and time is when it stops appearing, both in milliseconds and both relative to the entire recorded audio clip. self.recorderManager.voiceModel?.startTime is the time recording began, so this offset must be added to both the start and the end.
let newSubtitle = PQEditSubTitleModel()
-        newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["begin_time"]) as? Int) ?? 0) / 1000.0
-        // Unit: milliseconds.
+
+        // Add 300 ms here: to cut right at the first word, the recognizer reports begin_time a bit early; the exact lead time is undocumented and depends on the source audio, so for now we push the start back by 300 ms. Unit: milliseconds.
+        newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
+
newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
var showText = ((payload?["result"]) as? String) ?? ""
if showText.count > subtitleMaxlength {
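A quick sketch of the timeline arithmetic in the second hunk, with made-up numbers; the variable names below are placeholders, not the project's API. As the patched lines read, startTime is in seconds while the recognizer's begin_time and time are in milliseconds, so the code converts to seconds, adds the recording's start offset, and pads begin_time by 300 ms.

import Foundation

// Hypothetical inputs: recording starts 3.5 s into the timeline; the recognizer
// reports begin_time = 1200 ms and time = 2600 ms for this phrase.
let recordingStartTime: Float64 = 3.5   // stands in for recorderManager.voiceModel?.startTime
let beginTimeMs = 1200                  // stands in for payload?["begin_time"]
let endTimeMs = 2600                    // stands in for payload?["time"]
let leadPaddingMs = 300                 // the empirical 300 ms delay added by this change

// Same arithmetic as the patched timelineIn/timelineOut lines:
let timelineIn = recordingStartTime + Float64(beginTimeMs + leadPaddingMs) / 1000.0
let timelineOut = recordingStartTime + Float64(endTimeMs) / 1000.0

print(timelineIn, timelineOut)          // prints "5.0 6.1"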