@@ -6,6 +6,8 @@
 // Copyright © 2021 CocoaPods. All rights reserved.
 //

+import AVFAudio
+import AVFoundation
 import BFCommonKit
 import BFMediaKit
 import BFNetRequestKit
@@ -67,7 +69,7 @@ public class BFRecordScreenController: BFBaseViewController {

     var currentAssetProgress: CMTime = .zero {
         didSet {
-            BFLog(3,message: "currentAssetProgress=\(currentAssetProgress.seconds)")
+            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
         }
     } // 当前素材播放的进度
     // 播放器开始播放时间
@@ -424,6 +426,19 @@ public class BFRecordScreenController: BFBaseViewController {
         endRecord()
     }

+    /// 线路切换
+    /// - Parameter nofify: <#nofify description#>
+    @objc func routeChangeNofify(nofify: Notification) {
+        let routeChangeDic = nofify.userInfo
+        let routeChangeReason: AVAudioSession.RouteChangeReason? = AVAudioSession.RouteChangeReason(rawValue: UInt((routeChangeDic?[AVAudioSessionRouteChangeReasonKey] as? Int) ?? 0))
+        if routeChangeReason == .oldDeviceUnavailable {
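+            // 旧输出设备不可用(例如耳机被拔出)时,若此前的输出线路是耳机,则切换当前播放状态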
+            let previousRoute = routeChangeDic?[AVAudioSessionRouteChangePreviousRouteKey] as? AVAudioSessionRouteDescription
+            if previousRoute?.outputs.first?.portType == .headphones {
+                playVideo(btn: playBtn)
+            }
+        }
+    }
+
     override public func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
         navigationController?.isNavigationBarHidden = true
@@ -441,7 +456,7 @@ public class BFRecordScreenController: BFBaseViewController {

         // 进入活跃状态
         PQNotification.addObserver(self, selector: #selector(didBecomeActive), name: UIApplication.didBecomeActiveNotification, object: nil)
-
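+        // 音频线路切换(如插拔耳机)通知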
+        PQNotification.addObserver(self, selector: #selector(routeChangeNofify(nofify:)), name: AVAudioSession.routeChangeNotification, object: nil)
         // 进入非活跃状态
         PQNotification.addObserver(self, selector: #selector(willResignActive), name: UIApplication.willResignActiveNotification, object: nil)

@@ -477,14 +492,14 @@ public class BFRecordScreenController: BFBaseViewController {
                 let header = dicResult?["header"] as? [String: Any]
                 let payload = dicResult?["payload"] as? [String: Any]

-                BFLog(1, message: "识别结果:) \(payload?["result"]),taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\((((payload?["begin_time"]) as? Int) ?? 0)) ~ \((((payload?["time"]) as? Int) ?? 0)) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
+                BFLog(1, message: "识别结果:) \(payload?["result"]) ,taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\(((payload?["begin_time"]) as? Int) ?? 0) ~ \(((payload?["time"]) as? Int) ?? 0) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
+
                 DispatchQueue.main.async {
                     // 1,保存字幕数据 begin_time是开始出现文字的时间,time 是结束文字出现的时间 单位都为毫秒,都是相对于录制音频数据整段时间。self.recorderManager.voiceModel?.startCMTime.seconds 为开始的录制的时间,开始和结束都要加上这个时差
                     let newSubtitle = PQEditSubTitleModel()
                     // 任务全局唯一ID,请记录该值,便于排查问题。 每次 startRecorder 和 stopRecoder 之间 task_Id都不会变化
                     newSubtitle.taskID = (header?["task_id"] as? String) ?? ""
-
                     BFLog(1, message: "对应关系:字幕所属地址:\((audioFilePath ?? "b").replacingOccurrences(of: documensDirectory, with: "")), 开始录音输入:\((self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").replacingOccurrences(of: documensDirectory, with: ""))")
                     // 这里加300ms 是因为返回结果为了切到字,时长提前一些时间,具体时间官方没说和原音频有关系。这里我们先延后300ms 单位:毫秒。
                     if let audioUrl = audioFilePath, URL(fileURLWithPath: audioUrl).deletingPathExtension().lastPathComponent.contains(URL(fileURLWithPath: self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").deletingPathExtension().lastPathComponent) {
@@ -509,7 +524,7 @@ public class BFRecordScreenController: BFBaseViewController {
                             BFLog(1, message: "卡在录音尾巴上了1")
                             newSubtitle.timelineIn = newSubtitle.timelineIn - CMTime(seconds: 0.1, preferredTimescale: 1000)
                         }
-                    }else {
+                    } else {
                         BFLog(1, message: "没有对应音频播放记录,出现错误!!!!!!")
                         return
                     }
@@ -531,12 +546,13 @@ public class BFRecordScreenController: BFBaseViewController {
         }

         // MARK: - 录音结束
+
         recorderManager?.endRecordHandle = { [weak self, weak recorderManager] voideModel, _ in
             if let sself = self, let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
                 // 加入到语音数组里
-
+
                 model.endCMTime = sself.currentAssetProgress
-                BFLog(1, message: "对应关系:录制结束文件地址:\((model.wavFilePath ?? "") .replacingOccurrences(of: documensDirectory, with: "")) 开始录音前地址:\((self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").replacingOccurrences(of: documensDirectory, with: ""))- \(model.startCMTime.seconds)-\(model.endCMTime.seconds)-\(model.endCMTime.seconds - model.startCMTime.seconds)")
+                BFLog(1, message: "对应关系:录制结束文件地址:\((model.wavFilePath ?? "").replacingOccurrences(of: documensDirectory, with: "")) 开始录音前地址:\((self?.recorderManager?.voiceModel?.wavFilePath ?? "aa").replacingOccurrences(of: documensDirectory, with: ""))- \(model.startCMTime.seconds)-\(model.endCMTime.seconds)-\(model.endCMTime.seconds - model.startCMTime.seconds)")
                 /// 注:录音机回调的录音时长大于一秒,而业务逻辑计算的会小于一秒
                 if (model.endCMTime.seconds - model.startCMTime.seconds) < 1 {
                     // 取消录制
@@ -625,8 +641,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 sself.currentPlayRecordIndex = -3 // 刚录音完,不需要播放录音
                 BFLog(3, message: "重置播放index-\(#function) = \(sself.currentPlayRecordIndex)")
                 recorderManager?.voiceModel = nil
-            }
-            else{
+            } else {
                 BFLog(2, message: "数据出错!!!!\(voideModel?.wavFilePath ?? "")")
             }
         }
@@ -955,7 +970,7 @@ public class BFRecordScreenController: BFBaseViewController {
         var event = WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)])
         event.deletedTittles = deleteTitles(voiceModel: model)
         events.append(event)
-
+
         // 注:删除录音后图片素材需要回撤指针进度,同时后面录音往前挪
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             let currDuration = model.endCMTime.seconds - model.startCMTime.seconds
@@ -1022,7 +1037,7 @@ public class BFRecordScreenController: BFBaseViewController {
         model.currIndex = currItemModelIndex
         model.volume = 100
         recorderManager?.voiceModel = model
-        BFLog(3,message: "开始录制-开始:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),\(model)")
+        BFLog(3, message: "开始录制-开始:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),\(model)")
         recorderManager?.startRecord()
         recorderManager?.audioRecorder?.startNeoNui(NeoNuiToken ?? "", appid: NeoNuiAPPID ?? "")
         isRecording = true
@@ -1056,10 +1071,10 @@ public class BFRecordScreenController: BFBaseViewController {
         beginOnStartBtn = false

         recordBtn.isEnabled = false
-        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) {[weak self] in
+        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) { [weak self] in
             self?.recordBtn.isEnabled = true
         }
-
+
         isRecording = false
         // progressThumV.progressView.isUserInteractionEnabled = true
         // collectionView.isScrollEnabled = true
@@ -1085,7 +1100,6 @@ public class BFRecordScreenController: BFBaseViewController {
         if !avatarView.isHidden {
             avatarView.endRecord()
         }
-
     }

     @objc func cancleRecord() {
@@ -1100,7 +1114,7 @@ public class BFRecordScreenController: BFBaseViewController {
     /// 不足一秒,主动取消录制
     /// - Parameter voiceModel: <#voiceModel description#>
     @objc func recordManagerCancelRecord(voiceModel: PQVoiceModel?) {
-        BFLog(3,message: "开始录制-取消:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),currIndex=\(voiceModel?.currIndex ?? 0),\(String(describing: voiceModel)),\(String(describing:recorderManager?.voiceModel))")
+        BFLog(3, message: "开始录制-取消:currentAssetProgress=\(currentAssetProgress.seconds),cuInde=\(currItemModelIndex),currIndex=\(voiceModel?.currIndex ?? 0),\(String(describing: voiceModel)),\(String(describing: recorderManager?.voiceModel))")
         if voiceModel != nil, currentAssetProgress.seconds - (recorderManager?.voiceModel?.startCMTime.seconds ?? 0) < 1.0 {
             cShowHUB(superView: nil, msg: "最短录制1秒")
         }
@@ -1253,6 +1267,7 @@ public class BFRecordScreenController: BFBaseViewController {
     @objc func playVideo(btn: UIButton) {
         if itemModels[currItemModelIndex].mediaType == .IMAGE && itemModels[currItemModelIndex].voiceStickers.count <= 0 {
             BFLog(message: "图片没有录音无法播放")
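+            // 图片素材没有录音时不可播放:把播放按钮标记为选中状态后直接返回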
+            btn.isSelected = true
             return
         }
         btn.isSelected = !btn.isSelected
@@ -1406,7 +1421,6 @@ public class BFRecordScreenController: BFBaseViewController {
     // MARK: - 音视频处理

     func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed _: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
-
         // if currentPlayRecordIndex >= 0 {
         // if assetPlayer?.volume != haveSpeakVolume{
         // assetPlayer?.volume = haveSpeakVolume
@@ -1416,23 +1430,23 @@ public class BFRecordScreenController: BFBaseViewController {
         // assetPlayer?.volume = noSpeakVolume
         // }
         // }
-
+
         if itemModels[currItemModelIndex].voiceStickers.first(where: { m in
-            return CMTimeCompare(m.startCMTime, currentT) <= 0 && CMTimeCompare(currentT, m.endCMTime) <= 0
-        }) != nil{
-            if assetPlayer?.volume != haveSpeakVolume{
+            CMTimeCompare(m.startCMTime, currentT) <= 0 && CMTimeCompare(currentT, m.endCMTime) <= 0
+        }) != nil {
+            if assetPlayer?.volume != haveSpeakVolume {
                 assetPlayer?.volume = haveSpeakVolume
             }
-        }else{
+        } else {
             if assetPlayer?.volume != noSpeakVolume {
                 assetPlayer?.volume = noSpeakVolume
             }
         }
-
+
         if currentPlayRecordIndex == -3 { // 刚录音完,不需要播放
             return
         }
-
+
         // 先排序,再查找下一个需要播放的录音
         let list = itemModels[currItemModelIndex].voiceStickers.sorted { m1, m2 in
             m1.startCMTime.seconds < m2.startCMTime.seconds
@@ -1440,14 +1454,14 @@ public class BFRecordScreenController: BFBaseViewController {
         let (shouldPlayRecordIndex, recordedAudio) = list.enumerated().first { model in
             model.1.endCMTime.seconds > CMTimeGetSeconds(currentT)
         } ?? (-1, nil)
-
+
         // 没找到,代表后边没有录音需求了
         guard let recordedAudio = recordedAudio, recordedAudio.wavFilePath.count > 0 else {
             BFLog(3, message: "未找到可播放录音")
             return
         }
         BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startCMTime.seconds) ~ \(recordedAudio.endCMTime.seconds), \(recordedAudio.wavFilePath ?? "")")
-
+
         // 创建播放器
         if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString?)?.lastPathComponent {
             let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
@@ -1481,7 +1495,7 @@ public class BFRecordScreenController: BFBaseViewController {
             }
             recordPlayerTimeObserver?.invalidate()
             recordPlayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self, weak recordPlayer] time in
-                guard let sself = self,let rPlay = recordPlayer else {
+                guard let sself = self, let rPlay = recordPlayer else {
                     BFLog(3, message: "sself为空")
                     return
                 }
@@ -1621,7 +1635,7 @@ public class BFRecordScreenController: BFBaseViewController {
            movie?.startProcessing()
            movieIsProcessing = true
        }
-        //add by ak 切换段落时会有一小段原素材的声音播放时先把视频原音量设置为0
+        // add by ak 切换段落时会有一小段原素材的声音播放时先把视频原音量设置为0
        assetPlayer?.volume = 0
        assetPlayer?.play()
    } else {
@@ -1640,14 +1654,14 @@ public class BFRecordScreenController: BFBaseViewController {
     func pause() {
         BFLog(1, message: "暂停播放")
         isNormalPlaying = false
-
+
         // ---- 修复暂停播放回退问题
         avplayerTimeObserver?.invalidate()
         avplayerTimeObserver = nil
         recordPlayerTimeObserver?.invalidate()
         recordPlayerTimeObserver = nil
         // ----
-
+
         subtitleBtn.isHidden = false
         soundSettingBtn.isHidden = false
         withDrawBtn.isHidden = false
@@ -1659,7 +1673,7 @@ public class BFRecordScreenController: BFBaseViewController {
         pauseTime = currentAssetProgress.seconds
         currentPlayRecordIndex = -1
         hadPrepareToPlayRecord = false
-
+
         // 暂停状态
         playBtn.isSelected = (itemModels[currItemModelIndex].mediaType == .IMAGE && itemModels[currItemModelIndex].voiceStickers.count <= 0)
     }
@@ -1990,10 +2004,11 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa

     public func collectionView(_: UICollectionView, didSelectItemAt _: IndexPath) {}

-    public func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
+    public func scrollViewWillBeginDragging(_: UIScrollView) {
         BFLog(1, message: "开始滚动")
         recordBtn.isEnabled = false
     }
+
     public func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
         endScrollItem(scrollView)
     }
@@ -2007,7 +2022,7 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
     func endScrollItem(_ scrollView: UIScrollView) {
         // public func scrollViewDidScroll(_ scrollView: UIScrollView) {
         BFLog(1, message: "滚动结束")
-
+
         let page = Int((scrollView.contentOffset.x + scrollView.frame.width / 2) / scrollView.frame.width)
         if page != currItemModelIndex {
             // 切换素材时先把录制状态切为不可用,延迟可点,避免在缩略图未加载出来时即可录制
@@ -2048,10 +2063,10 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             // 重置播放器
             assetPlayer?.seek(to: CMTime.zero)
             recordPlayer?.seek(to: CMTime.zero)
-
+
             if let voice = itemModels[page].voiceStickers.enumerated().first(where: { m in
                 m.1.startTime == 0
-            }){
+            }) {
                 currentPlayRecordIndex = voice.0
             }

@@ -2101,7 +2116,7 @@ public extension BFRecordScreenController {
             recordStartPlayTime = currentAssetProgress
             currenStartPlayTime = CMTime.zero
         }
-        playRecord(at: time, periodicTimeObserver: { [weak self] currentT, currentItem in
+        playRecord(at: time, periodicTimeObserver: { [weak self] currentT, _ in
             // BFLog(1, message: "播放录音进度:\(currentT.seconds),\(currentItem)")
             if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE, self?.isNormalPlaying ?? false {
                 self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))