|
@@ -47,7 +47,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
withDrawBtn.isHidden = isRecording
|
|
|
changeVoiceBtn.isHidden = isRecording
|
|
|
recordBtn.setTitle(isRecording ? "松手 完成" : "按住 说话", for: .normal)
|
|
|
- recordBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67", alpha: isRecording ? 0.6 : 1)
|
|
|
+ recordBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#389AFF", alpha: isRecording ? 0.6 : 1)
|
|
|
playBtn.isSelected = isRecording
|
|
|
// if !isRecording {
|
|
|
// BFLog(1, message: "stop")
|
|
@@ -70,6 +70,8 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
var currentAssetProgress: CMTime = .zero // 当前素材播放的进度
|
|
|
// 播放器开始播放时间
|
|
|
var recordStartPlayTime: CMTime = .zero
|
|
|
+ // Playback start time of the current recording segment
|
|
|
+ var currenStartPlayTime: CMTime = .zero
|
|
|
var recordStartTime: Double = 0 // 录制开始时间
|
|
|
var pauseTime: Double = 0 // 停止无操作的时间点
|
|
|
|
|
@@ -468,11 +470,10 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
|
|
|
var duration: Double = 0
|
|
|
sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { temp in
|
|
|
- duration = duration + (Double(temp.duration ?? "0") ?? 0)
|
|
|
+ temp.duration = "\(temp.endTime - temp.startTime)"
|
|
|
+ duration = duration + (temp.endTime - temp.startTime)
|
|
|
}
|
|
|
sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
|
|
|
- model.endTime = (self?.recordStartTime ?? 0) + (Double(model.duration ?? "0") ?? 0)
|
|
|
- sself.currentAssetProgress = CMTime(seconds: model.endTime, preferredTimescale: 1000)
|
|
|
self?.isEndPlay = true
|
|
|
}
|
|
|
DispatchQueue.main.async { [weak self] in
|
|
@@ -809,7 +810,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
indirectionView?.deleteItem(index: isStopAtRecordRange)
|
|
|
// 注:删除录音后图片素材需要回撤指针进度,同时后面录音往前挪
|
|
|
if itemModels[currItemModelIndex].mediaType == .IMAGE {
|
|
|
- let currDuration = (Double(model.duration ?? "0") ?? 0)
|
|
|
+ let currDuration = model.endTime - model.startTime
|
|
|
itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion - currDuration
|
|
|
currentAssetProgress = CMTime(seconds: model.startTime, preferredTimescale: 1000)
|
|
|
// 更新进度
|
|
@@ -1119,20 +1120,15 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
|
|
|
// MARK: - 音视频处理
|
|
|
|
|
|
- func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedAudio: PQVoiceModel?, _ currentItem: AVPlayerItem?) -> Void, playFailed: @escaping (_ recordedAudio: PQVoiceModel?, _ currentItem: AVPlayerItem?) -> Void) {
|
|
|
+ func playRecord(at currentT: CMTime, periodicTimeObserver: @escaping (_ time: CMTime, _ currentItem: AVPlayerItem) -> Void, didPlayToEndTime: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void, playFailed: @escaping (_ recordedInfo: (Int, PQVoiceModel)?, _ currentItem: AVPlayerItem?) -> Void) {
|
|
|
if currentPlayRecordIndex == -3 { // 刚录音完,不需要播放
|
|
|
return
|
|
|
}
|
|
|
- let type = itemModels[currItemModelIndex].mediaType
|
|
|
let list = itemModels[currItemModelIndex].voiceStickers.sorted { m1, m2 in
|
|
|
m1.startTime < m2.startTime
|
|
|
}
|
|
|
let (shouldPlayRecordIndex, recordedAudio) = list.enumerated().first { model in
|
|
|
- if type == .IMAGE {
|
|
|
- return model.1.startTime >= CMTimeGetSeconds(currentT)
|
|
|
- } else {
|
|
|
- return model.1.endTime > CMTimeGetSeconds(currentT)
|
|
|
- }
|
|
|
+ model.1.endTime > CMTimeGetSeconds(currentT)
|
|
|
} ?? (-1, nil)
|
|
|
|
|
|
guard let recordedAudio = recordedAudio, recordedAudio.wavFilePath.count > 0 else {
|
|
@@ -1166,17 +1162,20 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
self?.assetPlayer?.volume = self?.noSpeakVolume ?? 0
|
|
|
// self?.deleteRecordBtn.isHidden = true
|
|
|
// self?.recordBtn.isHidden = false
|
|
|
- didPlayToEndTime(recordedAudio, newItem)
|
|
|
+ didPlayToEndTime((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), newItem)
|
|
|
}
|
|
|
avplayerTimeObserver?.invalidate()
|
|
|
- avplayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { time in
|
|
|
- BFLog(1, message: "")
|
|
|
- periodicTimeObserver(time, newItem)
|
|
|
+ avplayerTimeObserver = recordPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
|
|
|
+ BFLog(3, message: "当前播放---\(time),\(time.seconds)")
|
|
|
+ if CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero) <= 0 {
|
|
|
+ self?.currenStartPlayTime = time
|
|
|
+ }
|
|
|
+ periodicTimeObserver(CMTime(seconds: CMTimeGetSeconds(time) - CMTimeGetSeconds(self?.currenStartPlayTime ?? CMTime.zero), preferredTimescale: 1000), newItem)
|
|
|
} as? NSKeyValueObservation
|
|
|
}
|
|
|
if recordPlayer?.currentItem?.duration.timescale == 0 {
|
|
|
BFLog(1, message: "时间timescale == 0")
|
|
|
- playFailed(recordedAudio, recordPlayer?.currentItem)
|
|
|
+ playFailed((shouldPlayRecordIndex, recordedAudio) as? (Int, PQVoiceModel), recordPlayer?.currentItem)
|
|
|
return
|
|
|
}
|
|
|
synced(currentPlayRecordIndex) { [weak self] in
|
|
@@ -1187,7 +1186,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
|
|
|
if !hadPrepareToPlayRecord,
|
|
|
recordPlayer?.currentItem?.duration.timescale != 0,
|
|
|
- CMTimeGetSeconds(currentT) >= recordedAudio.startTime,
|
|
|
+ CMTimeGetSeconds(currentT) >= (recordedAudio.startTime - 0.1),
|
|
|
CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2 // 这个条件是避免录音结束后有小幅度回退导致播放最新录音
|
|
|
{
|
|
|
// 应当开始播放了
|
|
@@ -1335,8 +1334,8 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
// 播放对应的录音音频
|
|
|
self?.playRecord(at: time, periodicTimeObserver: { currentT, currentItem in
|
|
|
BFLog(message: "播放一段进度:\(currentT),\(currentItem)")
|
|
|
- }, didPlayToEndTime: { startT, _ in
|
|
|
- BFLog(message: "播放一段结束:\(startT?.endTime ?? 0)")
|
|
|
+ }, didPlayToEndTime: { recordInfo, _ in
|
|
|
+ BFLog(message: "播放一段结束:\(recordInfo?.1.endTime ?? 0)")
|
|
|
}, playFailed: { _, _ in })
|
|
|
}
|
|
|
} as? NSKeyValueObservation
|
|
@@ -1601,12 +1600,15 @@ public extension BFRecordScreenController {
|
|
|
return
|
|
|
}
|
|
|
isNormalPlaying = true
|
|
|
+ // Reset the recording-playback start times whenever playback begins
|
|
|
+ recordStartPlayTime = currentAssetProgress
|
|
|
+ currenStartPlayTime = CMTime.zero
|
|
|
playRecord(at: currentAssetProgress, periodicTimeObserver: { [weak self] currentT, currentItem in
|
|
|
BFLog(1, message: "播放第段进度:\(currentT),\(currentItem)")
|
|
|
self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))
|
|
|
- }, didPlayToEndTime: { [weak self] recordItem, currentItem in
|
|
|
- BFLog(message: "播放第段结束:\(String(describing: recordItem)),\(String(describing: currentItem))")
|
|
|
- if (recordItem?.endTime ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endTime ?? 0) {
|
|
|
+ }, didPlayToEndTime: { [weak self] recordInfo, currentItem in
|
|
|
+ BFLog(message: "播放第段结束:\(String(describing: recordInfo?.1)),\(String(describing: currentItem))")
|
|
|
+ if (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.count ?? 0) <= ((recordInfo?.0 ?? 0) + 1) || (recordInfo?.1.endTime ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endTime ?? 0) {
|
|
|
self?.isEndPlay = true
|
|
|
self?.pause()
|
|
|
// 注:矫正进度--播放结束后当前指针应该到当前素材总时长
|
|
@@ -1614,9 +1616,7 @@ public extension BFRecordScreenController {
|
|
|
self?.resetCurrentProgress()
|
|
|
} else {
|
|
|
// 注:矫正进度--一段录音播放结束后当前指针应该到当前录音结束点
|
|
|
- self?.currentAssetProgress = CMTime(seconds: recordItem?.endTime ?? 0, preferredTimescale: 1000)
|
|
|
- // 当开始播放时重置录音播放起始时间
|
|
|
- self?.recordStartPlayTime = self?.currentAssetProgress ?? CMTime.zero
|
|
|
+ self?.currentAssetProgress = CMTime(seconds: self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers[(recordInfo?.0 ?? 0) + 1].startTime ?? 0, preferredTimescale: 1000)
|
|
|
self?.imageRecordPlay()
|
|
|
}
|
|
|
}) { [weak self] _, _ in
|