@@ -31,6 +31,7 @@ public class BFRecordScreenController: BFBaseViewController {
public var recordRndHandle: ((_ currentRecord: PQVoiceModel?) -> Void)?
// 字幕按钮点击上报
public var subTitleBtnClickHandle: ((_ isOn: Bool) -> Void)?
+
// MARK: - 录制参数
public var assets = [PHAsset]()
@@ -98,12 +99,11 @@ public class BFRecordScreenController: BFBaseViewController {
AVEncoderBitDepthHintKey: 16, // 位深
AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue] // 音频质量
- public var haveSpeakVolume : Float = 0.0
- public var noSpeakVolume : Float = 1.0
-
- //录音管理器
- var recorderManager : BFVoiceRecordManager?
-
+ public var haveSpeakVolume: Float = 0.0
+ public var noSpeakVolume: Float = 1.0
+
+ // 录音管理器
+ var recorderManager: BFVoiceRecordManager?
// MARK: - 视图参数
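
The two volume levels read like ducking values for the original asset's audio track: noSpeakVolume while no narration is audible and haveSpeakVolume (silence) while the user's recorded voice plays; later in this diff the player is initialised with assetPlayer?.volume = noSpeakVolume. A minimal sketch of that idea under that assumption; the duckOriginalAudio helper below is hypothetical and not part of this change:

import AVFoundation

// Hypothetical helper, not part of this diff: duck the asset's own audio while
// the recorded narration is audible, restore it otherwise.
func duckOriginalAudio(of player: AVPlayer?, isNarrating: Bool,
                       haveSpeakVolume: Float = 0.0, noSpeakVolume: Float = 1.0) {
    player?.volume = isNarrating ? haveSpeakVolume : noSpeakVolume
}
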
@@ -293,7 +293,6 @@ public class BFRecordScreenController: BFBaseViewController {
return audioSettingView
}()
-
lazy var progressThumV: BFVideoThumbProgressView = {
let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
vv.dragStartHandle = { [weak self] in
@@ -386,32 +385,17 @@ public class BFRecordScreenController: BFBaseViewController {
BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
BFLog(message: "nls appkey is \(appkey), token is \(token)")
- self?.recorderManager = BFVoiceRecordManager.init(token: token, appid: appkey)
-
- //录音取消
+ self?.recorderManager = BFVoiceRecordManager(token: token, appid: appkey)
+
+ // 录音取消
self?.recorderManager?.cancelRecordHandle = { _ in
}
- //录音进度
- self?.recorderManager?.recorderProgrossHandle = {[weak self] progress in
- BFLog(1, message: "curr:录音进度--\(progress) \(self?.recordStartTime ) \(self?.isRecording)")
- if self?.indirectionView == nil {
- self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
- self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
- }
- // 更新录制进度
- // 注:视频无法以录制进度驱动,因当录音开始录音时播放器还未播放,导致进度不一致
- // 注:在录制停止时,视频播放器进度依然在走,误差在80毫秒左右
- if self?.isRecording ?? false {
- let ratioX = 0.08
- self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: (progress ?? 0.0) - ratioX)
- }
- if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
- self?.imageRecordProgress(isRecord: true, progress: progress ?? 0)
- }
-
-
+ // 录音进度
+ self?.recorderManager?.recorderProgrossHandle = { [weak self] progress in
+ BFLog(1, message: "curr:录音进度--\(progress) \(self?.recordStartTime) \(self?.isRecording)")
+ self?.drawProgressIndication(progress: progress ?? 0)
}
-
+
//录音字幕回调
self?.recorderManager?.subtitleRecordHandle = {[weak self] asrResult ,audioFilePath in
if(asrResult == nil){
@@ -422,8 +406,7 @@ public class BFRecordScreenController: BFBaseViewController {
let header = dicResult?["header"] as? [String: Any]
let payload = dicResult?["payload"] as? [String: Any]
-
-
+
BFLog(message: "识别结果:) \((payload?["result"])!) startTime:\(self?.recorderManager?.voiceModel?.startTime ?? 0.0)")
DispatchQueue.main.async {
// 1,保存字幕数据 begin_time是开始出现文字的时间,time 是结束文字出现的时间 单位都为毫秒,都是相对于录制音频数据整段时间。self.recorderManager.voiceModel?.startTime 为开始的录制的时间,开始和结束都要加上这个时差
@@ -433,8 +416,9 @@ public class BFRecordScreenController: BFBaseViewController {
newSubtitle.taskID = (header?["task_id"] as? String) ?? ""
//这里加300ms 是因为返回结果为了切到字,时长提前一些时间,具体时间官方没说和原音频有关系。这里我们先延后300ms 单位:毫秒。
+
newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
-
+
newSubtitle.timelineOut = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
var showText = ((payload?["result"]) as? String) ?? ""
if showText.count > subtitleMaxlength {
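
begin_time and time arrive from the recognizer in milliseconds, measured from the start of the recorded clip, so both ends are offset by the recording's absolute startTime, and the in-point is additionally pushed back 300 ms as the comment explains. A worked sketch of just that conversion, with plain parameters standing in for the ASR payload:

// Illustrative only: converts the recognizer's millisecond offsets into
// absolute timeline seconds, mirroring the two assignments above.
func subtitleWindow(recordStartTime: Float64, beginTimeMs: Int, endTimeMs: Int) -> (timelineIn: Float64, timelineOut: Float64) {
    let timelineIn = recordStartTime + Float64(beginTimeMs + 300) / 1000.0 // 300 ms push-back
    let timelineOut = recordStartTime + Float64(endTimeMs) / 1000.0
    return (timelineIn, timelineOut)
}

// A recording that began at 12.5 s with begin_time = 1200 and time = 3400
// yields (timelineIn: 14.0, timelineOut: 15.9).
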
@@ -449,9 +433,8 @@ public class BFRecordScreenController: BFBaseViewController {
self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.append(newSubtitle)
}
-
}
- //录音结束
+ // 录音结束
self?.recorderManager?.endRecordHandle = { [weak self] voideModel, _ in
if let sself = self, let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
// 加入到语音数组里
@@ -474,7 +457,7 @@ public class BFRecordScreenController: BFBaseViewController {
return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
}
BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
-
+
var event = sself.events.last
if event != nil {
event!.deletedVoices = deletedVoices
@@ -484,19 +467,21 @@ public class BFRecordScreenController: BFBaseViewController {
sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
var duration: Double = 0
- sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { _ in
- duration = duration + (Double(model.duration ?? "0") ?? 0)
+ sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { temp in
+ duration = duration + (Double(temp.duration ?? "0") ?? 0)
}
sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
- sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
- model.endTime = sself.currentAssetProgress.seconds
+ model.endTime = (self?.recordStartTime ?? 0) + (Double(model.duration ?? "0") ?? 0)
+ sself.currentAssetProgress = CMTime(seconds: model.endTime, preferredTimescale: 1000)
self?.isEndPlay = true
}
- DispatchQueue.main.async {[weak self] in
+ DispatchQueue.main.async { [weak self] in
// 录音完,重绘撤销按钮,更新录音按钮,
self?.changeWithDrawBtnLayout(true)
// 注:在录制结束时矫正当前位置,避免跟指针无法对其
self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
+ // 矫正进度
+ self?.resetCurrentProgress()
self?.deleteRecordBtn.isHidden = true
self?.recordBtn.isHidden = false
}
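
The corrected bookkeeping sums each sticker's own duration (the old closure ignored its argument and re-added the newest clip's length every iteration) and anchors the new clip's endTime to where recording started. A self-contained sketch of the same arithmetic, using a stripped-down stand-in for PQVoiceModel:

// Stripped-down stand-in for PQVoiceModel, for illustration only.
final class VoiceSegment {
    var startTime: Double = 0
    var endTime: Double = 0
    var duration: String? // the real model stores duration as a string
}

func appendSegment(_ model: VoiceSegment, to segments: inout [VoiceSegment],
                   recordStartTime: Double) -> Double {
    segments.append(model)
    // Sum every segment's own duration; the old code added the new clip's
    // length once per element instead.
    let total = segments.reduce(0.0) { $0 + (Double($1.duration ?? "0") ?? 0) }
    // The new clip ends where recording started plus its own length.
    model.endTime = recordStartTime + (Double(model.duration ?? "0") ?? 0)
    // Rounded to the millisecond, as materialDuraion is above.
    return Double(String(format: "%.3f", total)) ?? 0
}
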
@@ -505,6 +490,16 @@ public class BFRecordScreenController: BFBaseViewController {
sself.recordStartTime = 0
}
}
+ self?.recorderManager?.cancelRecordHandle = { [weak self] _ in
+ // 取消录制以后重置进度
+ self?.withdrawAction()
+ // 重置录制开始时间
+ self?.recordStartTime = 0
+ /// 重置进度
+ self?.resetCurrentProgress()
+ // 移除
+ self?.indirectionView?.deleteItem(isCurrent: true)
+ }
}
view.backgroundColor = .black
@@ -536,8 +531,6 @@ public class BFRecordScreenController: BFBaseViewController {
try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
}
-
-
avatarView.recordEndCallBack = { _, materialsModel in
BFLog(message: "新录制完成::::\(materialsModel?.locationPath ?? "")")
}
@@ -592,7 +585,6 @@ public class BFRecordScreenController: BFBaseViewController {
/// 更新字幕,在回放时使用
/// - Parameter time: 当前播放的进度
func updateSubtitle(time: CMTime) {
-
BFLog(message: "currTime is \(CMTimeGetSeconds(time))")
var findShowSubtitle: PQEditSubTitleModel?
for (index, subtitle) in itemModels[currItemModelIndex].titleStickers.enumerated() {
@@ -815,6 +807,24 @@ public class BFRecordScreenController: BFBaseViewController {
itemModels[currItemModelIndex].voiceStickers.remove(at: isStopAtRecordRange)
events.append(WithDrawModel(type: 3, timestamp: currentAssetProgress.seconds, deletedVoices: [(model, isStopAtRecordRange)]))
indirectionView?.deleteItem(index: isStopAtRecordRange)
+ // 注:删除录音后图片素材需要回撤指针进度,同时后面录音往前挪
+ if itemModels[currItemModelIndex].mediaType == .IMAGE {
+ let currDuration = (Double(model.duration ?? "0") ?? 0)
+ itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion - currDuration
+ currentAssetProgress = CMTime(seconds: model.startTime, preferredTimescale: 1000)
+ // 更新进度
+ resetCurrentProgress()
+ for (index, item) in itemModels[currItemModelIndex].voiceStickers.enumerated() {
+ if index >= isStopAtRecordRange, index > 0 {
+ // 注:开始时间减去duration or 等一前一段录音的结束时间
+// item.startTime = item.startTime - currDuration
+ item.startTime = itemModels[currItemModelIndex].voiceStickers[index - 1].endTime
+ item.endTime = item.startTime + (Double(item.duration ?? "0") ?? 0)
+ }
+ }
+ // 重绘录音进度视图
+ indirectionView?.resetAllSubViews(items: itemModels[currItemModelIndex].voiceStickers, percenWidth: progressThumV.thumbImageWidth / 2.0, totalDuration: itemModels[currItemModelIndex].materialDuraion)
+ }
searchStopAtRecordRange()
deleteTitles(voiceModel: model)
}
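
For image material, deleting a recording pulls the playhead back to the removed clip's start, shrinks materialDuraion by its length, and shifts every later segment so that each one starts where its predecessor ends. The shift on its own, as a sketch over a simplified segment type:

// Simplified segment type for illustration; the real model is PQVoiceModel.
struct Segment { var startTime: Double; var endTime: Double; var duration: Double }

// After removing the segment at removedIndex, later segments are re-based so
// each one starts where its predecessor ends, as in the loop above.
func rebase(_ segments: inout [Segment], removedIndex: Int) {
    for index in segments.indices where index >= removedIndex && index > 0 {
        segments[index].startTime = segments[index - 1].endTime
        segments[index].endTime = segments[index].startTime + segments[index].duration
    }
}
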
@@ -831,12 +841,11 @@ public class BFRecordScreenController: BFBaseViewController {
pause()
isRecording = true
- if(self.recorderManager == nil){
+ if recorderManager == nil {
BFLog(message: "录音机初始化错误!!!")
return
-
}
-
+
let model = PQVoiceModel()
// 开始时间
model.startTime = currentAssetProgress.seconds
@@ -851,7 +860,6 @@ public class BFRecordScreenController: BFBaseViewController {
isRecording = true
-
if !avatarView.isHidden {
avatarView.beginRecord()
}
@@ -870,7 +878,6 @@ public class BFRecordScreenController: BFBaseViewController {
}
@objc func endRecord() {
-
// 存储录音
isRecording = false
pause()
@@ -920,7 +927,7 @@ public class BFRecordScreenController: BFBaseViewController {
}
}
jumpTime = model.startTime
-
+
if itemModels[currItemModelIndex].mediaType == .IMAGE {
itemModels[currItemModelIndex].materialDuraion = jumpTime
}
@@ -949,7 +956,7 @@ public class BFRecordScreenController: BFBaseViewController {
}
} else {}
events.removeLast()
-
+
let dur = itemModels[currItemModelIndex].materialDuraion
if dur > 0 {
changeProgress(progress: Float(jumpTime / dur))
@@ -1011,7 +1018,7 @@ public class BFRecordScreenController: BFBaseViewController {
}
// 是否吸附在录音首尾处
- func searchStopAtRecordRange(needAdsorb:Bool = false) {
+ func searchStopAtRecordRange(needAdsorb: Bool = false) {
pauseTime = currentAssetProgress.seconds
// TODO: 滑动,播放暂停,撤销时,判断是否停止录音区间,是则删除相关录音,画笔,头像,字幕
@@ -1026,7 +1033,7 @@ public class BFRecordScreenController: BFBaseViewController {
if needAdsorb {
if fabs(elems[0].1.endTime - currentAssetProgress.seconds) < 0.5 {
BFLog(1, message: "吸附在录音结尾")
- // changeWithDrawBtnLayout(false)
+ // changeWithDrawBtnLayout(false)
changeProgress(progress: Float(elems[0].1.endTime / itemModels[currItemModelIndex].materialDuraion))
progressThumV.progress = elems[0].1.endTime
@@ -1037,7 +1044,7 @@ public class BFRecordScreenController: BFBaseViewController {
} else {
if fabs(elems[0].1.startTime - currentAssetProgress.seconds) < 0.5 {
BFLog(1, message: "吸附在录音开始")
- // changeWithDrawBtnLayout(true)
+ // changeWithDrawBtnLayout(true)
changeProgress(progress: Float(elems[0].1.startTime / itemModels[currItemModelIndex].materialDuraion))
progressThumV.progress = elems[0].1.startTime
}
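
Both adsorption branches snap the playhead to a recording boundary whenever it stops within half a second of one. The snapping rule reduces to a one-liner; a small sketch with the same 0.5 s threshold:

// Snap a playhead position to a boundary when it lands inside the threshold.
func snapped(_ position: Double, toBoundary boundary: Double, threshold: Double = 0.5) -> Double {
    abs(boundary - position) < threshold ? boundary : position
}

// snapped(10.42, toBoundary: 10.0) == 10.0; snapped(11.0, toBoundary: 10.0) == 11.0
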
@@ -1047,15 +1054,14 @@ public class BFRecordScreenController: BFBaseViewController {
BFLog(1, message: "停在了录音区间里 \(isStopAtRecordRange)")
}
- }else{
+ } else {
deleteRecordBtn.isHidden = false
recordBtn.isHidden = true
isStopAtRecordRange = elems.first!.0
BFLog(1, message: "停在了录音区间里 \(isStopAtRecordRange)")
-
}
-
+
} else {
deleteRecordBtn.isHidden = true
recordBtn.isHidden = false
@@ -1118,21 +1124,24 @@ public class BFRecordScreenController: BFBaseViewController {
return
}
let type = itemModels[currItemModelIndex].mediaType
- let (shouldPlayRecordIndex, recordedAudio) = itemModels[currItemModelIndex].voiceStickers.enumerated().first { model in
+ let list = itemModels[currItemModelIndex].voiceStickers.sorted { m1, m2 in
+ m1.startTime < m2.startTime
+ }
+ let (shouldPlayRecordIndex, recordedAudio) = list.enumerated().first { model in
if type == .IMAGE {
return model.1.startTime >= CMTimeGetSeconds(currentT)
} else {
- return model.1.endTime > CMTimeGetSeconds(currentT)
+ return model.1.endTime > CMTimeGetSeconds(currentT)
}
} ?? (-1, nil)
- guard let recordedAudio = recordedAudio else {
+ guard let recordedAudio = recordedAudio, recordedAudio.wavFilePath.count > 0 else {
return
}
BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startTime), \(recordedAudio.endTime), \(recordedAudio.wavFilePath ?? "")")
// 创建播放器
- if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString).lastPathComponent {
+ if recordPlayer == nil || (recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString?)?.lastPathComponent {
let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
BFLog(1, message: "录音播放器初始化:\(recordPlayer == nil ? "init player" : "replace item")")
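
Sorting voiceStickers by startTime before the lookup guards against segments stored out of order, and the extra wavFilePath check refuses entries with no backing file. A condensed version of that selection over a simplified segment type:

// Illustrative segment type; the real model is PQVoiceModel.
struct AudioSegment { var startTime: Double; var endTime: Double; var wavFilePath: String }

// Condensed selection: sort defensively, pick the first segment still ahead of
// (image) or overlapping (video) the playhead, and skip file-less entries.
func segmentToPlay(in segments: [AudioSegment], at current: Double, isImage: Bool) -> (index: Int, segment: AudioSegment)? {
    let sorted = segments.sorted { $0.startTime < $1.startTime }
    guard let hit = sorted.enumerated().first(where: { entry in
        isImage ? entry.element.startTime >= current : entry.element.endTime > current
    }), !hit.element.wavFilePath.isEmpty else { return nil }
    return (hit.offset, hit.element)
}
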
@@ -1208,7 +1217,7 @@ public class BFRecordScreenController: BFBaseViewController {
func play() {
BFLog(1, message: "开始播放 \(currentAssetProgress.seconds)")
-
+
isNormalPlaying = true
if isEndPlay {
isEndPlay = false
@@ -1231,7 +1240,7 @@ public class BFRecordScreenController: BFBaseViewController {
// 处理图片音频播放
imageRecordPlay()
}
-
+
deleteRecordBtn.isHidden = true
recordBtn.isHidden = false
}
@@ -1315,8 +1324,8 @@ public class BFRecordScreenController: BFBaseViewController {
assetPlayer?.replaceCurrentItem(with: item)
} else {
assetPlayer = AVPlayer(playerItem: item)
- assetPlayer?.volume = self.noSpeakVolume
-
+ assetPlayer?.volume = noSpeakVolume
+
avplayerTimeObserver?.invalidate()
avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 1000), queue: DispatchQueue.global()) { [weak self] time in
// 进度监控
@@ -1427,42 +1436,31 @@ public class BFRecordScreenController: BFBaseViewController {
}
}
}
-
- func drawOrUpdateRecordProgessLable() {
- DispatchQueue.main.async { [weak self] in
- guard let sself = self else {
- return
- }
- sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
- vv.removeFromSuperview()
- }
- let totalDur = sself.itemModels[sself.currItemModelIndex].materialDuraion
- let height = sself.progressThumV.progessIndicateBackV.height
- if sself.itemModels[sself.currItemModelIndex].mediaType == .VIDEO {
- if totalDur > 0, sself.itemModels[sself.currItemModelIndex].voiceStickers.count > 0 {
- let width = sself.progressThumV.progessIndicateBackV.width
- sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { model in
- let lineV = UIView(frame: CGRect(x: model.startTime * Double(width) / totalDur, y: 0, width: (model.endTime - model.startTime) * Double(width) / totalDur, height: Double(height)))
- lineV.backgroundColor = ThemeStyleColor
- sself.progressThumV.progessIndicateBackV.addSubview(lineV)
- }
- }
- } else {
- let lineV = UIView(frame: CGRect(x: 0, y: 0, width: totalDur * sself.progressThumV.thumbImageWidth / 2, height: Double(height)))
- lineV.backgroundColor = ThemeStyleColor
- sself.progressThumV.progessIndicateBackV.addSubview(lineV)
- }
+ func drawProgressIndication(progress: Double) {
+ if indirectionView == nil {
+ indirectionView = BFIndirectionProgressView(frame: progressThumV.progessIndicateBackV.bounds, percenWidth: itemModels[currItemModelIndex].mediaType == .IMAGE ? progressThumV.thumbImageWidth / 2 : 0, totalDuration: itemModels[currItemModelIndex].materialDuraion)
+ progressThumV.progessIndicateBackV.addSubview((indirectionView)!)
+ }
+ // 更新录制进度
+ // 注:视频无法以录制进度驱动,因当录音开始录音时播放器还未播放,导致进度不一致
+ // 注:在录制停止时,视频播放器进度依然在走,误差在80毫秒左右
+ if isRecording {
+ let ratioX = 0.08
+ indirectionView?.setProgress(start: recordStartTime, progress: progress - ratioX)
+ }
+ if itemModels[currItemModelIndex].mediaType == .IMAGE {
+ imageRecordProgress(isRecord: true, progress: progress)
+ }
}
}
-
+
// 修正视频旋转方向,因为自己录制的竖屏视频会预览为横屏
func reloadMaterial(recordItem: BFRecordItemModel) {
if let path = recordItem.localPath, let cell: BFImageCoverViewCell = collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell {
setVideoPlay(item: recordItem.playItem, imageView: cell.playView)
setAudioPlay(item: recordItem.playItem)
playBtn = cell.playBtn
-
+
let degress = degressFromVideoFile(url: URL(fileURLWithPath: path))
switch degress {
case 90:
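
The comments in the new drawProgressIndication explain why the indicator is driven by the recorder's progress rather than the player, and why roughly 80 ms is shaved off: the player keeps advancing briefly after recording stops. The compensation step in isolation, with a placeholder protocol standing in for the piece of BFIndirectionProgressView used here:

// Placeholder protocol for illustration; the real view is BFIndirectionProgressView.
protocol ProgressIndicating: AnyObject {
    func setProgress(start: Double, progress: Double)
}

// The recorder's callback runs ~80 ms ahead of what the player shows once
// recording stops, so the drawn progress is pulled back by that amount.
func drawCompensatedProgress(on view: ProgressIndicating?, recordStartTime: Double,
                             reportedProgress: Double, isRecording: Bool) {
    guard isRecording else { return }
    let playerLag = 0.08 // seconds, the empirical offset noted in the comments
    view?.setProgress(start: recordStartTime, progress: reportedProgress - playerLag)
}
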
@@ -1476,7 +1474,6 @@ public class BFRecordScreenController: BFBaseViewController {
}
}
}
-
}
extension BFRecordScreenController: GPUImageMovieDelegate {
@@ -1498,9 +1495,6 @@ extension BFRecordScreenController: AVAudioPlayerDelegate {
}
}
-
-
-
// MARK: - UICollectionViewDelegate
/// UICollectionViewDelegate
@@ -1566,7 +1560,8 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
events.append(WithDrawModel(type: 0, timestamp: 0))
itemModels[currItemModelIndex].events = events
events = itemModels[page].events
-
+ searchStopAtRecordRange()
+
// 更新当前page
pauseTime = 0
currItemModelIndex = page
@@ -1577,9 +1572,8 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
if changeItemHandle != nil {
changeItemHandle!(page)
}
-
-// movie?.startProcessing()
+// movie?.startProcessing()
}
}
@@ -1611,7 +1605,11 @@ public extension BFRecordScreenController {
if (recordItem?.endTime ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endTime ?? 0) {
self?.isEndPlay = true
self?.pause()
+ // 注:矫正进度--播放结束后当前指针应该到当前素材总时长
+ self?.currentAssetProgress = CMTime(seconds: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0, preferredTimescale: 1000)
+ self?.resetCurrentProgress()
} else {
+ // 注:矫正进度--一段录音播放结束后当前指针应该到当前录音结束点
self?.currentAssetProgress = CMTime(seconds: recordItem?.endTime ?? 0, preferredTimescale: 1000)
// 当开始播放时重置录音播放起始时间
self?.recordStartPlayTime = self?.currentAssetProgress ?? CMTime.zero
@@ -1634,11 +1632,17 @@ public extension BFRecordScreenController {
}
BFLog(1, message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress),\(itemModels[currItemModelIndex].materialDuraion)")
if itemModels[currItemModelIndex].mediaType == .IMAGE {
- DispatchQueue.main.async { [weak self] in
- self?.progreddL.text = String(format: "%@", (self?.currentAssetProgress.seconds ?? 0).formatDurationToHMS())
- self?.progressThumV.progress = (self?.currentAssetProgress.seconds ?? 0)
- self?.updateSubtitle(time: self?.currentAssetProgress ?? CMTime.zero)
- }
+ /// 重置进度
+ resetCurrentProgress()
+ }
+ }
+
+ /// 重置进度
+ func resetCurrentProgress() {
+ DispatchQueue.main.async { [weak self] in
+ self?.progreddL.text = String(format: "%@", (self?.currentAssetProgress.seconds ?? 0).formatDurationToHMS())
+ self?.progressThumV.progress = (self?.currentAssetProgress.seconds ?? 0)
+ self?.updateSubtitle(time: self?.currentAssetProgress ?? CMTime.zero)
}
}
}
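
The extracted resetCurrentProgress funnels the three UI updates (time label, thumb progress, subtitle) through a single main-queue hop, so the cancel, delete, playback-end, and image-progress paths above can all share it. A minimal sketch of the same pattern outside this class; the label and slider below are placeholders, not the controller's real views:

import UIKit
import CoreMedia

// Minimal sketch of the "sync all progress UI to one CMTime" pattern;
// timeLabel and slider are placeholders, not BFRecordScreenController's views.
final class ProgressSyncer {
    let timeLabel = UILabel()
    let slider = UISlider()
    var onProgressChanged: ((CMTime) -> Void)?

    func reset(to progress: CMTime) {
        DispatchQueue.main.async { [weak self] in
            guard let self = self else { return }
            let seconds = CMTimeGetSeconds(progress)
            self.timeLabel.text = String(format: "%02ld:%02ld", Int(seconds) / 60, Int(seconds) % 60)
            self.slider.value = Float(seconds)
            self.onProgressChanged?(progress)
        }
    }
}
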