@@ -14,13 +14,23 @@ import BFCommonKit
 import BFFramework
 import UIKit
 
+
+struct WithDrawModel {
+    var type:Int // 0: drag; 1: preview play/pause; 2: recording finished
+    var timestamp:Double
+}
+
+
 public class BFRecordScreenController: BFBaseViewController {
 
     public var nextActionHandle:(()->Void)?
     public var closeActionHandle:(()->Void)?
 
     // MARK: - Recording parameters
-    public var asset:PHAsset?
+    public var assets = [PHAsset]()
+
+    var currItemModelIndex = 0
+    public var itemModels = [BFRecordItemModel]()
     // var shouldPlayRecordIndex:Int = -1 // index of the recording that should be playing now
     var currentPlayRecordIndex:Int = -1 // >= 0: index of the recording currently playing; -3: just finished recording, no playback needed; -1: initial state
     var isRecording = false { // whether recording is in progress
@@ -41,19 +51,24 @@ public class BFRecordScreenController: BFBaseViewController {
     var currentAssetProgress : CMTime = .zero // playback progress of the current clip
     // video material
     public var avasset:AVURLAsset?
-    public var recordList:[PQVoiceModel] = [PQVoiceModel]()
+
+//    public var recordList:[PQVoiceModel] = [PQVoiceModel]()
+
     var assetPlayer:AVPlayer? // audio player for the original video
     var isCompletePlay = true
     var hadPrepareToPlayRecord = false // whether the record player is prepared
     var recordPlayer:AVPlayer? // player for the recorded audio
     var movie :GPUImageMovie? // video preview
     var playView :GPUImageView? // video display view
-    var isDragingProgressSlder : Bool = false // whether the progress slider is being dragged
 
+    // MARK: Behavior parameters
+    var events = [WithDrawModel]() // action history, used for undo
+    var isDragingProgressSlder : Bool = false // whether the progress slider is being dragged
+    var isStopAtRecordRange = -1
     // audio encoding parameters
-    let recordSettings:[String : Any] = [AVSampleRateKey : 44100.0, //sample rate
-                                         AVFormatIDKey : kAudioFormatLinearPCM, //encoding format
-                                         AVNumberOfChannelsKey : 1, //number of channels
+    let recordSettings:[String : Any] = [AVSampleRateKey : 44100.0, // sample rate
+                                         AVFormatIDKey : kAudioFormatLinearPCM, // encoding format
+                                         AVNumberOfChannelsKey : 1, // number of channels
                                          AVEncoderBitDepthHintKey: 16, // bit depth
                                          AVEncoderAudioQualityKey : AVAudioQuality.medium.rawValue] // audio quality
 
@@ -66,26 +81,29 @@ public class BFRecordScreenController: BFBaseViewController {
 
         }
         manager.endRecordHandle = {[weak self] (model, error) in
-            if let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? ""){
+            if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? ""){
                 // add it to the voice array
-                let ass = AVURLAsset(url: URL(fileURLWithPath: model.wavFilePath))
-
-                model.endTime = model.startTime + CMTimeGetSeconds(ass.duration)
-
-                // TODO: the old logic deleted everything after the new recording; the new logic inserts and overwrites
-                while let m = self?.recordList.last{
-                    if model.startTime < m.startTime {
-                        self?.recordList.removeLast()
-                    }else if m.endTime > model.startTime {
-                        m.endTime = model.startTime
-                    }else{
+                // TODO: the old logic deleted everything after the new recording; the new logic deletes whatever the new recording overlaps
+                var index = sself.itemModels[sself.currItemModelIndex].voiceStickers.count - 1
+                while index >= 0{
+                    let m = sself.itemModels[sself.currItemModelIndex].voiceStickers[index]
+                    index -= 1
+                    if model.endTime > m.startTime && model.endTime <= m.endTime
+                        || model.startTime <= m.startTime && model.startTime > m.endTime{
+                        sself.itemModels[sself.currItemModelIndex].voiceStickers.remove(at: index+1)
+                        continue
+                    }
+                    if model.startTime < m.endTime {
                         break
                     }
                 }
                 BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
-                self?.recordList.append(model)
-                self?.drawOrUpdateRecordProgessLable()
-                self?.currentPlayRecordIndex = -3 // just finished recording, no playback needed
+
+                sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
+
+                sself.drawOrUpdateRecordProgessLable()
+
+                sself.currentPlayRecordIndex = -3 // just finished recording, no playback needed
             }
 
         }
@@ -109,6 +127,9 @@ public class BFRecordScreenController: BFBaseViewController {
 
     lazy var playBtn:UIButton = {
         let btn = UIButton(frame: view.bounds)
+        btn.setImage(imageInRecordScreenKit(by: "preview_play"), for: .normal)
+        let vv = UIView(frame: CGRect(x: 0, y: 0, width: 1, height: 1))
+        btn.setImage(vv.graphicsGetImage(), for: .selected)
         btn.addTarget(self, action: #selector(playVideo(btn:)), for: .touchUpInside)
         return btn
     }()
@@ -129,7 +150,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
     lazy var recordBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.backgroundColor = ThemeStyleGreen()
+        btn.backgroundColor = ThemeStyleColor
         btn.setTitle("按住 说话", for: .normal)
         btn.adjustsImageWhenHighlighted = false
         btn.addTarget(self, action: #selector(startRecord), for: .touchDown)
@@ -138,13 +159,6 @@ public class BFRecordScreenController: BFBaseViewController {
         return btn
     }()
 
-    lazy var progessSildeBackV : UIView = {
-        let vv = UIView()
-        vv.backgroundColor = .orange // .clear
-
-        return vv
-    }()
-
     lazy var withDrawBtn:UIButton = {
         let btn = UIButton(type: .custom)
         btn.setImage(imageInRecordScreenKit(by: "withdraw_n"), for: .normal)
@@ -164,7 +178,7 @@ public class BFRecordScreenController: BFBaseViewController {
         btn.setImage(imageInRecordScreenKit(by: "changeVoice_h"), for: .highlighted)
         btn.setTitle("变声", for: .normal)
         btn.setTitleColor(.white, for: .normal)
-        btn.setTitleColor(ThemeStyleGreen(), for: .highlighted)
+        btn.setTitleColor(ThemeStyleColor, for: .highlighted)
         btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
         btn.contentVerticalAlignment = UIControl.ContentVerticalAlignment.center;
         btn.addTarget(self, action: #selector(changeVoiceAction), for: .touchUpInside)
@@ -241,15 +255,18 @@ public class BFRecordScreenController: BFBaseViewController {
 
     lazy var progressThumV : BFVideoThumbProgressView = {
         let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
-        vv.dragScrollProgressHandle = {[weak self] process in
+        vv.dragScrollProgressHandle = {[weak self] isStart, process in
             DispatchQueue.main.async {[weak self] in
                 guard let sself = self else {
                     return
                 }
+                if isStart {
+                    sself.events.append(WithDrawModel(type: 0, timestamp: sself.currentAssetProgress.seconds))
+                }
                 if sself.isNormalPlaying || sself.isRecording {
                     sself.pause()
-                    sself.isDragingProgressSlder = true
                 }
+                sself.isDragingProgressSlder = true
                 sself.changeProgress(progress: process)
             }
         }
@@ -258,11 +275,7 @@ public class BFRecordScreenController: BFBaseViewController {
             guard let sself = self else {
                 return
             }
-            sself.changeProgress(progress: process)
-
-            sself.isDragingProgressSlder = false
-            sself.currentPlayRecordIndex = -1
-            sself.hadPrepareToPlayRecord = false
+            sself.thumbViewEnded(progress: process)
         }
         vv.isHidden = true
         return vv
@@ -311,9 +324,8 @@ public class BFRecordScreenController: BFBaseViewController {
 //        view.addSubview(toolV)
         bottomeView.addSubview(recordBtn)
         bottomeView.addSubview(withDrawBtn)
-        bottomeView.addSubview(changeVoiceBtn)
+//        bottomeView.addSubview(changeVoiceBtn)
         bottomeView.addSubview(progressThumV)
-        progressThumV.addSubview(progessSildeBackV)
 
         if checkStatus() {
             try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
@@ -347,29 +359,18 @@ public class BFRecordScreenController: BFBaseViewController {
             make.width.height.equalTo(65)
             make.top.equalTo(128)
         }
-        changeVoiceBtn.snp.makeConstraints { make in
-            make.right.equalToSuperview()
-            make.top.width.height.equalTo(withDrawBtn)
-        }
+//        changeVoiceBtn.snp.makeConstraints { make in
+//            make.right.equalToSuperview()
+//            make.top.width.height.equalTo(withDrawBtn)
+//        }
 
         recordBtn.snp.makeConstraints { make in
             make.left.equalTo(withDrawBtn.snp.right)
-            make.right.equalTo(changeVoiceBtn.snp.left)
+            make.right.equalTo(-65)
             make.height.equalTo(42)
             make.top.equalTo(withDrawBtn).offset(6)
         }
-
-        progessSildeBackV.snp.makeConstraints { make in
-            make.left.equalToSuperview()
-            make.right.equalToSuperview()
-            make.bottom.equalToSuperview()
-            make.height.equalTo(8)
-        }
-
-//        progessSilde.snp.makeConstraints { make in
-//            make.left.right.centerY.equalTo(progessSildeBackV)
-//            make.height.equalTo(20)
-//        }
+
 //        openCameraBtn.snp.makeConstraints { make in
 //            make.right.equalToSuperview().offset(-12)
 //            make.top.equalToSuperview().offset(98)
@@ -493,9 +494,11 @@ public class BFRecordScreenController: BFBaseViewController {
         isRecording = true
 
         pause()
+
+        events.append(WithDrawModel(type: 2, timestamp: self.currentAssetProgress.seconds))
 
         let model = PQVoiceModel()
-        model.startTime = CMTimeGetSeconds(self.currentAssetProgress)
+        model.startTime = self.currentAssetProgress.seconds
         model.volume = 100
 
 //        recorderManager.voiceModel = model
@@ -510,21 +513,20 @@ public class BFRecordScreenController: BFBaseViewController {
         }
 
         recorderManager.voiceModel = model
-        recorderManager.startRecord(index: recordList.count)
+        recorderManager.startRecord(index: 1)
 //        movie?.startProcessing()
         assetPlayer?.volume = 0
         assetPlayer?.play()
-
+        playBtn.isSelected = true
     }
 
     @objc func endRecord(){
-
 
         // save the recording
-
-        recorderManager.endRecord()
         isRecording = false
 
+        recorderManager.voiceModel?.endTime = self.currentAssetProgress.seconds
+        recorderManager.endRecord()
 
         pause()
         if(!avatarView.isHidden){
@@ -539,8 +541,32 @@ public class BFRecordScreenController: BFBaseViewController {
 
         pause()
     }
+
     @objc func withdrawAction(){
         pause()
+        if let action = events.last {
+            var jumpTime = action.timestamp
+            if action.type == 2 {
+                // undo a recording
+                if let modelIndex = itemModels[currItemModelIndex].voiceStickers.firstIndex(where: { mod in
+                    mod.startTime == action.timestamp
+                }) {
+                    let model = itemModels[currItemModelIndex].voiceStickers[modelIndex]
+                    itemModels[currItemModelIndex].voiceStickers.remove(at: modelIndex)
+                    drawOrUpdateRecordProgessLable()
+                    jumpTime = model.startTime
+                }
+            }else {
+            }
+            events.removeLast()
+            if let dur = itemModels[currItemModelIndex].baseMaterial?.duration.seconds,dur > 0 {
+                changeProgress(progress: Float(jumpTime / dur))
+                isDragingProgressSlder = false
+                currentPlayRecordIndex = -1
+                hadPrepareToPlayRecord = false
+                progressThumV.progress = jumpTime
+            }
+        }
     }
 
     @objc func changeVoiceAction(){
@@ -549,12 +575,19 @@ public class BFRecordScreenController: BFBaseViewController {
     }
 
     @objc func playVideo(btn:UIButton){
-        btn.isSelected ? pause() : play()
+        if btn.isSelected {
+            pause()
+            searchStopAtRecordRange()
+        }else {
+            events.append(WithDrawModel(type: 1, timestamp: self.currentAssetProgress.seconds))
+            play()
+        }
     }
 
     @objc func sliderTouchBegan(sender _: UISlider) {
         isDragingProgressSlder = true
         pause()
+
     }
 
     @objc func sliderTouchEnded(sender: UISlider) {
@@ -565,7 +598,30 @@ public class BFRecordScreenController: BFBaseViewController {
     }
     @objc func sliderValueDidChanged(sender: UISlider) {
         changeProgress(progress: sender.value)
-
+    }
+
+    func thumbViewEnded(progress:Float) {
+        changeProgress(progress: progress)
+        isDragingProgressSlder = false
+        currentPlayRecordIndex = -1
+        hadPrepareToPlayRecord = false
+
+        searchStopAtRecordRange()
+    }
+
+    func searchStopAtRecordRange() {
+        // TODO: check whether playback stopped inside a recording's range; if so, remove the related recording, brush strokes, avatar, and subtitles
+        let elems = itemModels[currItemModelIndex].voiceStickers.enumerated().filter({ elem in
+            elem.1.startTime <= self.currentAssetProgress.seconds && elem.1.endTime > self.currentAssetProgress.seconds
+        })
+        if elems.count > 0{
+            // TODO: stopped inside a recording's range, show the delete button
+            isStopAtRecordRange = elems.first!.0
+            BFLog(1, message: "停在了录音区间 里")
+        }else {
+            isStopAtRecordRange = -1
+            BFLog(1, message: "停在了录音区间 外")
+        }
     }
 
     // MARK: - Permission requests
@@ -618,7 +674,8 @@ public class BFRecordScreenController: BFBaseViewController {
         if currentPlayRecordIndex == -3 { // just finished recording, no playback needed
             return
         }
-        let (shouldPlayRecordIndex, recordedAudio) = recordList.enumerated().first { model in
+
+        let (shouldPlayRecordIndex, recordedAudio) = itemModels[currItemModelIndex].voiceStickers.enumerated().first { model in
             model.1.endTime > CMTimeGetSeconds(currentT)
         } ?? (-1, nil)
 
@@ -662,7 +719,7 @@ public class BFRecordScreenController: BFBaseViewController {
         if currentPlayRecordIndex == -1 && self.isNormalPlaying{
             let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
             DispatchQueue.main.async {[weak self] in
-                self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: {[weak self] finished in
+                self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000), completionHandler: {[weak self] finished in
                     if finished && (self?.isNormalPlaying ?? false) {
                         self?.recordPlayer?.play()
                         BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero))")
@@ -711,57 +768,69 @@ public class BFRecordScreenController: BFBaseViewController {
         assetPlayer?.pause()
         recordPlayer?.pause()
 
-        assetPlayer?.seek(to: self.currentAssetProgress , toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
+        assetPlayer?.seek(to: self.currentAssetProgress , toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000), completionHandler: { finished in
         })
     }
 
     func fetchVideo(){
-        if let asset = self.asset {
-            let options = PHVideoRequestOptions()
-            options.isNetworkAccessAllowed = true
-            options.deliveryMode = .automatic
+        if self.assets.count > 0 {
+
+            currItemModelIndex = 0
 
-            PHImageManager.default().requestPlayerItem(forVideo:asset, options: options, resultHandler: { [weak self] playerItem, info in
+            for (index, asset) in self.assets.enumerated() {
+                let itemModel = BFRecordItemModel()
+                itemModel.index = 0
+                itemModel.width = asset.pixelWidth
+                itemModel.height = asset.pixelHeight
+
+                itemModels.append(itemModel)
+
+                let options = PHVideoRequestOptions()
+                options.isNetworkAccessAllowed = true
+                options.deliveryMode = .automatic
+
+                PHImageManager.default().requestPlayerItem(forVideo:asset, options: options, resultHandler: { [weak self] playerItem, info in
 
-                guard let item = playerItem else {
-                    cShowHUB(superView: nil, msg: "视频获取失败")
-                    return
-                }
-                self?.setAudioPlay(item: item)
-                self?.setVideoPlay(item: item)
+                    guard let item = playerItem else {
+                        cShowHUB(superView: nil, msg: "视频获取失败")
+                        return
+                    }
+                    if index == 0 {
+                        self?.setAudioPlay(item: item)
+                        self?.setVideoPlay(item: item)
+                    }
+                })
 
-            })
-
-            let option = PHImageRequestOptions()
-            option.isNetworkAccessAllowed = true // allow downloading images from iCloud
-            option.resizeMode = .fast
-            option.deliveryMode = .highQualityFormat
-            PHImageManager.default().requestImage(for: asset,
-                                                  targetSize: self.view.bounds.size,
-                                                  contentMode: .aspectFit,
-                                                  options: option)
-            { (image, nil) in
-                // set the first frame / cover
-                if image != nil {
-                    let pic = GPUImagePicture(image: image)
-                    let filet = GPUImageFilter()
-                    pic?.addTarget(filet)
-                    filet.addTarget(self.playView)
-                    pic?.processImage()
-                }
-            }
-
-            PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: {[weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
-                if let urlass = asset as? AVURLAsset {
-                    self?.avasset = urlass
-                    DispatchQueue.main.async {[weak self] in
-                        self?.progressThumV.videoAsset = urlass
-                        self?.progressThumV.isHidden = false
-//                        self?.progessSildeBackV.setNeedsLayout()
-//                        self?.progessSildeBackV.layoutIfNeeded()
+//                let option = PHImageRequestOptions()
+//                option.isNetworkAccessAllowed = true // allow downloading images from iCloud
+//                option.resizeMode = .fast
+//                option.deliveryMode = .highQualityFormat
+//                PHImageManager.default().requestImage(for: asset,
+//                                                      targetSize: self.view.bounds.size,
+//                                                      contentMode: .aspectFit,
+//                                                      options: option)
+//                { (image, nil) in
+//                    // set the first frame / cover
+//                    if image != nil {
+//                        let pic = GPUImagePicture(image: image)
+//                        let filet = GPUImageFilter()
+//                        pic?.addTarget(filet)
+//                        filet.addTarget(self.playView)
+//                        pic?.processImage()
+//                    }
+//                }
+
+                PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: {[weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
+                    if let urlasset = asset as? AVURLAsset {
+                        self?.avasset = urlasset
+                        itemModel.baseMaterial = urlasset
+                        DispatchQueue.main.async {[weak self] in
+                            self?.progressThumV.videoAsset = urlasset
+                            self?.progressThumV.isHidden = false
+                        }
                     }
-                }
-            })
+                })
+            }
         }
 
     }
@@ -790,21 +859,24 @@ public class BFRecordScreenController: BFBaseViewController {
         assetPlayer = AVPlayer(playerItem: item)
         avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) {[weak self] time in
             // progress monitoring
-            if !((self?.isNormalPlaying ?? false) || (self?.isRecording ?? false)) {
-                return
-            }
 
-            // play the corresponding recorded audio
-            self?.playRecord(at: time)
-
             self?.currentAssetProgress = time
             BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
-            if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
+            if CMTimeGetSeconds(item.duration) > 0 {
                 DispatchQueue.main.async { [weak self] in
                     self?.progreddL.text = String(format: "%.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
-                    self?.progressThumV.progress = time.seconds
+                    if !(self?.isDragingProgressSlder ?? false){
+                        self?.progressThumV.progress = time.seconds
+                    }
                 }
             }
+
+            if (self?.isNormalPlaying ?? false) || (self?.isRecording ?? false) {
+                // play the corresponding recorded audio
+                self?.playRecord(at: time)
+            }
+
+
         } as? NSKeyValueObservation
     }
 
@@ -813,6 +885,8 @@ public class BFRecordScreenController: BFBaseViewController {
             BFLog(1, message: "AVPlayerItemDidPlayToEndTime = \(notify)")
             self?.isNormalPlaying = false
             self?.assetPlayer?.seek(to: CMTime.zero)
+            self?.progressThumV.progress = 0
+
             self?.currentPlayRecordIndex = -1
             if self?.isRecording ?? false {
                 self?.endRecord()
@@ -843,7 +917,7 @@ public class BFRecordScreenController: BFBaseViewController {
             self!.progreddL.text = String(format: "%.2f", CMTimeGetSeconds(self!.currentAssetProgress))
         }
 
-        assetPlayer!.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) { finished in
+        assetPlayer!.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000)) { finished in
 
         }
     }
@@ -851,18 +925,21 @@ public class BFRecordScreenController: BFBaseViewController {
 
     func drawOrUpdateRecordProgessLable(){
         DispatchQueue.main.async {[weak self] in
-            self?.progessSildeBackV.subviews.forEach { vv in
+            guard let sself = self else {
+                return
+            }
+            sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
                 vv.removeFromSuperview()
             }
-            if let totalDur = self?.asset?.duration, totalDur > 0, let list = self?.recordList {
-                let width = self?.progessSildeBackV.width ?? 0
-                let height = self?.progessSildeBackV.height ?? 0
-                list.forEach { model in
-
-                    let lineV = UIView(frame: CGRect(x: model.startTime * Double(width) / totalDur , y: 0, width: (model.endTime - model.startTime) * Double(width) / totalDur, height: Double(height)))
-                    lineV.backgroundColor = ThemeStyleGreen()
 
-                    self?.progessSildeBackV.addSubview(lineV)
+
+            if let totalDur = sself.itemModels[sself.currItemModelIndex].baseMaterial?.duration.seconds, totalDur > 0, sself.itemModels[sself.currItemModelIndex].voiceStickers.count > 0 {
+                let width = sself.progressThumV.progessIndicateBackV.width
+                let height = sself.progressThumV.progessIndicateBackV.height
+                sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { model in
+                    let lineV = UIView(frame: CGRect(x: model.startTime * width / totalDur , y: 0, width: (model.endTime - model.startTime) * width / totalDur, height: height))
+                    lineV.backgroundColor = ThemeStyleColor
+                    sself.progressThumV.progessIndicateBackV.addSubview(lineV)
                 }
             }
         }