@@ -23,7 +23,16 @@ public class BFRecordScreenController: BFBaseViewController {
     public var asset:PHAsset?
 //    var shouldPlayRecordIndex:Int = -1      // index of the recording that should be playing
     var currentPlayRecordIndex:Int = -1       // >= 0: index of the recording currently playing; -3: recording just finished, no playback needed; -1: initial state
-    var isRecording = false       // whether recording is in progress
+    var isRecording = false {     // whether recording is in progress
+        didSet{
+            withDrawBtn.isHidden = isRecording
+            changeVoiceBtn.isHidden = isRecording
+
+            recordBtn.setTitle(isRecording ? "松手 完成" : "按住 说话", for: .normal)
+            recordBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67", alpha: isRecording ? 0.6 : 1)
+        }
+    }
+
     var isNormalPlaying = false {  // whether playback is in progress
         didSet{
             playBtn.isSelected = isNormalPlaying
@@ -56,7 +65,7 @@ public class BFRecordScreenController: BFBaseViewController {
         manager.cancelRecordHandle = { error in

         }
-        manager.endRecordHandle = {[weak self] (isTimeout, model) in
+        manager.endRecordHandle = {[weak self] (model, error) in
             if let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? ""){
                 // append the finished recording to the voice list
                 let ass = AVURLAsset(url: URL(fileURLWithPath: model.wavFilePath))
@@ -120,7 +129,8 @@ public class BFRecordScreenController: BFBaseViewController {

     lazy var recordBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
+        btn.backgroundColor = ThemeStyleGreen
+        btn.setTitle("按住 说话", for: .normal)
         btn.adjustsImageWhenHighlighted = false
         btn.addTarget(self, action: #selector(startRecord), for: .touchDown)
         btn.addTarget(self, action: #selector(endRecord), for: .touchUpInside)
@@ -153,17 +163,29 @@ public class BFRecordScreenController: BFBaseViewController {
         return sliderView
     }()

-    lazy var closeBtn:UIButton = {
+    lazy var withDrawBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "xx"), for: .normal)
-        btn.addTarget(self, action: #selector(closePage), for: .touchUpInside)
+        btn.setImage(imageInRecordScreenKit(by: "withdraw_h"), for: .normal)
+        btn.setImage(imageInRecordScreenKit(by: "withdraw_n"), for: .highlighted)
+        btn.setTitle("撤销", for: .normal)
+        btn.setTitleColor(.white, for: .normal)
+        btn.setTitleColor(.gray, for: .highlighted)
+        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
+        btn.contentVerticalAlignment = UIControl.ContentVerticalAlignment.center
+        btn.addTarget(self, action: #selector(withdrawAction), for: .touchUpInside)
         return btn
     }()

-    lazy var nextBtn:UIButton = {
+    lazy var changeVoiceBtn:UIButton = {
         let btn = UIButton(type: .custom)
-        btn.setImage(imageInRecordScreenKit(by: "gou"), for: .normal)
-        btn.addTarget(self, action: #selector(nextAction), for: .touchUpInside)
+        btn.setImage(imageInRecordScreenKit(by: "changeVoice_n"), for: .normal)
+        btn.setImage(imageInRecordScreenKit(by: "changeVoice_h"), for: .highlighted)
+        btn.setTitle("变声", for: .normal)
+        btn.setTitleColor(.white, for: .normal)
+        btn.setTitleColor(ThemeStyleGreen, for: .highlighted)
+        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
+        btn.contentVerticalAlignment = UIControl.ContentVerticalAlignment.center
+        btn.addTarget(self, action: #selector(changeVoiceAction), for: .touchUpInside)
         return btn
     }()
@@ -188,7 +210,9 @@ public class BFRecordScreenController: BFBaseViewController {
         cleanMovieTarget()
         NotificationCenter.default.removeObserver(self)
         avplayerTimeObserver?.invalidate()
-        recorderManager.stopRecord(isCancel: true)
+        if isRecording{
+            recorderManager.stopRecord(isCancel: true)
+        }
         assetPlayer?.pause()
         recordPlayer?.pause()
@@ -217,8 +241,8 @@ public class BFRecordScreenController: BFBaseViewController {
         bottomeView.addSubview(recordBtn)
         bottomeView.addSubview(progessSildeBackV)
         bottomeView.addSubview(progessSilde)
-        bottomeView.addSubview(closeBtn)
-        bottomeView.addSubview(nextBtn)
+        bottomeView.addSubview(withDrawBtn)
+        bottomeView.addSubview(changeVoiceBtn)

         if checkStatus() {
             try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
@@ -233,26 +257,27 @@ public class BFRecordScreenController: BFBaseViewController {
             make.height.equalTo(adapterWidth(width: 220))
         }

-        recordBtn.snp.makeConstraints { make in
-            make.width.height.equalTo(120)
-            make.centerX.equalToSuperview()
-            make.top.equalTo(27)
-        }
-
-        closeBtn.snp.makeConstraints { make in
+        withDrawBtn.snp.makeConstraints { make in
             make.left.equalToSuperview()
-            make.width.height.equalTo(60)
-            make.top.equalTo(recordBtn.snp.bottom).offset(-10)
+            make.width.height.equalTo(65)
+            make.top.equalTo(128)
         }
-        nextBtn.snp.makeConstraints { make in
+        changeVoiceBtn.snp.makeConstraints { make in
             make.right.equalToSuperview()
-            make.top.width.height.equalTo(closeBtn)
+            make.top.width.height.equalTo(withDrawBtn)
+        }
+
+        recordBtn.snp.makeConstraints { make in
+            make.left.equalTo(withDrawBtn.snp.right)
+            make.right.equalTo(changeVoiceBtn.snp.left)
+            make.height.equalTo(42)
+            make.top.equalTo(withDrawBtn).offset(6)
+
         }

         progessSildeBackV.snp.makeConstraints { make in
-            make.left.equalTo(closeBtn.snp.right).offset(16)
-            make.right.equalTo(nextBtn.snp.left).offset(-16)
-            make.centerY.equalTo(closeBtn)
+            make.left.width.equalTo(recordBtn)
+            make.top.equalTo(recordBtn).offset(-30)
             make.height.equalTo(8)
         }
@@ -261,6 +286,13 @@ public class BFRecordScreenController: BFBaseViewController {
             make.height.equalTo(20)
         }

+
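+        // These insets appear to stack each button's image above its title; the force-unwrapped
+        // imageView/titleLabel and the width/height accessors assume the button's subviews
+        // already have a non-zero size at this point.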
+        withDrawBtn.imageEdgeInsets = UIEdgeInsets(top: -withDrawBtn.imageView!.height, left: 0, bottom: 0, right: -withDrawBtn.titleLabel!.width)
+        withDrawBtn.titleEdgeInsets = UIEdgeInsets(top: withDrawBtn.titleLabel!.height + 2, left: -withDrawBtn.imageView!.width, bottom: 0, right: 0)
+
+        changeVoiceBtn.imageEdgeInsets = UIEdgeInsets(top: -changeVoiceBtn.imageView!.height - 2, left: 0, bottom: 0, right: -changeVoiceBtn.titleLabel!.width)
+        changeVoiceBtn.titleEdgeInsets = UIEdgeInsets(top: changeVoiceBtn.titleLabel!.height + 2, left: -changeVoiceBtn.imageView!.width, bottom: 0, right: 0)
+
     }

     // MARK: - Button actions
@@ -307,8 +339,9 @@ public class BFRecordScreenController: BFBaseViewController {
     }

     @objc func startRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic2"), for: .normal)
         BFLog(1, message: "start \(UIControl.Event.touchDown)")
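+        // The record button's title/background and the withdraw/changeVoice visibility are now
+        // driven by isRecording's didSet (see the property declaration), replacing the setImage call removed above.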
+        isRecording = true
+
         pause()

         let model = PQVoiceModel()
@@ -319,32 +352,30 @@ public class BFRecordScreenController: BFBaseViewController {
         movie?.startProcessing()
         assetPlayer?.volume = 0
         assetPlayer?.play()
-        isRecording = true
     }

     @objc func endRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
+
+        isRecording = false
         // save the recording
         recorderManager.endRecord()
-        isRecording = false

         pause()
     }

     func cancleRecord(){
-        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
-        recorderManager.cancleRecord()
+
         isRecording = false
+        recorderManager.cancleRecord()

         pause()
     }
-    @objc func closePage(){
+    @objc func withdrawAction(){
         pause()
-        closeActionHandle?()
     }

-    @objc func nextAction(){
-        nextActionHandle?()
+    @objc func changeVoiceAction(){
+//        nextActionHandle?()
         pause()
     }
@@ -492,10 +523,11 @@ public class BFRecordScreenController: BFBaseViewController {
     }

     func play(){
-        BFLog(1, message: "开始播放")
+        BFLog(1, message: "开始播放 \(self.currentAssetProgress.seconds)")
         isNormalPlaying = true
         assetPlayer?.volume = 0.2
         movie?.startProcessing()
+
         self.assetPlayer?.play()
     }
@@ -506,8 +538,7 @@ public class BFRecordScreenController: BFBaseViewController {
         assetPlayer?.pause()
         recordPlayer?.pause()

-        let second = self.currentAssetProgress
-        assetPlayer?.seek(to: second , toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
+        assetPlayer?.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
         })
     }
@@ -537,7 +568,7 @@ public class BFRecordScreenController: BFBaseViewController {
                                     contentMode: .aspectFit,
                                     options: option)
         { (image, nil) in
-            //image is the returned picture
+            // set the first frame / cover image
             if image != nil {
                 let pic = GPUImagePicture(image: image)
                 let filet = GPUImageFilter()
@@ -545,34 +576,8 @@ public class BFRecordScreenController: BFBaseViewController {
                 filet.addTarget(self.playView)
                 pic?.processImage()
             }
-
         }

-//        alternative approach: copy the asset resource to a local file
-//        let outFilePath = NSHomeDirectory().appending("/Documents/simple.mp4")
-//        let outFileUrl = URL(fileURLWithPath: outFilePath)
-//        if FileManager.default.fileExists(atPath: outFilePath) {
-//            try? FileManager.default.removeItem(atPath: outFilePath)
-//        }
-//
-//        let assetResources = PHAssetResource.assetResources(for: asset)
-
-//        if let rsc = assetResources.first(where: { res in
-//            res.type == .video || res.type == .pairedVideo
-//        }) {
-//            PHAssetResourceManager.default().writeData(for: rsc, toFile:outFileUrl, options: nil) {[weak self] error in
-//                if error == nil {
-//                    DispatchQueue.main.async {[weak self] in
-//                        self?.setVideoPlay(url: outFileUrl)
-//                        self?.setAudioPlay(url: outFileUrl)
-//                    }
-//                }else{
-//                    BFLog(1, message: "导出视频相exportAsynchro faile")
-//                }
-//            }
-//        }else{
-//
-//        }
         PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: {[weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
             if let urlass = asset as? AVURLAsset {
                 self?.avasset = urlass
@@ -607,6 +612,10 @@ public class BFRecordScreenController: BFBaseViewController {
             if !((self?.isNormalPlaying ?? false) || (self?.isRecording ?? false)) {
                 return
             }
+
+            // play the recording clip that matches this playback time
+            self?.playRecord(at: time)
+
             self?.currentAssetProgress = time
             BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
             if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
@@ -615,8 +624,6 @@ public class BFRecordScreenController: BFBaseViewController {
                 self?.progreddL.text = String(format: "%.2f / %.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
             }
         }
-            // play the recording clip that matches this playback time
-            self?.playRecord(at: time)
         } as? NSKeyValueObservation
     }
@@ -650,7 +657,8 @@ public class BFRecordScreenController: BFBaseViewController {

     func changeProgress(progress:Float) {
         if let duration = assetPlayer?.currentItem?.duration {
-            assetPlayer!.seek(to: CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) {[weak self] finished in
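+            // Keep currentAssetProgress in sync with the scrubbed position so pause() seeks back to it.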
+            self.currentAssetProgress = CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100)
+            assetPlayer!.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) {[weak self] finished in
                 if finished{
                     BFLog(1, message: "拖动成功")
                     self?.movie?.startProcessing()
@@ -661,7 +669,7 @@ public class BFRecordScreenController: BFBaseViewController {

     func drawOrUpdateRecordProgessLable(){
         DispatchQueue.main.async {[weak self] in
-            progessSildeBackV.subviews.forEach { vv in
+            self?.progessSildeBackV.subviews.forEach { vv in
                 vv.removeFromSuperview()
             }
             if let totalDur = self?.asset?.duration, totalDur > 0, let list = self?.recordList {
@@ -669,7 +677,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 let height = self?.progessSildeBackV.height ?? 0
                 list.forEach { model in
                     let lineV = UIView(frame: CGRect(x: model.startTime * width / totalDur , y: 0, width: (model.endTime - model.startTime) * width / totalDur, height: height))
-                    lineV.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67")
+                    lineV.backgroundColor = ThemeStyleGreen
                     self?.progessSildeBackV.addSubview(lineV)
                 }
             }