|
@@ -11,21 +11,64 @@ import BFUIKit
|
|
|
import GPUImage
|
|
|
import Photos
|
|
|
import BFCommonKit
|
|
|
+import BFFramework
|
|
|
|
|
|
public class BFRecordScreenController: BFBaseViewController {
|
|
|
|
|
|
+ public var nextActionHandle:(()->Void)?
|
|
|
+ public var closeActionHandle:(()->Void)?
|
|
|
public var asset:PHAsset?
|
|
|
|
|
|
- var audioPlayer:AVPlayer? // 原视频音频播放
|
|
|
+ var assetPlayer:AVPlayer? // 原视频音频播放器
|
|
|
+ lazy var recordPlayer:AVAudioPlayer = {// 录音音频播放器
|
|
|
+ let player = AVAudioPlayer()
|
|
|
+ player.volume = 1
|
|
|
+ return player
|
|
|
+
|
|
|
+ }()
|
|
|
var movie :GPUImageMovie? // 视频预览
|
|
|
var playView :GPUImageView? // 视频展示视图
|
|
|
var isDragingProgressSlder : Bool = false
|
|
|
|
|
|
+ //定义音频的编码参数
|
|
|
+ let recordSettings:[String : Any] = [AVSampleRateKey : 44100.0, //声音采样率
|
|
|
+ AVFormatIDKey : kAudioFormatLinearPCM, //编码格式
|
|
|
+ AVNumberOfChannelsKey : 1, //采集音轨
|
|
|
+ AVEncoderBitDepthHintKey: 16, // 位深
|
|
|
+ AVEncoderAudioQualityKey : AVAudioQuality.medium.rawValue] //音频质量
|
|
|
+
|
|
|
+
|
|
|
// 录音相关
|
|
|
- var record:AVAudioRecorder?
|
|
|
+ lazy var recorderManager : BFRecordManager = {
|
|
|
+
|
|
|
+ let manager = BFRecordManager(voideModeL: PQVoiceModel())
|
|
|
+ manager.cancelRecordHandle = { error in
|
|
|
+
|
|
|
+ }
|
|
|
+ manager.endRecordHandle = {[weak self] (isTimeout, model) in
|
|
|
+ if FileManager.default.fileExists(atPath: model?.wavFilePath ?? ""){
|
|
|
+ // 加入到语音数组里
|
|
|
+ while let m = self?.recordList.last{
|
|
|
+                    if m.startTime >= model!.startTime {
|
|
|
+ self?.recordList.removeLast()
|
|
|
+                    }else if m.endTime > model!.startTime {
|
|
|
+ m.endTime = model!.startTime
|
|
|
+ }else{
|
|
|
+ break
|
|
|
+ }
|
|
|
+ }
|
|
|
+ self?.recordList.append(model!)
|
|
|
+ self?.drewRecordProgessLable()
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+ return manager
|
|
|
+ }()
|
|
|
+
|
|
|
var beginOnStartBtn:Bool = false
|
|
|
var touchStart:CGPoint = CGPoint(x: 0, y: 0)
|
|
|
var avplayerTimeObserver: NSKeyValueObservation?
|
|
|
+ var recordList:[PQVoiceModel] = [PQVoiceModel]()
|
|
|
|
|
|
lazy var playBtn:UIButton = {
|
|
|
let btn = UIButton(frame: view.bounds)
|
|
@@ -58,13 +101,16 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
|
|
|
lazy var progessSilde:BFPlayerSlider = {
|
|
|
let sliderView = BFPlayerSlider()
|
|
|
- let thbImage = UIImage(named: "icon_point")
|
|
|
+ let tjbV = UIView(frame: CGRect(x: 0, y: 0, width: 4, height: 16))
|
|
|
+ tjbV.backgroundColor = .white
|
|
|
+ let thbImage = tjbV.graphicsGetImage()//UIImage(named: "icon_point")
|
|
|
sliderView.setMinimumTrackImage(thbImage, for: .normal)
|
|
|
sliderView.setMaximumTrackImage(thbImage, for: .normal)
|
|
|
sliderView.setThumbImage(thbImage, for: .highlighted)
|
|
|
sliderView.setThumbImage(thbImage, for: .normal)
|
|
|
sliderView.maximumTrackTintColor = UIColor.hexColor(hexadecimal: "#303030")
|
|
|
- sliderView.minimumTrackTintColor = UIColor.hexColor(hexadecimal: "#FA6400")
|
|
|
+ sliderView.minimumTrackTintColor = UIColor.hexColor(hexadecimal: "#303030")
|
|
|
+// sliderView.minimumTrackTintColor = UIColor.hexColor(hexadecimal: "#FA6400")
|
|
|
sliderView.addTarget(self, action: #selector(sliderTouchBegan(sender:)), for: .touchDown)
|
|
|
sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpInside)
|
|
|
sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpOutside)
|
|
@@ -75,7 +121,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
lazy var closeBtn:UIButton = {
|
|
|
let btn = UIButton(type: .custom)
|
|
|
btn.setImage(imageInRecordScreenKit(by: "xx"), for: .normal)
|
|
|
- btn.contentHorizontalAlignment = .right
|
|
|
+// btn.contentHorizontalAlignment = .right
|
|
|
btn.addTarget(self, action: #selector(backBtnClick), for: .touchUpInside)
|
|
|
return btn
|
|
|
}()
|
|
@@ -83,7 +129,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
lazy var nextBtn:UIButton = {
|
|
|
let btn = UIButton(type: .custom)
|
|
|
btn.setImage(imageInRecordScreenKit(by: "gou"), for: .normal)
|
|
|
- btn.contentHorizontalAlignment = .left
|
|
|
+// btn.contentHorizontalAlignment = .left
|
|
|
btn.addTarget(self, action: #selector(nextAction), for: .touchUpInside)
|
|
|
return btn
|
|
|
}()
|
|
@@ -93,7 +139,10 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
cleanMovieTarget()
|
|
|
NotificationCenter.default.removeObserver(self)
|
|
|
avplayerTimeObserver?.invalidate()
|
|
|
-
|
|
|
+ recorderManager.stopRecord(isCancel: true)
|
|
|
+ assetPlayer?.pause()
|
|
|
+ recordPlayer.pause()
|
|
|
+
|
|
|
}
|
|
|
|
|
|
public override func viewWillAppear(_ animated: Bool) {
|
|
@@ -120,7 +169,10 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
bottomeView.addSubview(closeBtn)
|
|
|
bottomeView.addSubview(nextBtn)
|
|
|
|
|
|
- _ = checkStatus()
|
|
|
+ if checkStatus() {
|
|
|
+ try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
|
|
|
+ }
|
|
|
+
|
|
|
}
|
|
|
|
|
|
public override func viewWillLayoutSubviews() {
|
|
@@ -147,8 +199,8 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
|
|
|
progessSilde.snp.makeConstraints { make in
|
|
|
- make.left.equalTo(closeBtn.snp.right).offset(6)
|
|
|
- make.right.equalTo(nextBtn.snp.left).offset(-6)
|
|
|
+ make.left.equalTo(closeBtn.snp.right).offset(16)
|
|
|
+ make.right.equalTo(nextBtn.snp.left).offset(-16)
|
|
|
make.centerY.equalTo(closeBtn)
|
|
|
make.height.equalTo(20)
|
|
|
}
|
|
@@ -201,24 +253,32 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
@objc func startRecord(btn:UIButton){
|
|
|
btn.setImage(imageInRecordScreenKit(by: "mic2"), for: .normal)
|
|
|
BFLog(1, message: "start \(UIControl.Event.touchDown)")
|
|
|
+ let model = PQVoiceModel()
|
|
|
+ model.startTime = CMTimeGetSeconds(assetPlayer?.currentItem?.currentTime() ?? CMTime.zero)
|
|
|
+        recorderManager.voiceModel = model
|
|
|
+ recorderManager.startRecord(index: recordList.count)
|
|
|
+ movie?.startProcessing()
|
|
|
+ assetPlayer?.volume = 0
|
|
|
+ assetPlayer?.play()
|
|
|
}
|
|
|
|
|
|
@objc func endRecord(btn:UIButton){
|
|
|
- cancleRecord()
|
|
|
+ recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
|
|
|
// 存储录音
|
|
|
+ recorderManager.endRecord()
|
|
|
+ pause()
|
|
|
}
|
|
|
|
|
|
func cancleRecord(){
|
|
|
recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
|
|
|
- BFLog(1, message: "cancel ")
|
|
|
}
|
|
|
@objc func close(){
|
|
|
pause()
|
|
|
- backBtnClick()
|
|
|
+ closeActionHandle?()
|
|
|
}
|
|
|
|
|
|
@objc func nextAction(){
|
|
|
- pause()
|
|
|
+ nextActionHandle?()
|
|
|
}
|
|
|
|
|
|
@objc func playVideo(btn:UIButton){
|
|
@@ -233,14 +293,14 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
|
|
|
@objc public func sliderTouchBegan(sender _: UISlider) {
|
|
|
- isDragingProgressSlder = true
|
|
|
- pause()
|
|
|
+// isDragingProgressSlder = true
|
|
|
+// pause()
|
|
|
}
|
|
|
|
|
|
@objc public func sliderTouchEnded(sender: UISlider) {
|
|
|
- changeProgress(progress: sender.value)
|
|
|
- isDragingProgressSlder = false
|
|
|
- play()
|
|
|
+// changeProgress(progress: sender.value)
|
|
|
+// isDragingProgressSlder = false
|
|
|
+// play()
|
|
|
}
|
|
|
|
|
|
// MARK: - 权限申请
|
|
@@ -259,7 +319,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
remindView.confirmBtn.setTitle("去设置", for: .normal)
|
|
|
UIApplication.shared.keyWindow?.addSubview(remindView)
|
|
|
remindView.remindData = remindData
|
|
|
- remindView.remindBlock = { [weak self] item, _ in
|
|
|
+ remindView.remindBlock = { item, _ in
|
|
|
if item.tag == 2 {
|
|
|
openAppSetting()
|
|
|
}
|
|
@@ -289,17 +349,29 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
|
|
|
// MARK: - 音视频处理
|
|
|
+ func playRecord(at duration:CMTime){
|
|
|
+        if let path = recordList.first?.wavFilePath, let player = try? AVAudioPlayer(contentsOf: URL(fileURLWithPath: path)) {
|
|
|
+ self.recordPlayer = player
|
|
|
+ self.recordPlayer.volume = 1
|
|
|
+ self.recordPlayer.play()
|
|
|
+ }else{
|
|
|
+ self.recordPlayer.pause()
|
|
|
+ }
|
|
|
+ }
|
|
|
|
|
|
func play(){
|
|
|
cShowHUB(superView: nil, msg: "开始播放")
|
|
|
+ assetPlayer?.volume = 0.5
|
|
|
movie?.startProcessing()
|
|
|
- audioPlayer?.play()
|
|
|
+ assetPlayer?.play()
|
|
|
+ playRecord(at: CMTime.zero)
|
|
|
}
|
|
|
|
|
|
func pause(){
|
|
|
cShowHUB(superView: nil, msg: "暂停播放")
|
|
|
movie?.cancelProcessing()
|
|
|
- audioPlayer?.pause()
|
|
|
+ assetPlayer?.pause()
|
|
|
+ recordPlayer.pause()
|
|
|
}
|
|
|
|
|
|
func fetchVideo(){
|
|
@@ -383,12 +455,12 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
|
|
|
func setAudioPlay(item:AVPlayerItem){
|
|
|
|
|
|
- if let playItem = audioPlayer?.currentItem {
|
|
|
+ if let playItem = assetPlayer?.currentItem {
|
|
|
NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
|
|
|
- audioPlayer?.replaceCurrentItem(with: item)
|
|
|
+ assetPlayer?.replaceCurrentItem(with: item)
|
|
|
}else {
|
|
|
- audioPlayer = AVPlayer(playerItem: item)
|
|
|
- avplayerTimeObserver = audioPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 10), queue: DispatchQueue.global()) {[weak self] time in
|
|
|
+ assetPlayer = AVPlayer(playerItem: item)
|
|
|
+ avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 10), queue: DispatchQueue.global()) {[weak self] time in
|
|
|
// 进度监控
|
|
|
BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
|
|
|
if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
|
|
@@ -400,9 +472,9 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
|
|
|
|
|
|
- NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: audioPlayer?.currentItem, queue: .main) { [weak self] notify in
|
|
|
+ NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer?.currentItem, queue: .main) { [weak self] notify in
|
|
|
BFLog(message: "AVPlayerItemDidPlayToEndTime = \(notify)")
|
|
|
- self?.audioPlayer?.seek(to: kCMTimeZero)
|
|
|
+ self?.assetPlayer?.seek(to: CMTime.zero)
|
|
|
self?.playBtn.isSelected = false
|
|
|
}
|
|
|
}
|
|
@@ -419,7 +491,7 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
|
|
|
func changeProgress(progress:Float) {
|
|
|
- if let item = audioPlayer?.currentItem {
|
|
|
+ if let item = assetPlayer?.currentItem {
|
|
|
let duration = CMTimeGetSeconds(item.duration)
|
|
|
item.seek(to: CMTime(value: CMTimeValue(progress * Float(duration) * 100), timescale: 100)) { finished in
|
|
|
if finished{
|
|
@@ -428,6 +500,12 @@ public class BFRecordScreenController: BFBaseViewController {
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+ //MARK: - 录音对应图像绘制
|
|
|
+
|
|
|
+ func drewRecordProgessLable(){
|
|
|
+
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
extension BFRecordScreenController:GPUImageMovieDelegate {
|
|
@@ -436,3 +514,7 @@ extension BFRecordScreenController:GPUImageMovieDelegate {
|
|
|
|
|
|
}
|
|
|
}
|
|
|
+
|
|
|
+extension BFRecordScreenController:AVAudioRecorderDelegate {
|
|
|
+
|
|
|
+}
|