//
//  BFRecordScreenController.swift
//  BFRecordScreenKit_Example
//
//  Created by 胡志强 on 2021/11/23.
//  Copyright © 2021 CocoaPods. All rights reserved.
//

import AVFoundation
import BFCommonKit
import BFFramework
import BFUIKit
import Foundation
import GPUImage
import Photos
import UIKit
public class BFRecordScreenController: BFBaseViewController {
    public var nextActionHandle: (() -> Void)?
    public var closeActionHandle: (() -> Void)?

    // MARK: - Recording parameters

    public var asset: PHAsset?
    // var shouldPlayRecordIndex: Int = -1 // index of the recording that should currently be playing
    var currentPlayRecordIndex: Int = -1   // >= 0: index of the recording currently playing; -3: just finished recording, don't play it back; -1: initial state
    var isRecording = false                // whether a voice recording is in progress
    var isNormalPlaying = false {          // whether normal playback is in progress
        didSet {
            playBtn.isSelected = isNormalPlaying
        }
    }
    var currentAssetProgress: CMTime = .zero // current playback position of the source material

    // Video material
    public var avasset: AVURLAsset?
    public var recordList: [PQVoiceModel] = [PQVoiceModel]()
    var assetPlayer: AVPlayer?             // plays the original video's audio
    var isCompletePlay = true
    var hadPrepareToPlayRecord = false     // whether the record player is prepared to play
    var recordPlayer: AVPlayer?            // plays the recorded voice audio
    var movie: GPUImageMovie?              // video preview
    var playView: GPUImageView?            // view that renders the video
    var isDragingProgressSlder: Bool = false // whether the progress slider is being dragged

    // Audio encoding parameters
    let recordSettings: [String: Any] = [
        AVSampleRateKey: 44100.0,                                  // sample rate
        AVFormatIDKey: kAudioFormatLinearPCM,                      // encoding format
        AVNumberOfChannelsKey: 1,                                  // number of channels
        AVEncoderBitDepthHintKey: 16,                              // bit depth
        AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue   // audio quality
    ]
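    // Illustrative sketch, not part of this controller: settings like the dictionary above are what
    // AVAudioRecorder accepts. Whether BFVoiceRecordManager uses AVAudioRecorder internally is an
    // assumption; only the settings dictionary itself comes from this file, and the file path below
    // is hypothetical.
    //
    //     let url = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("demo.wav")
    //     let recorder = try AVAudioRecorder(url: url, settings: recordSettings)
    //     recorder.prepareToRecord()
    //     recorder.record()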
    // Recording-related
    lazy var recorderManager: BFVoiceRecordManager = {
        let manager = BFVoiceRecordManager()
        manager.cancelRecordHandle = { error in
        }
        manager.endRecordHandle = { [weak self] (isTimeout, model) in
            if let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
                // Add it to the voice list
                let ass = AVURLAsset(url: URL(fileURLWithPath: model.wavFilePath))
                model.endTime = model.startTime + CMTimeGetSeconds(ass.duration)
                // TODO: the old logic removed everything after the new recording; the new logic inserts and overwrites
                while let m = self?.recordList.last {
                    if model.startTime < m.startTime {
                        self?.recordList.removeLast()
                    } else if m.endTime > model.startTime {
                        m.endTime = model.startTime
                    } else {
                        break
                    }
                }
                BFLog(1, message: "adding recording file: \(model.startTime) -- \(model.endTime)")
                self?.recordList.append(model)
                self?.drawOrUpdateRecordProgessLable()
                self?.currentPlayRecordIndex = -3 // just finished recording, don't play it back
            }
        }
        return manager
    }()
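    // Worked example of the overlap trimming in endRecordHandle above (values are illustrative,
    // not from the source): recordList = [(0.0–4.0), (4.0–9.0)] and a new recording starts at 6.0 s
    // and lasts 3.0 s. The last segment overlaps (endTime 9.0 > 6.0), so its endTime is clipped to
    // 6.0; the next pass hits the `break`, and the new (6.0–9.0) segment is appended, giving
    // [(0.0–4.0), (4.0–6.0), (6.0–9.0)]. A segment that starts after the new recording's startTime
    // would be removed entirely by the first branch.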
    // MARK: - View parameters

    var beginOnStartBtn: Bool = false
    var touchStart: CGPoint = CGPoint(x: 0, y: 0)
    var avplayerTimeObserver: Any? // token returned by addPeriodicTimeObserver; removed in deinit

    lazy var progreddL: UILabel = {
        let l = UILabel(frame: CGRect(x: 0, y: cDevice_iPhoneStatusBarHei, width: cScreenWidth, height: 14))
        l.textAlignment = .center
        l.font = UIFont.systemFont(ofSize: 10)
        l.textColor = .white
        l.shadowColor = .black
        l.shadowOffset = CGSize(width: 1, height: 1)
        return l
    }()

    lazy var playBtn: UIButton = {
        let btn = UIButton(frame: view.bounds)
        btn.addTarget(self, action: #selector(playVideo(btn:)), for: .touchUpInside)
        return btn
    }()

    lazy var bottomeView: UIImageView = {
        let iv = UIImageView(image: imageInRecordScreenKit(by: "bottom_shadow"))
        iv.contentMode = .scaleAspectFill
        iv.isUserInteractionEnabled = true
        // Pan gesture
        let pan = UIPanGestureRecognizer(target: self, action: #selector(pan(recognizer:)))
        pan.delegate = self
        pan.minimumNumberOfTouches = 1
        pan.maximumNumberOfTouches = 1
        iv.addGestureRecognizer(pan)
        return iv
    }()

    lazy var recordBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
        btn.adjustsImageWhenHighlighted = false
        btn.addTarget(self, action: #selector(startRecord), for: .touchDown)
        btn.addTarget(self, action: #selector(endRecord), for: .touchUpInside)
        return btn
    }()

    lazy var progessSildeBackV: UIView = {
        let vv = UIView()
        vv.backgroundColor = UIColor.hexColor(hexadecimal: "#303030")
        return vv
    }()

    lazy var progessSilde: BFPlayerSlider = {
        let sliderView = BFPlayerSlider()
        let tjbV = UIView(frame: CGRect(x: 0, y: 0, width: 4, height: 16))
        tjbV.backgroundColor = .white
        let thbImage = tjbV.graphicsGetImage() // UIImage(named: "icon_point")
        sliderView.setMinimumTrackImage(thbImage, for: .normal)
        sliderView.setMaximumTrackImage(thbImage, for: .normal)
        sliderView.setThumbImage(thbImage, for: .highlighted)
        sliderView.setThumbImage(thbImage, for: .normal)
        sliderView.maximumTrackTintColor = .clear // UIColor.hexColor(hexadecimal: "#303030")
        sliderView.minimumTrackTintColor = .clear // UIColor.hexColor(hexadecimal: "#303030")
        // sliderView.minimumTrackTintColor = UIColor.hexColor(hexadecimal: "#FA6400")
        sliderView.addTarget(self, action: #selector(sliderTouchBegan(sender:)), for: .touchDown)
        sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpInside)
        sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchUpOutside)
        sliderView.addTarget(self, action: #selector(sliderTouchEnded(sender:)), for: .touchCancel)
        return sliderView
    }()

    lazy var closeBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.setImage(imageInRecordScreenKit(by: "xx"), for: .normal)
        btn.addTarget(self, action: #selector(closePage), for: .touchUpInside)
        return btn
    }()

    lazy var nextBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.setImage(imageInRecordScreenKit(by: "gou"), for: .normal)
        btn.addTarget(self, action: #selector(nextAction), for: .touchUpInside)
        return btn
    }()

    lazy var toolV: BFIntroduceToolView = {
        let toolV = BFIntroduceToolView()
        toolV.centerY = view.centerY
        toolV.choosedToolHandle = { [weak self] tool in
            guard let sself = self else {
                return
            }
            tool.center = sself.view.center
            sself.view.addSubview(tool)
        }
        return toolV
    }()
    // MARK: ------------------ Lifecycle

    deinit {
        cleanMovieTarget()
        NotificationCenter.default.removeObserver(self)
        if let observer = avplayerTimeObserver {
            assetPlayer?.removeTimeObserver(observer)
        }
        recorderManager.stopRecord(isCancel: true)
        assetPlayer?.pause()
        recordPlayer?.pause()
    }

    public override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        self.navigationController?.isNavigationBarHidden = true
        hiddenNavigation()
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
        _ = disablePopGesture()
        view.backgroundColor = .lightGray

        playView = GPUImageView(frame: view.bounds)
        view.addSubview(playView!)
        fetchVideo()

        view.addSubview(playBtn)
        view.addSubview(bottomeView)
        view.addSubview(progreddL)
        // view.addSubview(toolV)
        bottomeView.addSubview(recordBtn)
        bottomeView.addSubview(progessSildeBackV)
        bottomeView.addSubview(progessSilde)
        bottomeView.addSubview(closeBtn)
        bottomeView.addSubview(nextBtn)

        if checkStatus() {
            // .defaultToSpeaker routes playAndRecord output to the speaker instead of the receiver
            try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
        }
    }
    public override func viewWillLayoutSubviews() {
        super.viewWillLayoutSubviews()
        bottomeView.snp.makeConstraints { make in
            make.left.bottom.right.equalToSuperview()
            make.height.equalTo(adapterWidth(width: 220))
        }
        recordBtn.snp.makeConstraints { make in
            make.width.height.equalTo(120)
            make.centerX.equalToSuperview()
            make.top.equalTo(27)
        }
        closeBtn.snp.makeConstraints { make in
            make.left.equalToSuperview()
            make.width.height.equalTo(60)
            make.top.equalTo(recordBtn.snp.bottom).offset(-10)
        }
        nextBtn.snp.makeConstraints { make in
            make.right.equalToSuperview()
            make.top.width.height.equalTo(closeBtn)
        }
        progessSildeBackV.snp.makeConstraints { make in
            make.left.equalTo(closeBtn.snp.right).offset(16)
            make.right.equalTo(nextBtn.snp.left).offset(-16)
            make.centerY.equalTo(closeBtn)
            make.height.equalTo(8)
        }
        progessSilde.snp.makeConstraints { make in
            make.left.right.centerY.equalTo(progessSildeBackV)
            make.height.equalTo(20)
        }
    }
    // MARK: - Button actions

    // Action performed while the pan gesture is active (press-and-drag on the record button)
    @objc func pan(recognizer: UIPanGestureRecognizer) {
        if !checkStatus(show: false) {
            return
        }
        let point = recognizer.location(in: bottomeView)
        switch recognizer.state {
        case .began:
            touchStart = recognizer.location(in: bottomeView)
            beginOnStartBtn = recordBtn.frame.contains(touchStart)
        case .changed:
            if beginOnStartBtn == true {
                let nowPoint = recognizer.location(in: bottomeView)
                BFLog(1, message: "nowPoint x: \(nowPoint.x) y:\(nowPoint.y)")
                if recordBtn.frame.contains(nowPoint) {
                } else {
                }
            }
        case .ended:
            BFLog(1, message: "pan ended \(beginOnStartBtn)")
            if beginOnStartBtn == true {
                beginOnStartBtn = false
                if recordBtn.frame.contains(point) {
                    // Lifted on the record button: finish the recording
                    endRecord()
                } else {
                    // Lifted outside the record button: cancel the recording
                    cancleRecord()
                }
            }
        default:
            break
        }
    }
    @objc func startRecord() {
        recordBtn.setImage(imageInRecordScreenKit(by: "mic2"), for: .normal)
        BFLog(1, message: "start \(UIControl.Event.touchDown)")
        pause()
        let model = PQVoiceModel()
        model.startTime = CMTimeGetSeconds(self.currentAssetProgress)
        model.volume = 100
        recorderManager.voiceModel = model
        recorderManager.startRecord(index: recordList.count)
        movie?.startProcessing()
        assetPlayer?.volume = 0
        assetPlayer?.play()
        isRecording = true
    }

    @objc func endRecord() {
        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
        // Persist the recording
        recorderManager.endRecord()
        isRecording = false
        pause()
    }

    func cancleRecord() {
        recordBtn.setImage(imageInRecordScreenKit(by: "mic1"), for: .normal)
        recorderManager.cancleRecord()
        isRecording = false
        pause()
    }

    @objc func closePage() {
        pause()
        closeActionHandle?()
    }

    @objc func nextAction() {
        nextActionHandle?()
        pause()
    }

    @objc func playVideo(btn: UIButton) {
        btn.isSelected ? pause() : play()
    }

    @objc public func sliderTouchBegan(sender _: UISlider) {
        isDragingProgressSlder = true
        pause()
    }

    @objc public func sliderTouchEnded(sender: UISlider) {
        changeProgress(progress: sender.value)
        isDragingProgressSlder = false
        currentPlayRecordIndex = -1
        hadPrepareToPlayRecord = false
    }
    // MARK: - Permission requests

    func checkStatus(show: Bool = true) -> Bool {
        let status = AVCaptureDevice.authorizationStatus(for: .audio)
        switch status {
        case .denied, .restricted:
            BFLog(message: "Microphone access denied; please enable it in Settings")
            if show {
                let remindData = BFBaseModel()
                remindData.title = "票圈视频 needs access to your microphone to record audio"
                remindData.summary = ""
                let remindView = BFRemindView(frame: CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenHeigth))
                remindView.isBanned = true
                remindView.confirmBtn.setTitle("Go to Settings", for: .normal)
                UIApplication.shared.keyWindow?.addSubview(remindView)
                remindView.remindData = remindData
                remindView.remindBlock = { item, _ in
                    if item.tag == 2 {
                        openAppSetting()
                    }
                }
            }
            return false
        case .authorized:
            return true
        case .notDetermined:
            if show {
                requestMicroPhoneAuth()
            }
            return false
        default:
            break
        }
        return false
    }

    func requestMicroPhoneAuth() {
        AVCaptureDevice.requestAccess(for: .audio) { granted in
            if !granted {
                BFLog(message: "Microphone access denied; please enable it in Settings")
            }
        }
    }
    // MARK: - Audio/video handling

    func playRecord(at currentT: CMTime) {
        if currentPlayRecordIndex == -3 { // just finished recording, don't play it back
            return
        }
        let (shouldPlayRecordIndex, recordedAudio) = recordList.enumerated().first { item in
            item.element.endTime > CMTimeGetSeconds(currentT)
        } ?? (-1, nil)
        guard let recordedAudio = recordedAudio else {
            return
        }
        BFLog(1, message: "current time: \(CMTimeGetSeconds(currentT)), matched recording: \(recordedAudio.startTime), \(recordedAudio.endTime)")

        // Create the player, or swap in a new item if the file changed
        if self.recordPlayer == nil || (self.recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString).lastPathComponent {
            let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
            if let player = self.recordPlayer {
                player.pause()
                if let playItem = player.currentItem {
                    NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
                    recordPlayer?.replaceCurrentItem(with: newItem)
                }
            } else {
                self.recordPlayer = AVPlayer(playerItem: newItem)
            }
            self.recordPlayer!.volume = 1
            // self.recordPlayer?.prepareToPlay()
            currentPlayRecordIndex = -1
            hadPrepareToPlayRecord = false
            BFLog(1, message: "record player initialized (sometimes inaccurate)")
            NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self] notify in
                self?.hadPrepareToPlayRecord = false
                self?.currentPlayRecordIndex = -1
            }
        }

        synced(currentPlayRecordIndex) {
            if !hadPrepareToPlayRecord
                && CMTimeGetSeconds(currentT) >= recordedAudio.startTime
                && CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2 // avoids the small rewind right after recording triggering playback of the newest recording
            {
                // Playback should start now.
                // Two cases: if already playing, skip; if paused and dragged into the middle, seek.
                if currentPlayRecordIndex == -1 && self.isNormalPlaying {
                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
                    DispatchQueue.main.async { [weak self] in
                        self?.recordPlayer?.seek(to: CMTime(seconds: second, preferredTimescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { [weak self] finished in
                            if finished && (self?.isNormalPlaying ?? false) {
                                self?.recordPlayer?.play()
                                BFLog(1, message: "record playback started, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero))")
                            }
                        })
                    }
                    currentPlayRecordIndex = shouldPlayRecordIndex
                    hadPrepareToPlayRecord = true
                    BFLog(1, message: "record playback scheduled, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero))")
                }
            }
        }
        BFLog(1, message: "should play: \(shouldPlayRecordIndex), currently playing: \(currentPlayRecordIndex)")
        // if let recordedAudio = recordedAudio {
        //     if shouldPlayRecordIndex != currentPlayRecordIndex {
        //         // Set the new playback resource
        //         // self.recordPlayer.delegate = self
        //         self.recordPlayer.play()
        //     } else {
        //         // Update playback progress
        //         let second = CMTimeGetSeconds(duration) - recordedAudio.startTime
        //         self.recordPlayer.seek(to: CMTime(seconds: second, preferredTimescale: 25), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000))
        //     }
        // }
    }
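    // Note on the recordPlayer seek in playRecord(at:) above: the recording's own timeline starts
    // at 0 at recordedAudio.startTime, so the offset into the recording is currentT - startTime.
    // Illustrative numbers (not from the source): with currentT = 7.5 s and startTime = 6.0 s, the
    // player seeks to 1.5 s, i.e. CMTime(seconds: 1.5, preferredTimescale: 100) = 150/100.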
    func play() {
        BFLog(1, message: "start playback")
        isNormalPlaying = true
        assetPlayer?.volume = 0.2
        movie?.startProcessing()
        self.assetPlayer?.play()
    }

    func pause() {
        BFLog(1, message: "pause playback")
        isNormalPlaying = false
        movie?.cancelProcessing()
        assetPlayer?.pause()
        recordPlayer?.pause()
        let second = self.currentAssetProgress
        assetPlayer?.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { finished in
        })
    }
    func fetchVideo() {
        if let asset = self.asset {
            let options = PHVideoRequestOptions()
            options.isNetworkAccessAllowed = true
            options.deliveryMode = .automatic
            PHImageManager.default().requestPlayerItem(forVideo: asset, options: options, resultHandler: { [weak self] playerItem, info in
                guard let item = playerItem else {
                    cShowHUB(superView: nil, msg: "Failed to load the video")
                    return
                }
                self?.setAudioPlay(item: item)
                self?.setVideoPlay(item: item)
            })

            let option = PHImageRequestOptions()
            option.isNetworkAccessAllowed = true // allow downloading iCloud images
            option.resizeMode = .fast
            option.deliveryMode = .highQualityFormat
            PHImageManager.default().requestImage(for: asset,
                                                  targetSize: self.view.bounds.size,
                                                  contentMode: .aspectFit,
                                                  options: option)
            { (image, _) in
                // `image` is the requested still frame
                if image != nil {
                    let pic = GPUImagePicture(image: image)
                    let filet = GPUImageFilter()
                    pic?.addTarget(filet)
                    filet.addTarget(self.playView)
                    pic?.processImage()
                }
            }

            // Alternative: copy the resource to a local file
            // let outFilePath = NSHomeDirectory().appending("/Documents/simple.mp4")
            // let outFileUrl = URL(fileURLWithPath: outFilePath)
            // if FileManager.default.fileExists(atPath: outFilePath) {
            //     try? FileManager.default.removeItem(atPath: outFilePath)
            // }
            //
            // let assetResources = PHAssetResource.assetResources(for: asset)
            // if let rsc = assetResources.first(where: { res in
            //     res.type == .video || res.type == .pairedVideo
            // }) {
            //     PHAssetResourceManager.default().writeData(for: rsc, toFile: outFileUrl, options: nil) { [weak self] error in
            //         if error == nil {
            //             DispatchQueue.main.async { [weak self] in
            //                 self?.setVideoPlay(url: outFileUrl)
            //                 self?.setAudioPlay(url: outFileUrl)
            //             }
            //         } else {
            //             BFLog(1, message: "exporting the video failed")
            //         }
            //     }
            // } else {
            //
            // }

            PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: { [weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
                if let urlass = asset as? AVURLAsset {
                    self?.avasset = urlass
                }
            })
        }
    }
    func setVideoPlay(item: AVPlayerItem) {
        if movie != nil {
            cleanMovieTarget()
        }
        movie = GPUImageMovie(playerItem: item)
        // movie?.runBenchmark = true
        movie?.playAtActualSpeed = true
        let filter = GPUImageFilter()
        movie?.addTarget(filter)
        filter.addTarget(playView)
    }

    func setAudioPlay(item: AVPlayerItem) {
        if let playItem = assetPlayer?.currentItem {
            NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
            assetPlayer?.replaceCurrentItem(with: item)
        } else {
            assetPlayer = AVPlayer(playerItem: item)
            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { [weak self] time in
                // Progress monitoring
                if !((self?.isNormalPlaying ?? false) || (self?.isRecording ?? false)) {
                    return
                }
                self?.currentAssetProgress = time
                BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
                if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
                    DispatchQueue.main.async { [weak self] in
                        self?.progessSilde.value = Float(CMTimeGetSeconds(time) / CMTimeGetSeconds(item.duration))
                        self?.progreddL.text = String(format: "%.2f / %.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
                    }
                }
                // Play the recording that corresponds to this time
                self?.playRecord(at: time)
            }
        }
        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer?.currentItem, queue: .main) { [weak self] notify in
            BFLog(1, message: "AVPlayerItemDidPlayToEndTime = \(notify)")
            self?.isNormalPlaying = false
            self?.assetPlayer?.seek(to: CMTime.zero)
            self?.currentPlayRecordIndex = -1
            if self?.isRecording ?? false {
                self?.endRecord()
            }
        }
    }
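    // The periodic observer in setAudioPlay(item:) above fires every 1/100 s
    // (CMTime(value: 1, timescale: 100)) on a background queue and drives three things:
    // currentAssetProgress, the slider/label UI (hopped onto the main queue), and playRecord(at:),
    // which keeps the voice-over in sync with the video.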
    func cleanMovieTarget() {
        movie?.cancelProcessing()
        movie?.targets().forEach({ target in
            if let objc = target as? GPUImageOutput {
                objc.removeAllTargets()
            }
        })
        movie?.removeAllTargets()
        movie?.removeFramebuffer()
        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
    }
    // MARK: - Drawing the recorded segments

    func changeProgress(progress: Float) {
        if let duration = assetPlayer?.currentItem?.duration {
            assetPlayer!.seek(to: CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) { [weak self] finished in
                if finished {
                    BFLog(1, message: "seek finished")
                    self?.movie?.startProcessing()
                }
            }
        }
    }
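    // Worked example for the seek in changeProgress(progress:) above (illustrative numbers):
    // progress = 0.5 on a 64 s item gives value = 0.5 * 64 * 100 = 3200 at timescale 100, i.e. a
    // seek to 32 s. Multiplying by 100 before truncating to CMTimeValue keeps two decimal places
    // of precision.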
    func drawOrUpdateRecordProgessLable() {
        DispatchQueue.main.async { [weak self] in
            self?.progessSildeBackV.subviews.forEach { vv in
                vv.removeFromSuperview()
            }
            if let totalDur = self?.asset?.duration, totalDur > 0, let list = self?.recordList {
                let width = self?.progessSildeBackV.width ?? 0
                let height = self?.progessSildeBackV.height ?? 0
                list.forEach { model in
                    let lineV = UIView(frame: CGRect(x: model.startTime * width / totalDur, y: 0, width: (model.endTime - model.startTime) * width / totalDur, height: height))
                    lineV.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67")
                    self?.progessSildeBackV.addSubview(lineV)
                }
            }
        }
    }
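    // Each green bar drawn above maps a recording's time range onto the slider track:
    // x = startTime / totalDur * trackWidth, width = (endTime - startTime) / totalDur * trackWidth.
    // Illustrative numbers: a 6–9 s recording over a 60 s asset on a 300 pt track sits at
    // x = 30 pt with width = 15 pt.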
}
extension BFRecordScreenController: GPUImageMovieDelegate {
    public func didCompletePlayingMovie() {
        BFLog(1, message: "movie playback finished")
        currentPlayRecordIndex = -1
    }
}

extension BFRecordScreenController: AVAudioRecorderDelegate {
    public func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        BFLog(1, message: "recording finished")
    }
}

extension BFRecordScreenController: AVAudioPlayerDelegate {
    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        BFLog(1, message: "record playback finished")
    }
}
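
// Usage sketch (illustrative, not part of this file): how a host view controller might present
// this screen. The asset lookup and navigation style are assumptions; `asset`, `nextActionHandle`
// and `closeActionHandle` are the public API declared above, and `selectedAsset` is hypothetical.
//
//     let vc = BFRecordScreenController()
//     vc.asset = selectedAsset // a PHAsset picked elsewhere
//     vc.nextActionHandle = { [weak vc] in
//         // hand the recorded segments (vc?.recordList) to the next editing step
//     }
//     vc.closeActionHandle = { [weak vc] in
//         vc?.navigationController?.popViewController(animated: true)
//     }
//     navigationController?.pushViewController(vc, animated: true)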