//
// BFRecordScreenController.swift
// BFRecordScreenKit_Example
//
// Created by 胡志强 on 2021/11/23.
// Copyright © 2021 CocoaPods. All rights reserved.
//

import AVFoundation // AVPlayer / AVAudioSession / CMTime APIs used throughout
import BFCommonKit
import BFFramework
import BFUIKit
import Foundation
import GPUImage
import Photos
import SnapKit // .snp layout DSL used in layoutsubview() (assumed available as a direct dependency)
import UIKit
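
/// Voice-over recording screen: plays a selected video (`PHAsset`) through GPUImage and lets the
/// user hold a button to record narration segments (`voiceStickers`) aligned to the video timeline.
///
/// A minimal usage sketch (the surrounding navigation flow is an assumption, not part of this file):
///
///     let recordVC = BFRecordScreenController()
///     recordVC.assets = [videoAsset]            // PHAsset video(s) to narrate
///     recordVC.nextActionHandle = { /* move on to the edit/export step */ }
///     navigationController?.pushViewController(recordVC, animated: true)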
public class BFRecordScreenController: BFBaseViewController {
    public var nextActionHandle: (() -> Void)?
    public var closeActionHandle: (() -> Void)?

    // MARK: - Recording parameters

    public var assets = [PHAsset]()
    var currItemModelIndex = 0
    public var itemModels = [BFRecordItemModel]()
    // var shouldPlayRecordIndex: Int = -1 // index of the recording that should currently be playing
    var currentPlayRecordIndex: Int = -1 // >= 0: index of the recording currently playing; -3: just finished recording, don't play it back; -1: initial state
    var isRecording = false { // true while the microphone is recording
        didSet {
            withDrawBtn.isHidden = isRecording
            changeVoiceBtn.isHidden = isRecording
            recordBtn.setTitle(isRecording ? "松手 完成" : "按住 说话", for: .normal)
            recordBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#28BE67", alpha: isRecording ? 0.6 : 1)
        }
    }

    var isNormalPlaying = false { // true while the video is playing normally
        didSet {
            playBtn.isSelected = isNormalPlaying
        }
    }

    var currentAssetProgress: CMTime = .zero // current playback position of the material

    // Video material
    public var avasset: AVURLAsset?
    // public var recordList: [PQVoiceModel] = [PQVoiceModel]()
    var assetPlayer: AVPlayer? // plays the original video's audio track
    var isCompletePlay = true
    var hadPrepareToPlayRecord = false // whether the narration player has been prepared for the current segment
    var recordPlayer: AVPlayer? // plays the recorded narration
    var movie: GPUImageMovie? // video preview source
    var playView: GPUImageView? // view that renders the video
    var isDragingProgressSlder: Bool = false // true while the progress bar is being dragged

    // Audio encoding settings for the recorder
    let recordSettings: [String: Any] = [
        AVSampleRateKey: 44100.0,                                  // sample rate
        AVFormatIDKey: kAudioFormatLinearPCM,                      // encoding format
        AVNumberOfChannelsKey: 1,                                  // number of channels
        AVEncoderBitDepthHintKey: 16,                              // bit depth
        AVEncoderAudioQualityKey: AVAudioQuality.medium.rawValue   // audio quality
    ]

    // Recording
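    /// Microphone recorder. When a recording finishes successfully, the finished `PQVoiceModel`
    /// is appended to the current item's `voiceStickers` (removing any existing segment it
    /// overlaps), the green progress marks are redrawn, and `currentPlayRecordIndex` is set to -3
    /// so the segment that was just recorded is not immediately played back.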
    lazy var recorderManager: BFVoiceRecordManager = {
        let manager = BFVoiceRecordManager()
        manager.cancelRecordHandle = { error in
        }
        manager.endRecordHandle = { [weak self] (model, error) in
            if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
                // Add the finished recording to the voice list.
                // TODO: the old logic removed every segment after the new recording; the new logic removes only overlapped segments.
                var index = sself.itemModels[sself.currItemModelIndex].voiceStickers.count - 1
                while index >= 0 {
                    let m = sself.itemModels[sself.currItemModelIndex].voiceStickers[index]
                    index -= 1
                    if model.endTime > m.startTime && model.startTime < m.endTime {
                        // The new recording overlaps this segment: drop the old one.
                        sself.itemModels[sself.currItemModelIndex].voiceStickers.remove(at: index + 1)
                        continue
                    }
                    if m.endTime <= model.startTime {
                        // Segments are kept in chronological order, so everything earlier ends even sooner.
                        break
                    }
                }
                BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
                sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
                sself.drawOrUpdateRecordProgessLable()
                sself.currentPlayRecordIndex = -3 // just finished recording; no need to play it back
            }
        }
        return manager
    }()

    // MARK: - View state

    var beginOnStartBtn: Bool = false
    var touchStart: CGPoint = CGPoint(x: 0, y: 0)
    var avplayerTimeObserver: Any? // token returned by addPeriodicTimeObserver(forInterval:queue:using:)

    lazy var progreddL: UILabel = {
        let l = UILabel()
        l.textAlignment = .center
        l.font = UIFont.systemFont(ofSize: 10)
        l.textColor = .white
        l.shadowColor = .black
        l.shadowOffset = CGSize(width: 1, height: 1)
        return l
    }()
    lazy var playBtn: UIButton = {
        let btn = UIButton(frame: view.bounds)
        btn.addTarget(self, action: #selector(playVideo(btn:)), for: .touchUpInside)
        return btn
    }()

    lazy var bottomeView: UIImageView = {
        let iv = UIImageView(image: imageInRecordScreenKit(by: "bottom_shadow"))
        iv.contentMode = .scaleAspectFill
        iv.isUserInteractionEnabled = true
        // Drag gesture (used to cancel a recording by sliding off the record button)
        let pan = UIPanGestureRecognizer(target: self, action: #selector(pan(recognizer:)))
        pan.delegate = self
        pan.minimumNumberOfTouches = 1
        pan.maximumNumberOfTouches = 1
        iv.addGestureRecognizer(pan)
        return iv
    }()

    lazy var recordBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.backgroundColor = ThemeStyleGreen()
        btn.setTitle("按住 说话", for: .normal)
        btn.adjustsImageWhenHighlighted = false
        btn.addTarget(self, action: #selector(startRecord), for: .touchDown)
        btn.addTarget(self, action: #selector(endRecord), for: .touchUpInside)
        return btn
    }()

    // lazy var progessSildeBackV: UIView = {
    //     let vv = UIView()
    //     vv.backgroundColor = .orange // .clear
    //     return vv
    // }()

    lazy var withDrawBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.setImage(imageInRecordScreenKit(by: "withdraw_n"), for: .normal)
        btn.setImage(imageInRecordScreenKit(by: "withdraw_h"), for: .highlighted)
        btn.setTitle("撤销", for: .normal)
        btn.setTitleColor(.white, for: .normal)
        btn.setTitleColor(.gray, for: .highlighted)
        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
        btn.contentVerticalAlignment = .center
        btn.addTarget(self, action: #selector(withdrawAction), for: .touchUpInside)
        return btn
    }()

    lazy var changeVoiceBtn: UIButton = {
        let btn = UIButton(type: .custom)
        btn.setImage(imageInRecordScreenKit(by: "changeVoice_n"), for: .normal)
        btn.setImage(imageInRecordScreenKit(by: "changeVoice_h"), for: .highlighted)
        btn.setTitle("变声", for: .normal)
        btn.setTitleColor(.white, for: .normal)
        btn.setTitleColor(ThemeStyleGreen(), for: .highlighted)
        btn.titleLabel?.font = UIFont.systemFont(ofSize: 12)
        btn.contentVerticalAlignment = .center
        btn.addTarget(self, action: #selector(changeVoiceAction), for: .touchUpInside)
        return btn
    }()

    lazy var toolV: BFIntroduceToolView = {
        let toolV = BFIntroduceToolView()
        toolV.centerY = view.centerY
        toolV.choosedToolHandle = { [weak self] tool in
            guard let sself = self else {
                return
            }
            tool.center = sself.view.center
            sself.view.addSubview(tool)
        }
        return toolV
    }()
    lazy var progressThumV: BFVideoThumbProgressView = {
        let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
        vv.dragScrollProgressHandle = { [weak self] process in
            DispatchQueue.main.async { [weak self] in
                guard let sself = self else {
                    return
                }
                if sself.isNormalPlaying || sself.isRecording {
                    sself.pause()
                    sself.isDragingProgressSlder = true
                }
                sself.changeProgress(progress: process)
            }
        }
        vv.dragEndHandle = { [weak self] process in
            guard let sself = self else {
                return
            }
            sself.changeProgress(progress: process)
            sself.isDragingProgressSlder = false
            sself.currentPlayRecordIndex = -1
            sself.hadPrepareToPlayRecord = false
        }
        vv.isHidden = true
        return vv
    }()

    // MARK: - Lifecycle

    deinit {
        cleanMovieTarget()
        NotificationCenter.default.removeObserver(self)
        if let observer = avplayerTimeObserver {
            assetPlayer?.removeTimeObserver(observer)
        }
        if isRecording {
            recorderManager.stopRecord(isCancel: true)
        }
        assetPlayer?.pause()
        recordPlayer?.pause()
    }

    public override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        self.navigationController?.isNavigationBarHidden = true
        hiddenNavigation()
    }

    public override func viewDidLoad() {
        super.viewDidLoad()
        _ = disablePopGesture()
        view.backgroundColor = .lightGray

        playView = GPUImageView(frame: view.bounds)
        view.addSubview(playView!)
        fetchVideo()

        view.addSubview(playBtn)
        view.addSubview(bottomeView)
        bottomeView.addSubview(progreddL)
        // view.addSubview(toolV)
        bottomeView.addSubview(recordBtn)
        bottomeView.addSubview(withDrawBtn)
        bottomeView.addSubview(changeVoiceBtn)
        bottomeView.addSubview(progressThumV)
        // progressThumV.addSubview(progessSildeBackV)

        if checkStatus() {
            try? AVAudioSession.sharedInstance().setCategory(.playAndRecord, options: .defaultToSpeaker)
        }
        layoutsubview()
    }

    func layoutsubview() {
        bottomeView.snp.makeConstraints { make in
            make.left.bottom.right.equalToSuperview()
            make.height.equalTo(adapterWidth(width: 220))
        }
        progreddL.snp.makeConstraints { make in
            make.width.equalTo(100)
            make.centerX.equalToSuperview()
            make.bottom.equalTo(progressThumV.snp.top)
            make.height.equalTo(18)
        }
        withDrawBtn.snp.makeConstraints { make in
            make.left.equalToSuperview()
            make.width.height.equalTo(65)
            make.top.equalTo(128)
        }
        changeVoiceBtn.snp.makeConstraints { make in
            make.right.equalToSuperview()
            make.top.width.height.equalTo(withDrawBtn)
        }
        recordBtn.snp.makeConstraints { make in
            make.left.equalTo(withDrawBtn.snp.right)
            make.right.equalTo(changeVoiceBtn.snp.left)
            make.height.equalTo(42)
            make.top.equalTo(withDrawBtn).offset(6)
        }
        // progessSildeBackV.snp.makeConstraints { make in
        //     make.left.equalToSuperview()
        //     make.right.equalToSuperview()
        //     make.bottom.equalToSuperview()
        //     make.height.equalTo(8)
        // }

        // Stack the image above the title on the two small buttons.
        withDrawBtn.imageEdgeInsets = UIEdgeInsets(top: -withDrawBtn.imageView!.height, left: 0, bottom: 0, right: -withDrawBtn.titleLabel!.width)
        withDrawBtn.titleEdgeInsets = UIEdgeInsets(top: withDrawBtn.titleLabel!.height + 2, left: -withDrawBtn.imageView!.width, bottom: 0, right: 0)
        changeVoiceBtn.imageEdgeInsets = UIEdgeInsets(top: -changeVoiceBtn.imageView!.height - 2, left: 0, bottom: 0, right: -changeVoiceBtn.titleLabel!.width)
        changeVoiceBtn.titleEdgeInsets = UIEdgeInsets(top: changeVoiceBtn.titleLabel!.height + 2, left: -changeVoiceBtn.imageView!.width, bottom: 0, right: 0)
    }

    // public override func viewWillLayoutSubviews() {
    //     super.viewWillLayoutSubviews()
    // }

    // MARK: - Button actions

    public override func backBtnClick() {
        pause()
    }

    /// Pan gesture on the bottom bar. While recording, lifting the finger inside the record
    /// button finishes the recording; lifting it outside cancels the recording.
    @objc func pan(recognizer: UIPanGestureRecognizer) {
        if !checkStatus(show: false) {
            return
        }
        let point = recognizer.location(in: bottomeView)
        switch recognizer.state {
        case .began:
            touchStart = recognizer.location(in: bottomeView)
            beginOnStartBtn = recordBtn.frame.contains(touchStart)
        case .changed:
            if beginOnStartBtn == true {
                let nowPoint = recognizer.location(in: bottomeView)
                BFLog(1, message: "nowPoint x: \(nowPoint.x) y:\(nowPoint.y)")
                if recordBtn.frame.contains(nowPoint) {
                } else {
                }
            }
        case .ended:
            BFLog(1, message: "移动结束 \(beginOnStartBtn)")
            if beginOnStartBtn == true {
                beginOnStartBtn = false
                if recordBtn.frame.contains(point) {
                    // Finger lifted on the button: finish the recording.
                    endRecord()
                } else {
                    // Finger lifted elsewhere: cancel the recording.
                    cancleRecord()
                }
            }
        default:
            break
        }
    }
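
    /// Begins a narration segment at the current playhead: creates a `PQVoiceModel` stamped with
    /// the current progress, starts `recorderManager`, and plays the video muted so the user can
    /// narrate while watching.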
    @objc func startRecord() {
        BFLog(1, message: "start \(UIControl.Event.touchDown)")
        isRecording = true
        pause()

        let model = PQVoiceModel()
        model.startTime = self.currentAssetProgress.seconds
        model.volume = 100
        recorderManager.voiceModel = model
        recorderManager.startRecord(index: 1)
        // movie?.startProcessing()
        assetPlayer?.volume = 0
        assetPlayer?.play()
    }

    @objc func endRecord() {
        isRecording = false
        // Close out and store the recording.
        recorderManager.voiceModel?.endTime = self.currentAssetProgress.seconds
        recorderManager.endRecord()
        pause()
    }

    func cancleRecord() {
        isRecording = false
        recorderManager.cancleRecord()
        pause()
    }

    /// Removes the most recent narration segment and moves the playhead back to where that
    /// segment started.
    @objc func withdrawAction() {
        pause()
        if let model = itemModels[currItemModelIndex].voiceStickers.last {
            itemModels[currItemModelIndex].voiceStickers.removeLast()
            drawOrUpdateRecordProgessLable()
            if let dur = itemModels[currItemModelIndex].baseMaterial?.duration.seconds, dur > 0 {
                changeProgress(progress: Float(model.startTime / dur))
                isDragingProgressSlder = false
                currentPlayRecordIndex = -1
                hadPrepareToPlayRecord = false
                progressThumV.progress = model.startTime
            }
        }
    }

    @objc func changeVoiceAction() {
        // nextActionHandle?()
        pause()
    }

    @objc func playVideo(btn: UIButton) {
        btn.isSelected ? pause() : play()
    }

    @objc func sliderTouchBegan(sender _: UISlider) {
        isDragingProgressSlder = true
        pause()
    }

    @objc func sliderTouchEnded(sender: UISlider) {
        changeProgress(progress: sender.value)
        isDragingProgressSlder = false
        currentPlayRecordIndex = -1
        hadPrepareToPlayRecord = false
    }

    @objc func sliderValueDidChanged(sender: UISlider) {
        changeProgress(progress: sender.value)
    }

    // MARK: - Permissions
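    /// Checks microphone authorization. Returns `true` only when access is already granted.
    /// When `show` is true, a denied state presents a reminder view that links to Settings and a
    /// not-determined state triggers the system permission prompt.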
    func checkStatus(show: Bool = true) -> Bool {
        let status = AVCaptureDevice.authorizationStatus(for: .audio)
        switch status {
        case .denied, .restricted:
            BFLog(message: "麦克风权限被拒绝,请在设置中打开")
            if show {
                let remindData = BFBaseModel()
                remindData.title = "票圈视频需要访问你的麦克风才能录音"
                remindData.summary = ""

                let remindView = BFRemindView(frame: CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenHeigth))
                remindView.isBanned = true
                remindView.confirmBtn.setTitle("去设置", for: .normal)
                UIApplication.shared.keyWindow?.addSubview(remindView)
                remindView.remindData = remindData
                remindView.remindBlock = { item, _ in
                    if item.tag == 2 {
                        openAppSetting()
                    }
                }
            }
            return false
        case .authorized:
            return true
        case .notDetermined:
            if show {
                requestMicroPhoneAuth()
            }
            return false
        default:
            break
        }
        return false
    }

    func requestMicroPhoneAuth() {
        AVCaptureDevice.requestAccess(for: .audio) { granted in
            if !granted {
                BFLog(message: "麦克风权限被拒绝,请在设置中打开")
            }
        }
    }

    // MARK: - Audio / video handling
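    /// Called from the periodic time observer: finds the first narration segment whose end time
    /// is after `currentT`, (re)builds `recordPlayer` for that segment's file if needed, and,
    /// while normal playback is running, seeks into the segment and starts playing it.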
    func playRecord(at currentT: CMTime) {
        if currentPlayRecordIndex == -3 { // just finished recording; don't play it back
            return
        }
        // Find the first narration segment that has not finished at the current time.
        guard let (shouldPlayRecordIndex, recordedAudio) = itemModels[currItemModelIndex].voiceStickers.enumerated().first(where: { $0.element.endTime > CMTimeGetSeconds(currentT) }) else {
            return
        }
        BFLog(1, message: "当前时间:\(CMTimeGetSeconds(currentT)), 找到的音频:\(recordedAudio.startTime), \(recordedAudio.endTime)")

        // Create the narration player, or swap its item when the target file changes.
        if self.recordPlayer == nil || (self.recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString).lastPathComponent {
            let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
            if let player = self.recordPlayer {
                player.pause()
                if let playItem = player.currentItem {
                    NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
                    recordPlayer?.replaceCurrentItem(with: newItem)
                }
            } else {
                self.recordPlayer = AVPlayer(playerItem: newItem)
            }
            self.recordPlayer!.volume = 1
            // self.recordPlayer?.prepareToPlay()
            currentPlayRecordIndex = -1
            hadPrepareToPlayRecord = false
            BFLog(1, message: "录音播放器初始化(有时候不准)")
            NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: newItem, queue: .main) { [weak self] notify in
                self?.hadPrepareToPlayRecord = false
                self?.currentPlayRecordIndex = -1
            }
        }

        synced(currentPlayRecordIndex) {
            if !hadPrepareToPlayRecord
                && CMTimeGetSeconds(currentT) >= recordedAudio.startTime
                && CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2 // avoids a small rewind right after recording triggering playback of the newest segment
            {
                // Playback should start. Two cases: if it is already playing, skip; if paused and
                // dragged into the middle of a segment, seek first.
                if currentPlayRecordIndex == -1 && self.isNormalPlaying {
                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
                    DispatchQueue.main.async { [weak self] in
                        self?.recordPlayer?.seek(to: CMTime(seconds: second, preferredTimescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { [weak self] finished in
                            if finished && (self?.isNormalPlaying ?? false) {
                                self?.recordPlayer?.play()
                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero))")
                            }
                        })
                    }
                    currentPlayRecordIndex = shouldPlayRecordIndex
                    hadPrepareToPlayRecord = true
                    BFLog(1, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero))")
                }
            }
        }
        BFLog(1, message: "应当播放:\(shouldPlayRecordIndex), 当前播放:\(currentPlayRecordIndex)")

        // if let recordedAudio = recordedAudio {
        //     if shouldPlayRecordIndex != currentPlayRecordIndex {
        //         // Set the new playback resource.
        //         // self.recordPlayer.delegate = self
        //         self.recordPlayer.play()
        //     } else {
        //         // Update playback progress.
        //         let second = CMTimeGetSeconds(duration) - recordedAudio.startTime
        //         self.recordPlayer.seek(to: CMTime(seconds: second, preferredTimescale: 25), toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000))
        //     }
        // }
    }
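
    /// Starts normal playback: the original track is ducked to 0.2 so recorded narration stays
    /// audible, and the GPUImage pipeline resumes processing frames.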
    func play() {
        BFLog(1, message: "开始播放 \(self.currentAssetProgress.seconds)")
        isNormalPlaying = true
        assetPlayer?.volume = 0.2
        movie?.startProcessing()
        self.assetPlayer?.play()
    }

    /// Pauses both players and snaps the asset player back to `currentAssetProgress`.
    func pause() {
        BFLog(1, message: "暂停播放")
        isNormalPlaying = false
        // movie?.cancelProcessing()
        assetPlayer?.pause()
        recordPlayer?.pause()
        assetPlayer?.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000), completionHandler: { _ in
        })
    }
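
    /// Builds one `BFRecordItemModel` per selected `PHAsset`, requests a playable `AVPlayerItem`
    /// for each (only the first is installed into the audio/video players), and fetches the
    /// underlying `AVURLAsset` to drive the thumbnail progress strip.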
    func fetchVideo() {
        if self.assets.count > 0 {
            currItemModelIndex = 0
            for (index, asset) in self.assets.enumerated() {
                let itemModel = BFRecordItemModel()
                itemModel.index = index
                itemModels.append(itemModel)

                let options = PHVideoRequestOptions()
                options.isNetworkAccessAllowed = true
                options.deliveryMode = .automatic
                PHImageManager.default().requestPlayerItem(forVideo: asset, options: options, resultHandler: { [weak self] playerItem, info in
                    guard let item = playerItem else {
                        cShowHUB(superView: nil, msg: "视频获取失败")
                        return
                    }
                    if index == 0 {
                        self?.setAudioPlay(item: item)
                        self?.setVideoPlay(item: item)
                    }
                })

                // let option = PHImageRequestOptions()
                // option.isNetworkAccessAllowed = true // allow downloading from iCloud
                // option.resizeMode = .fast
                // option.deliveryMode = .highQualityFormat
                // PHImageManager.default().requestImage(for: asset,
                //                                       targetSize: self.view.bounds.size,
                //                                       contentMode: .aspectFit,
                //                                       options: option)
                // { (image, nil) in
                //     // Set the first frame / cover image.
                //     if image != nil {
                //         let pic = GPUImagePicture(image: image)
                //         let filet = GPUImageFilter()
                //         pic?.addTarget(filet)
                //         filet.addTarget(self.playView)
                //         pic?.processImage()
                //     }
                // }

                PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: { [weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
                    if let urlasset = asset as? AVURLAsset {
                        self?.avasset = urlasset
                        itemModel.baseMaterial = urlasset
                        DispatchQueue.main.async { [weak self] in
                            self?.progressThumV.videoAsset = urlasset
                            self?.progressThumV.isHidden = false
                        }
                    }
                })
            }
        }
    }
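
    /// Rebuilds the GPUImage preview chain for the given player item:
    /// GPUImageMovie -> GPUImageFilter -> playView, then starts processing frames.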
    func setVideoPlay(item: AVPlayerItem) {
        if movie != nil {
            cleanMovieTarget()
        }
        movie = GPUImageMovie(playerItem: item)
        // movie?.runBenchmark = true
        movie?.playAtActualSpeed = true
        let filter = GPUImageFilter()
        movie?.addTarget(filter)
        filter.addTarget(playView)
        movie?.startProcessing()
    }
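
    /// Installs `item` on the asset player. On first use this also adds a periodic time observer
    /// (every 1/100 s) that drives `playRecord(at:)`, `currentAssetProgress`, the time label and
    /// the thumbnail progress strip, plus an end-of-playback observer that rewinds to zero and
    /// finishes any in-flight recording.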
    func setAudioPlay(item: AVPlayerItem) {
        if let playItem = assetPlayer?.currentItem {
            NotificationCenter.default.removeObserver(self, name: .AVPlayerItemDidPlayToEndTime, object: playItem)
            assetPlayer?.replaceCurrentItem(with: item)
        } else {
            assetPlayer = AVPlayer(playerItem: item)
            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) { [weak self] time in
                // Progress monitoring: only while playing normally or recording.
                if !((self?.isNormalPlaying ?? false) || (self?.isRecording ?? false)) {
                    return
                }
                // Play the narration segment that matches this time.
                self?.playRecord(at: time)
                self?.currentAssetProgress = time
                BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
                if CMTimeGetSeconds(item.duration) > 0, !(self?.isDragingProgressSlder ?? false) {
                    DispatchQueue.main.async { [weak self] in
                        self?.progreddL.text = String(format: "%.2f / %.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
                        self?.progressThumV.progress = time.seconds
                    }
                }
            }
        }
        NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: assetPlayer?.currentItem, queue: .main) { [weak self] notify in
            BFLog(1, message: "AVPlayerItemDidPlayToEndTime = \(notify)")
            self?.isNormalPlaying = false
            self?.assetPlayer?.seek(to: CMTime.zero)
            self?.currentPlayRecordIndex = -1
            if self?.isRecording ?? false {
                self?.endRecord()
            }
        }
    }
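
    /// Tears down the GPUImage chain: stops processing, detaches every target, and purges the
    /// shared framebuffer cache so the old movie can be released.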
    func cleanMovieTarget() {
        movie?.cancelProcessing()
        movie?.targets().forEach({ target in
            if let objc = target as? GPUImageOutput {
                objc.removeAllTargets()
            }
        })
        movie?.removeAllTargets()
        movie?.removeFramebuffer()
        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
    }
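
    /// Splits the timeline at the recorded segments: for each narration segment (assumed to be in
    /// chronological order) it emits the gap before it, if any, followed by the segment itself.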
    func generationTimeRanges() -> [CMTimeRange] {
        var ranges = [CMTimeRange]()
        var start: Double = 0
        for model in itemModels[currItemModelIndex].voiceStickers {
            if model.startTime > start {
                let range = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 100), duration: CMTime(seconds: model.startTime - start, preferredTimescale: 100))
                ranges.append(range)
            }
            ranges.append(CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 100), end: CMTime(seconds: model.endTime, preferredTimescale: 100)))
            start = model.endTime
        }
        return ranges
    }

    // MARK: - Recording progress drawing
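    /// Moves the playhead to `progress` (0...1 of the item duration): updates
    /// `currentAssetProgress`, refreshes the time label, and seeks the asset player with tight tolerance.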
    func changeProgress(progress: Float) {
        if let duration = assetPlayer?.currentItem?.duration {
            self.currentAssetProgress = CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100)
            DispatchQueue.main.async { [weak self] in
                guard let sself = self else {
                    return
                }
                sself.progreddL.text = String(format: "%.2f", CMTimeGetSeconds(sself.currentAssetProgress))
            }
            assetPlayer?.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000), toleranceAfter: CMTime(value: 1, timescale: 1000)) { _ in
            }
        }
    }
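
    /// Redraws the green bars on the thumbnail progress strip, one per narration segment,
    /// positioned proportionally to the segment's start/end time within the material duration.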
    func drawOrUpdateRecordProgessLable() {
        DispatchQueue.main.async { [weak self] in
            guard let sself = self else {
                return
            }
            sself.progressThumV.progessIndicateBackV.subviews.forEach { vv in
                vv.removeFromSuperview()
            }
            if let totalDur = sself.itemModels[sself.currItemModelIndex].baseMaterial?.duration.seconds, totalDur > 0, sself.itemModels[sself.currItemModelIndex].voiceStickers.count > 0 {
                let width = sself.progressThumV.progessIndicateBackV.width
                let height = sself.progressThumV.progessIndicateBackV.height
                sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { model in
                    let lineV = UIView(frame: CGRect(x: model.startTime * width / totalDur, y: 0, width: (model.endTime - model.startTime) * width / totalDur, height: height))
                    lineV.backgroundColor = ThemeStyleGreen()
                    sself.progressThumV.progessIndicateBackV.addSubview(lineV)
                }
            }
        }
    }
}

extension BFRecordScreenController: GPUImageMovieDelegate {
    public func didCompletePlayingMovie() {
        BFLog(1, message: "播放结束")
        currentPlayRecordIndex = -1
    }
}

extension BFRecordScreenController: AVAudioRecorderDelegate {
    public func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) {
        BFLog(1, message: "录音结束")
    }
}

extension BFRecordScreenController: AVAudioPlayerDelegate {
    public func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) {
        BFLog(1, message: "录音播放结束")
    }
}