BFRecordItemModel.swift 8.6 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218
  1. //
  2. // BFRecordItemModel.swift
  3. // BFRecordScreenKit
  4. //
  5. // Created by 胡志强 on 2021/12/6.
  6. //
  7. import BFCommonKit
  8. import BFMediaKit
  9. import Foundation
  10. import Photos
/// One slice of the material's timeline, produced by `generationTimeRanges()`:
/// either a segment covered by a voice recording or a silent gap between recordings.
struct SplitRecordRange {
    /// `true` when this slice is covered by a recording segment, `false` for a gap.
    var isRecord: Bool = false
    /// The slice's time range on the material's timeline.
    var range: CMTimeRange
    /// Index of the matching model in `voiceStickers` for recorded slices; -1 for gaps.
    var index: Int
}
/// Model for a single recording material (photo or video) on the record screen:
/// holds the resolved asset, its stickers/subtitles, undo history, and preview images.
public class BFRecordItemModel: NSObject {
    // var baseMaterial : AVURLAsset?
    /// Local file path of the material; set to "image" for photos, and to the
    /// resolved file path of the AVURLAsset for videos (see `fetchAVUrlAsset`).
    var localPath: String?
    /// Material duration in seconds (sic: "Duraion"), taken from the PHAsset.
    var materialDuraion: Double = 0.0
    /// Called when the cover image has been fetched.
    var fetchCoverImgCallBack: ((UIImage) -> Void)?
    /// Called when the AVURLAsset has been resolved.
    var fetchAVUrlAssetCallBack: (() -> Void)?
    /// Called when the AVPlayerItem request completes (item may be absent).
    var fetchPlayItemCallBack: ((BFRecordItemModel?) -> Void)?
    var dealedDurationRanges = [SplitRecordRange]() // time ranges split around recordings; computed when composing/exporting
    public var voiceStickers = [PQVoiceModel]() // voice recording segments
    public var videoStickers = [PQEditVisionTrackMaterialsModel]() // computed when composing/exporting
    public var imageStickers = [PQEditVisionTrackMaterialsModel]() // image materials
    public var titleStickers = [PQEditSubTitleModel]() // subtitle stickers
    var events = [WithDrawModel]() // action history, used for undo
    public var coverImg: UIImage? // cover image
    public var thumbImgs = [UIImage]() // thumbnail collection
    public var playItem: AVPlayerItem? // the video's AVPlayerItem
    public var videoAsset: AVURLAsset? // the video's AVURLAsset
    public var mediaType: StickerType? // material type (.IMAGE / .VIDEO)
    public var progress: Double = 0 // update progress
    public var index = 0 // material index
    public var width = 0 // material pixel width
    public var height = 0 // material pixel height
    public var videoDegress : UInt = 0 // video capture rotation: 90/270 landscape, 0/180 portrait
  39. func initOriginData(phasset: PHAsset) {
  40. width = phasset.pixelWidth
  41. height = phasset.pixelHeight
  42. materialDuraion = phasset.duration
  43. fetchCoverImage(phasset)
  44. fetchAVUrlAsset(phasset)
  45. if phasset.mediaType == .image {
  46. mediaType = .IMAGE
  47. localPath = "image"
  48. } else if phasset.mediaType == .video {
  49. mediaType = .VIDEO
  50. fetchPlayItem(phasset)
  51. }
  52. }
  53. func fetchCoverImage(_ phasset: PHAsset) {
  54. let option = PHImageRequestOptions()
  55. option.isNetworkAccessAllowed = true // 允许下载iCloud的图片
  56. option.resizeMode = .fast
  57. option.deliveryMode = .highQualityFormat
  58. PHImageManager.default().requestImage(for: phasset,
  59. targetSize: CGSize(width: width, height: height),
  60. contentMode: .aspectFit,
  61. options: option) { [weak self] image, _ in
  62. // 设置首帧/封面
  63. if image != nil {
  64. self?.coverImg = image
  65. self?.fetchCoverImgCallBack?(image!)
  66. }
  67. }
  68. }
  69. func fetchPlayItem(_ phasset: PHAsset) {
  70. let options = PHVideoRequestOptions()
  71. options.isNetworkAccessAllowed = true
  72. options.deliveryMode = .automatic
  73. PHImageManager.default().requestPlayerItem(forVideo: phasset, options: options, resultHandler: { [weak self] playerItem, _ in
  74. guard let item = playerItem else {
  75. self?.fetchPlayItemCallBack?(self)
  76. return
  77. }
  78. self?.playItem = item
  79. self?.fetchPlayItemCallBack?(self)
  80. })
  81. }
  82. public func fetchAVUrlAsset(_ phasset: PHAsset) {
  83. let options = PHVideoRequestOptions()
  84. options.isNetworkAccessAllowed = true
  85. options.deliveryMode = .automatic
  86. PHCachingImageManager().requestAVAsset(forVideo: phasset, options: options, resultHandler: { [weak self] (asset: AVAsset?, _: AVAudioMix?, _) in
  87. guard let sself = self else {
  88. return
  89. }
  90. if let videoAsset = (asset as? AVURLAsset) {
  91. sself.localPath = (videoAsset.url.absoluteString.removingPercentEncoding)?.replacingOccurrences(of: "file://", with: "")
  92. sself.videoAsset = videoAsset
  93. sself.fetchAVUrlAssetCallBack?()
  94. }
  95. })
  96. }
  97. func generationTimeRanges(needSort _: Bool = false) {
  98. dealedDurationRanges.removeAll()
  99. var start: Double = 0
  100. var list: [PQVoiceModel]
  101. list = voiceStickers.sorted { model1, model2 in
  102. model1.startTime < model2.startTime
  103. }
  104. for model in list {
  105. if model.startTime > start {
  106. //
  107. let range = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), duration: CMTime(seconds: model.startTime - start, preferredTimescale: 1000))
  108. dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: range, index: -1))
  109. }
  110. let ind = voiceStickers.firstIndex(of: model)
  111. let range = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
  112. dealedDurationRanges.append(SplitRecordRange(isRecord: true, range: range, index: ind ?? -1))
  113. start = model.endTime
  114. }
  115. if start < materialDuraion {
  116. let range = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), end: CMTime(seconds: materialDuraion, preferredTimescale: 1000))
  117. dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: range, index: -1))
  118. }
  119. }
  120. /// 视频分解成帧
  121. /// - parameter frames : 需要取的帧数
  122. /// - parameter firstImagesCount : 获取首先N张连续视频帧后先返回给调用方使用作为缓冲
  123. /// - parameter splitCompleteClosure : 回调
  124. func splitVideoFileUrlFps(frames: Int, firstImagesCount:Int = 0, splitCompleteClosure: @escaping ((Bool, [UIImage]) -> Void)) {
  125. guard let urlAsset = videoAsset, urlAsset.duration.seconds > 0 else {
  126. return
  127. }
  128. var splitImages = [UIImage]()
  129. var times = [NSValue]()
  130. // let urlAsset = AVURLAsset(url: URL(fileURLWithPath: localPath))
  131. let start = 0
  132. // let end = Int(urlAsset.duration.seconds * Float64(fps))
  133. let fps = Double(frames) / urlAsset.duration.seconds
  134. for i in start..<frames {
  135. let timeValue = NSValue(time: CMTimeMake(value: Int64(i * 1000), timescale: Int32(fps * 1000)))
  136. times.append(timeValue)
  137. }
  138. let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
  139. imgGenerator.requestedTimeToleranceBefore = CMTime.zero
  140. imgGenerator.requestedTimeToleranceAfter = CMTime.zero
  141. imgGenerator.appliesPreferredTrackTransform = true
  142. let timesCount = times.count
  143. var cocu = 0
  144. // 获取每一帧的图片
  145. imgGenerator.generateCGImagesAsynchronously(forTimes: times) { _, image, _, result, _ in
  146. cocu += 1
  147. switch result {
  148. case AVAssetImageGenerator.Result.cancelled:
  149. BFLog(1, message: "splitVideo: cancel")
  150. case AVAssetImageGenerator.Result.failed:
  151. BFLog(1, message: "splitVideo: failed")
  152. case AVAssetImageGenerator.Result.succeeded:
  153. let framImg = UIImage(cgImage: image!)
  154. splitImages.append(framImg)
  155. // BFLog(1, message: "aaa: \(requestedTime.seconds) - \(actualTime.seconds)")
  156. @unknown default:
  157. break
  158. }
  159. if cocu == firstImagesCount {
  160. splitCompleteClosure(false, splitImages)
  161. }
  162. if cocu == timesCount { // 最后一帧时 回调赋值
  163. splitCompleteClosure(true, splitImages)
  164. BFLog(1, message: "splitVideo: complete")
  165. }
  166. }
  167. }
  168. /// 视频分解成帧
  169. /// - parameter fileUrl : 视频地址
  170. /// - parameter fps : 自定义帧数 每秒内取的帧数
  171. /// - parameter splitCompleteClosure : 回调
  172. func getThumbImageAtTime(urlAsset: AVURLAsset, time: CMTime) -> UIImage? {
  173. let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
  174. imgGenerator.requestedTimeToleranceBefore = CMTime.zero
  175. imgGenerator.requestedTimeToleranceAfter = CMTime.zero
  176. var cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
  177. if cgImg == nil {
  178. imgGenerator.requestedTimeToleranceBefore = CMTime.positiveInfinity
  179. imgGenerator.requestedTimeToleranceAfter = CMTime.positiveInfinity
  180. cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
  181. }
  182. return cgImg == nil ? nil : UIImage(cgImage: cgImg!)
  183. }
  184. }