//
//  BFRecordItemModel.swift
//  BFRecordScreenKit
//
//  Created by 胡志强 on 2021/12/6.
//

import BFCommonKit
import BFMediaKit
import Foundation
import Photos
// One contiguous slice of the material timeline: either a recorded voice
// segment or the silent gap between segments. Produced by
// `BFRecordItemModel.generationTimeRanges()` and consumed when composing /
// exporting the final video.
struct SplitRecordRange {
    var isRecord: Bool = false // true when this range is covered by a recording
    var range: CMTimeRange // time span within the source material
    var index: Int // index into `voiceStickers` for recorded ranges; -1 for gaps
}
/// Model for a single material (photo or video) in the record screen:
/// holds the asset, its stickers (voice / video / image / subtitle), and
/// helpers to fetch cover images, player items, and thumbnail frames.
public class BFRecordItemModel: NSObject {
    // var baseMaterial : AVURLAsset?

    /// Local path of the material: the placeholder "image" for photos,
    /// a resolved file-system path for videos (filled by fetchAVUrlAsset).
    var localPath: String?
    /// Source material duration in seconds.
    /// NOTE(review): spelling "Duraion" kept for compatibility with callers.
    var materialDuraion: Double = 0.0
    /// Fired once the cover image has been fetched.
    var fetchCoverImg: ((UIImage) -> Void)?
    /// Fired once the AVURLAsset for a video has been resolved.
    var fetchAVUrlAsset: ((AVURLAsset) -> Void)?
    /// Fired once the AVPlayerItem for a video has been loaded.
    var fetchPlayItem: ((AVPlayerItem) -> Void)?
    /// Recording split ranges; recomputed for composition/export.
    var dealedDurationRanges = [SplitRecordRange]()

    public var voiceStickers = [PQVoiceModel]() // recorded voice segments
    public var videoStickers = [PQEditVisionTrackMaterialsModel]() // computed at export time
    public var imageStickers = [PQEditVisionTrackMaterialsModel]() // image materials
    public var titleStickers = [PQEditSubTitleModel]() // subtitle stickers

    var events = [WithDrawModel]() // action log, supports undo
    public var coverImg: UIImage? // cover image
    public var thumbImgs = [UIImage]() // thumbnail collection
    public var playItem: AVPlayerItem? // video player item
    public var videoAsset: AVURLAsset? // video asset
    public var mediaType: StickerType? // material type
    public var progress: Double = 0 // loading/processing progress
    public var index = 0 // material index
    public var width = 0 // material width in pixels
    public var height = 0 // material height in pixels
    public var videoDegress: UInt = 0 // capture rotation: 90/270 landscape, 0/180 portrait

    /// Seeds this model from a `PHAsset` and kicks off the asynchronous
    /// fetches (cover image, AVURLAsset, and — for videos — the player item).
    func initOriginData(phasset: PHAsset) {
        width = phasset.pixelWidth
        height = phasset.pixelHeight
        if phasset.mediaType == .image {
            mediaType = .IMAGE
            localPath = "image"
        } else if phasset.mediaType == .video {
            mediaType = .VIDEO
            fetchPlayItem(phasset)
        }
        fetchCoverImage(phasset)
        // Harmless no-op for pure images: requestAVAsset returns nil for them.
        fetchAVUrlAsset(phasset)
    }

    /// Requests the cover image for the asset and forwards it to
    /// `coverImg` / `fetchCoverImg`.
    func fetchCoverImage(_ phasset: PHAsset) {
        let option = PHImageRequestOptions()
        option.isNetworkAccessAllowed = true // allow downloading iCloud originals
        option.resizeMode = .fast
        option.deliveryMode = .highQualityFormat
        PHImageManager.default().requestImage(for: phasset,
                                              targetSize: CGSize(width: width, height: height),
                                              contentMode: .aspectFit,
                                              options: option) { [weak self] image, _ in
            // First frame / cover. Guarded unwrap instead of the previous
            // force-unwrap so a nil result cannot crash.
            if let image = image {
                self?.coverImg = image
                self?.fetchCoverImg?(image)
            }
        }
    }

    /// Requests an `AVPlayerItem` for a video asset and forwards it to
    /// `playItem` / `fetchPlayItem`. Shows a HUD on failure.
    func fetchPlayItem(_ phasset: PHAsset) {
        let options = PHVideoRequestOptions()
        options.isNetworkAccessAllowed = true
        options.deliveryMode = .automatic
        PHImageManager.default().requestPlayerItem(forVideo: phasset, options: options, resultHandler: { [weak self] playerItem, _ in
            guard let item = playerItem else {
                cShowHUB(superView: nil, msg: "视频获取失败:\(self?.index ?? 0)")
                return
            }
            self?.playItem = item
            self?.fetchPlayItem?(item)
        })
    }

    /// Resolves the `AVURLAsset` behind a video `PHAsset`, filling
    /// `materialDuraion`, `localPath` and `videoAsset`, then notifying
    /// `fetchAVUrlAsset`.
    func fetchAVUrlAsset(_ phasset: PHAsset) {
        let options = PHVideoRequestOptions()
        options.isNetworkAccessAllowed = true
        options.deliveryMode = .automatic
        // NOTE(review): a throw-away PHCachingImageManager gets no caching
        // benefit; kept as-is to preserve behavior — consider .default().
        PHCachingImageManager().requestAVAsset(forVideo: phasset, options: options, resultHandler: { [weak self] (asset: AVAsset?, _: AVAudioMix?, _) in
            if let videoAsset = asset as? AVURLAsset {
                self?.materialDuraion = videoAsset.duration.seconds
                // Strip the "file://" scheme to obtain a plain filesystem path.
                self?.localPath = (videoAsset.url.absoluteString.removingPercentEncoding)?.replacingOccurrences(of: "file://", with: "")
                self?.videoAsset = videoAsset
                self?.fetchAVUrlAsset?(videoAsset)
            }
        })
    }

    /// Rebuilds `dealedDurationRanges`: recorded voice segments (sorted by
    /// start time) interleaved with the silent gaps between them, used when
    /// composing the export timeline.
    func generationTimeRanges(needSort _: Bool = false) {
        dealedDurationRanges.removeAll()
        var cursor: Double = 0
        // Sort by start time while carrying each sticker's original index, so
        // duplicate (Equatable-equal) models still map to the correct slot —
        // the previous firstIndex(of:) lookup was O(n²) and ambiguous.
        let ordered = voiceStickers.enumerated().sorted { $0.element.startTime < $1.element.startTime }
        for (originalIndex, model) in ordered {
            if model.startTime > cursor {
                // Silent gap before this recording.
                let gap = CMTimeRange(start: CMTime(seconds: cursor, preferredTimescale: 1000),
                                      duration: CMTime(seconds: model.startTime - cursor, preferredTimescale: 1000))
                dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: gap, index: -1))
            }
            let recorded = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000),
                                       end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
            dealedDurationRanges.append(SplitRecordRange(isRecord: true, range: recorded, index: originalIndex))
            cursor = model.endTime
        }
        if cursor < materialDuraion {
            // Trailing un-recorded tail of the material.
            let tail = CMTimeRange(start: CMTime(seconds: cursor, preferredTimescale: 1000),
                                   end: CMTime(seconds: materialDuraion, preferredTimescale: 1000))
            dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: tail, index: -1))
        }
    }

    /// Decomposes the video into evenly spaced still frames.
    /// - parameter frames: total number of frames to extract
    /// - parameter firstImagesCount: after this many frames have arrived, the
    ///   closure fires once early (complete == false) as a render buffer
    /// - parameter splitCompleteClosure: (isComplete, imagesSoFar); invoked on
    ///   the generator's background queue
    func splitVideoFileUrlFps(frames: Int, firstImagesCount: Int = 0, splitCompleteClosure: @escaping ((Bool, [UIImage]) -> Void)) {
        guard let urlAsset = videoAsset, urlAsset.duration.seconds > 0 else {
            return
        }
        var splitImages = [UIImage]()
        var times = [NSValue]()
        let fps = Double(frames) / urlAsset.duration.seconds
        for i in 0 ..< frames {
            // Seconds-based CMTime: the previous Int32(fps * 1000) timescale
            // truncated fractional frame rates and drifted every sample time.
            times.append(NSValue(time: CMTime(seconds: Double(i) / fps, preferredTimescale: 1000)))
        }
        let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
        imgGenerator.requestedTimeToleranceBefore = CMTime.zero
        imgGenerator.requestedTimeToleranceAfter = CMTime.zero
        imgGenerator.appliesPreferredTrackTransform = true
        let timesCount = times.count
        var completed = 0
        // One callback per requested frame.
        imgGenerator.generateCGImagesAsynchronously(forTimes: times) { _, image, _, result, _ in
            completed += 1
            switch result {
            case .cancelled:
                BFLog(1, message: "splitVideo: cancel")
            case .failed:
                BFLog(1, message: "splitVideo: failed")
            case .succeeded:
                // Guarded unwrap instead of the previous force-unwrap.
                if let cgImage = image {
                    splitImages.append(UIImage(cgImage: cgImage))
                }
            @unknown default:
                break
            }

            if completed == firstImagesCount {
                splitCompleteClosure(false, splitImages) // early buffer hand-off
            }

            if completed == timesCount { // last frame: final hand-off
                splitCompleteClosure(true, splitImages)
                BFLog(1, message: "splitVideo: complete")
            }
        }
    }

    /// Grabs a single thumbnail at `time` from `urlAsset`; if an exact-time
    /// grab fails (e.g. no sample at that PTS), retries accepting the nearest
    /// available frame. Returns nil when both attempts fail.
    func getThumbImageAtTime(urlAsset: AVURLAsset, time: CMTime) -> UIImage? {
        let imgGenerator = AVAssetImageGenerator(asset: urlAsset)
        imgGenerator.requestedTimeToleranceBefore = CMTime.zero
        imgGenerator.requestedTimeToleranceAfter = CMTime.zero
        var cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
        if cgImg == nil {
            imgGenerator.requestedTimeToleranceBefore = CMTime.positiveInfinity
            imgGenerator.requestedTimeToleranceAfter = CMTime.positiveInfinity
            cgImg = try? imgGenerator.copyCGImage(at: time, actualTime: nil)
        }
        return cgImg.map { UIImage(cgImage: $0) }
    }
}