PQPlayerViewModel.swift 46 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854
  1. //
  2. // PQPlayerViewModel.swift
  3. // PQSpeed
  4. //
  5. // Created by ak on 2021/1/27.
  6. // Copyright © 2021 BytesFlow. All rights reserved.
  7. // 视频渲染相关逻辑方法
  8. import RealmSwift
  9. import UIKit
  10. import BFCommonKit
  11. import BFUIKit
  12. open class PQPlayerViewModel: NSObject {
/// Converts the sticker info of each section into filters; shared by the edit, overview and export flows.
/// - Parameter sections: the section models (filter groups)
/// - Parameter inputSize: render size forwarded to the subtitle filters
/// - Returns: the filter chain and the audio file URLs (one entry per section that has audio)
public class func partModelToFilters(sections: [PQEditSectionModel], inputSize: CGSize = .zero) -> ([PQBaseFilter], [URL]) {
    // Audio file URLs of all sections.
    var audioFiles: Array = Array<URL>.init()
    // All filters to render.
    var filters: Array = Array<PQBaseFilter>.init()
    /*
     1. Default material durations
        image: 2s
        video: played once at 1x speed
        GIF:   played once at 1x speed
     2. Material fitting rules
        a) With voice-over audio (i.e. there is text):
           fit coefficient = voice-over duration / total visual duration
           final visual duration = original visual duration * coefficient
        b) No voice-over and no text: use the material's default duration.
        c) No voice-over but with text:
           fit coefficient = total video duration / total text duration
           actual duration of each sentence = its original duration * coefficient
     */
    // Accumulated duration of the parts processed so far; offsets every timeline in/out below.
    var partTotaDuration: Float64 = 0
    for section in sections {
        autoreleasepool {
            // Prefer the mixed audio (voice padded with silence) when present.
            if section.mixEmptyAuidoFilePath.count > 0 {
                audioFiles.append(URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath.replacingOccurrences(of: documensDirectory, with: "")))
                BFLog(message: "add mixEmptyAuidoFilePath mixEmptyAuidoFilePath")
            } else {
                if section.audioFilePath.count > 0 {
                    audioFiles.append(URL(fileURLWithPath: documensDirectory + section.audioFilePath.replacingOccurrences(of: documensDirectory, with: "")))
                    BFLog(message: "add audioFilePath audioFilePath")
                }
            }
            var totalDuration: Float64 = 0
            // Create the matching filter for every enabled sticker, assigning its timeline in/out.
            for sticker in section.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
                autoreleasepool {
                    sticker.timelineIn = totalDuration + partTotaDuration
                    totalDuration = totalDuration + sticker.aptDuration
                    sticker.timelineOut = totalDuration + partTotaDuration
                    BFLog(message: "创建 filter start :\(sticker.timelineIn) end :\(sticker.timelineOut) type is \(sticker.type)")
                    if (sticker.aptDuration > 0) {
                        if sticker.type == StickerType.IMAGE.rawValue {
                            let imageFilter = PQImageFilter(sticker: sticker)
                            filters.append(imageFilter)
                        } else if sticker.type == StickerType.VIDEO.rawValue {
                            let videoFilter = PQMovieFilter(movieSticker: sticker)
                            filters.append(videoFilter)
                        } else if sticker.type == StickerType.GIF.rawValue {
                            let gifFilter = PQGifFilter(sticker: sticker)
                            filters.append(gifFilter)
                        }
                    } else {
                        BFLog(message: "sticker.aptDuration is error create filter error!!! \(sticker.aptDuration )")
                    }
                }
            }
            // Multi-part subtitles start after the previous parts' total duration, so recompute their times.
            var newSubtitleData: [PQEditSubTitleModel] = Array()
            // Subtitles transcribed from recorded audio take priority over manually entered ones.
            let recorderSubtitle = List<PQEditSubTitleModel>()
            if section.sectionTimeline?.visionTrack?.getSubtitleMatraislInfo() != nil {
                for subtitleMatraislInfo in section.sectionTimeline!.visionTrack!.getSubtitleMatraislInfo() {
                    BFLog(message: "有录音字幕")
                    let editSubTitleModel = PQEditSubTitleModel()
                    editSubTitleModel.text = subtitleMatraislInfo.subtitleInfo?.text ?? ""
                    editSubTitleModel.timelineIn = subtitleMatraislInfo.timelineIn
                    editSubTitleModel.timelineOut = subtitleMatraislInfo.timelineOut
                    recorderSubtitle.append(editSubTitleModel)
                }
            }
            for (index, subTitle) in recorderSubtitle.count > 0 ? recorderSubtitle.enumerated() : section.subTitles.enumerated() {
                BFLog(message: "有配音字幕")
                let newSubtitle = PQEditSubTitleModel()
                newSubtitle.timelineIn = subTitle.timelineIn
                newSubtitle.timelineOut = subTitle.timelineOut
                newSubtitle.text = subTitle.text.replacingOccurrences(of: "\n", with: "")
                BFLog(message: "第\(index)个字幕 subTitle old start : \(newSubtitle.timelineIn) end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
                // Shift the subtitle by the accumulated part duration while keeping its own length.
                let duration: Float64 = (newSubtitle.timelineOut - newSubtitle.timelineIn)
                newSubtitle.timelineIn = partTotaDuration + newSubtitle.timelineIn
                newSubtitle.timelineOut = newSubtitle.timelineIn + duration
                BFLog(message: "第\(index)个字幕 subTitle new start : \(newSubtitle.timelineIn) end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
                newSubtitleData.append(newSubtitle)
                // let subTitle = PQSubTitleFilter(st: [newSubtitle], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
                // filters.append(subTitle)
            }
            // With no visual material the subtitles render in "big" mode; build one subtitle filter when data exists.
            // for subtitle in newSubtitleData{
            //     let subTitleFilter = PQSubTitleFilter(st: [newSubtitleData[0]], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0,inputSize: inputSize)
            //     filters.append(subTitleFilter)
            // }
            if newSubtitleData.count > 0 {
                let subTitleFilter = PQSubTitleFilter(st: newSubtitleData, isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
                filters.append(subTitleFilter)
                // DispatchQueue.main.async {
                // }
            }
            var tempDuration = section.allStickerAptDurationNoRound() == 0 ? section.sectionDuration : section.allStickerAptDurationNoRound()
            BFLog(message: "tempDuration 1 is \(tempDuration)")
            // If the audio was lengthened by appending silence, use the longer audio's duration.
            if section.mixEmptyAuidoFilePath.count > 0 {
                BFLog(message: "有拼接的数据")
                let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath), options: avAssertOptions)
                if tempDuration <= audioAsset.duration.seconds {
                    tempDuration = audioAsset.duration.seconds
                } else {
                    BFLog(message: "音频文件时长为0?")
                }
            }
            BFLog(message: "tempDuration 2 is \(tempDuration)")
            partTotaDuration = partTotaDuration + tempDuration
        }
        BFLog(message: "audioFiles 声音文件总数\(audioFiles.count)")
    }
    // Path examples seen here look like "/Resource/DownloadImages/images_1631358852.933532".
    return (filters, audioFiles)
}
  135. public class func calculationStickAptDurationReal(currentPart: PQEditSectionModel, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
  136. // XXXXXX如果 没有选择发音人 就算有自动的转的声音文件也不按声音时长计算,都是素材原有时长
  137. // let audioTotalDuration: Float64 = Float64(currentPart.sectionDuration)
  138. // 1,计算贴纸所有原始时长
  139. var stickerTotalDuration: Float64 = 0
  140. for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
  141. var stikcerDuration: Float64 = sticker.duration
  142. if sticker.videoIsCrop() {
  143. BFLog(message: "这个视频有裁剪 \(sticker.locationPath)")
  144. stikcerDuration = sticker.out - sticker.model_in
  145. }
  146. stickerTotalDuration = stickerTotalDuration + stikcerDuration
  147. }
  148. // 真人声音时长
  149. var realAudioDuration = 0.0
  150. BFLog(message: "currentPart.audioFilePath is \(currentPart.audioFilePath)")
  151. if currentPart.audioFilePath.count > 0 {
  152. let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath), options: avAssertOptions)
  153. realAudioDuration = audioAsset.duration.seconds
  154. }
  155. BFLog(message: "所有素材的总时 \(stickerTotalDuration) 文字转语音的时长:\(realAudioDuration)")
  156. if stickerTotalDuration == 0 && realAudioDuration == 0 {
  157. DispatchQueue.main.async {
  158. completeHander(currentPart)
  159. }
  160. return
  161. }
  162. // 所有视频素材原有时长 > 音频文件(字幕时长 有可能有声音,有可能没有声音自动转的)
  163. if stickerTotalDuration - realAudioDuration > 0.01 {
  164. // 要创建空文件加长原有声音
  165. let tool = PQCreateEmptyWAV(sampleRate: 8000,
  166. channel: 1,
  167. duration: stickerTotalDuration - realAudioDuration,
  168. bit: 16)
  169. let timeInterval: TimeInterval = Date().timeIntervalSince1970
  170. var audioFileTempPath = exportAudiosDirectory
  171. if !directoryIsExists(dicPath: audioFileTempPath) {
  172. BFLog(message: "文件夹不存在 \(audioFileTempPath)")
  173. createDirectory(path: audioFileTempPath)
  174. }
  175. audioFileTempPath.append("empty_\(timeInterval).wav")
  176. tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
  177. var tempUrls: Array = NSArray() as! [URL]
  178. if currentPart.audioFilePath.count > 0 {
  179. BFLog(message: "currentPart.audioFilePath is \(String(describing: currentPart.audioFilePath))")
  180. tempUrls.append(URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath))
  181. }
  182. tempUrls.append(URL(fileURLWithPath: audioFileTempPath))
  183. PQPlayerViewModel.mergeAudios(urls: tempUrls) { completURL in
  184. if completURL == nil {
  185. BFLog(message: "合并文件有问题!")
  186. return
  187. }
  188. // file:///var/mobile/Containers/Data/Application/2A008644-31A6-4D7E-930B-F1099F36D577/Documents/Resource/ExportAudios/merge_1618817019.789495.m4a
  189. let audioAsset = AVURLAsset(url: completURL!, options: avAssertOptions)
  190. BFLog(message: "completURL mix : \(String(describing: completURL)) audioFilePath durtion \(audioAsset.duration.seconds)")
  191. currentPart.mixEmptyAuidoFilePath = completURL!.absoluteString.replacingOccurrences(of: documensDirectory, with: "").replacingOccurrences(of: "file://", with: "")
  192. currentPart.sectionDuration = audioAsset.duration.seconds
  193. BFLog(message: "stickerTotalDuration is \(stickerTotalDuration) mixEmptyAuidoFilePath 设置后 是\(currentPart.mixEmptyAuidoFilePath) 时长是:\(currentPart.sectionDuration)")
  194. // 1.2)计算贴纸的逻辑显示时长
  195. for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
  196. var tempDuration = sticker.duration
  197. if sticker.videoIsCrop() {
  198. tempDuration = sticker.out - sticker.model_in
  199. BFLog(message: "这个视频有裁剪后:\(tempDuration) \(String(describing: sticker.locationPath))")
  200. }
  201. sticker.aptDuration = tempDuration
  202. }
  203. DispatchQueue.main.async {
  204. completeHander(currentPart)
  205. }
  206. }
  207. }
  208. } else {
  209. // 这种情况下 mixEmptyAuidoFilePath 应该为空
  210. currentPart.mixEmptyAuidoFilePath = ""
  211. // currentPart.audioFilePath = ""
  212. currentPart.sectionDuration = realAudioDuration
  213. // 1.1)计算系数
  214. let coefficient: Float64 = realAudioDuration / stickerTotalDuration
  215. BFLog(message: "系数 is: \(coefficient) stickerTotalDuration is \(stickerTotalDuration) audioTotalDuration is :\(realAudioDuration)")
  216. // 1.2)计算贴纸的逻辑显示时长
  217. for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
  218. // 如果是视频素材有过裁剪 就使用裁剪时长
  219. var tempDuration = sticker.duration
  220. if sticker.videoIsCrop() {
  221. tempDuration = sticker.out - sticker.model_in
  222. BFLog(message: "这个视频有裁剪后:\(tempDuration) \(String(describing: sticker.locationPath))")
  223. }
  224. // 如果没有音频 系数为0时 使用素材的原始时长
  225. sticker.aptDuration = (coefficient == 0) ? tempDuration : tempDuration * coefficient
  226. }
  227. DispatchQueue.main.async {
  228. completeHander(currentPart)
  229. }
  230. }
  231. }
  232. // 计算所有贴纸的逻辑时长
  233. public class func calculationStickAptDuration(currentPart: PQEditSectionModel, createFirst: Bool = true, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
  234. if currentPart.sectionType == "global" {
  235. BFLog(message: "音频段落不处理计算")
  236. return
  237. }
  238. // 从素材详细界面返回 有可能是删除素材操作 这时如果没有选择发音人同时没有录音和导入数据要重新计算空文件时长
  239. let speeckAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.SPEECH.rawValue)
  240. let localAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.LOCAL.rawValue)
  241. if !currentPart.haveSelectVoice(), speeckAudioTrackModel == nil, localAudioTrackModel == nil, createFirst {
  242. // 只有视觉素材 没有文字
  243. if currentPart.sectionText.count == 0 {
  244. // 根据视觉的总时长生成空音频数据
  245. var timeCount: Double = 0
  246. for sticker in (currentPart.sectionTimeline!.visionTrack?.getEnableVisionTrackMaterials())! {
  247. if sticker.out != 0 || sticker.model_in == 0 {
  248. timeCount = timeCount + (sticker.out - sticker.model_in)
  249. } else {
  250. timeCount = timeCount + sticker.aptDuration
  251. }
  252. }
  253. BFLog(message: "计算视觉的总时长 \(timeCount)")
  254. if timeCount > 0 {
  255. let tool = PQCreateEmptyWAV(sampleRate: 8000,
  256. channel: 1,
  257. duration: timeCount,
  258. bit: 16)
  259. let timeInterval: TimeInterval = Date().timeIntervalSince1970
  260. var audioFileTempPath = exportAudiosDirectory
  261. if !directoryIsExists(dicPath: audioFileTempPath) {
  262. BFLog(message: "文件夹不存在 \(audioFileTempPath)")
  263. createDirectory(path: audioFileTempPath)
  264. }
  265. audioFileTempPath.append("empty_\(timeInterval).wav")
  266. tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
  267. currentPart.audioFilePath = audioFileTempPath.replacingOccurrences(of: documensDirectory, with: "")
  268. calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
  269. }
  270. } else {
  271. calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
  272. }
  273. } else {
  274. calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
  275. }
  276. } else {
  277. calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
  278. }
  279. }
// Concatenates audio files end to end.
/*
 Merging/trimming produces m4a output, but converting m4a straight to mp3 corrupts the
 audio, so the pipeline converts m4a -> wav first and then wav -> mp3.
 */
/// Merges several audio files into a single m4a file.
/// - Parameter urls: full-path URLs of every audio input (full paths so they can be reused)
/// - Parameter completeHander: called with the full-path output URL (strip the prefix before
///   persisting); receives nil when every input was invalid.
public class func mergeAudios(urls: [URL], completeHander: @escaping (_ fileURL: URL?) -> Void) {
    let timeInterval: TimeInterval = Date().timeIntervalSince1970
    let composition = AVMutableComposition()
    var totalDuration: CMTime = .zero
    BFLog(message: "合并文件总数 \(urls.count)")
    for urlStr in urls {
        BFLog(message: "合并的文件地址: \(urlStr)")
        let audioAsset = AVURLAsset(url: urlStr, options: avAssertOptions)
        let tracks1 = audioAsset.tracks(withMediaType: .audio)
        if tracks1.count == 0 {
            // NOTE(review): `break` silently abandons the remaining inputs; callers are
            // expected to guarantee every input is valid.
            BFLog(message: "音频数据无效不进行合并,所有任务结束要确保输入的数据都正常! \(urlStr)")
            break
        }
        let assetTrack1: AVAssetTrack = tracks1[0]
        let duration1: CMTime = assetTrack1.timeRange.duration
        BFLog(message: "每一个文件的 duration \(CMTimeGetSeconds(duration1))")
        let timeRange1 = CMTimeRangeMake(start: .zero, duration: duration1)
        let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
        do {
            // Insert this file's whole audio track at the current end of the composition.
            try compositionAudioTrack.insertTimeRange(timeRange1, of: assetTrack1, at: totalDuration)
        } catch {
            BFLog(message: "error is \(error)")
        }
        totalDuration = CMTimeAdd(totalDuration, audioAsset.duration)
    }
    if CMTimeGetSeconds(totalDuration) == 0 {
        BFLog(message: "所有数据无效")
        completeHander(nil)
        return
    } else {
        // Concatenation succeeded.
        BFLog(message: "totalDuration is \(CMTimeGetSeconds(totalDuration))")
    }
    let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
    BFLog(message: "assetExport.supportedFileTypes is \(String(describing: assetExport?.supportedFileTypes))")
    assetExport?.outputFileType = .m4a
    // The file extension must match outputFileType, otherwise the export fails.
    var audioFilePath = exportAudiosDirectory
    if !directoryIsExists(dicPath: audioFilePath) {
        BFLog(message: "文件夹不存在")
        createDirectory(path: audioFilePath)
    }
    audioFilePath.append("merge_\(timeInterval).m4a")
    let fileUrl = URL(fileURLWithPath: audioFilePath)
    assetExport?.outputURL = fileUrl
    assetExport?.exportAsynchronously {
        if assetExport!.status == .completed {
            // e.g. duration 85.819125
            let audioAsset = AVURLAsset(url: fileUrl, options: avAssertOptions)
            BFLog(message: "拼接声音文件 完成 \(fileUrl) 时长is \(CMTimeGetSeconds(audioAsset.duration))")
            completeHander(fileUrl)
        } else {
            // NOTE(review): `URL(string: "")` evaluates to nil, so failure also delivers nil.
            print("拼接出错 \(String(describing: assetExport?.error))")
            completeHander(URL(string: ""))
        }
    }
}
  345. /// 根据选择的画布类型计算播放器显示的位置和大小
  346. /// - Parameters:
  347. /// - editProjectModel: 项目数据
  348. /// - showType: 显示类型 1, 编辑界面 2,总览界面
  349. /// - Returns: 显示的坐标和位置
  350. public class func getShowCanvasRect(editProjectModel: PQEditProjectModel?, showType: Int, playerViewHeight: CGFloat = 216 / 667 * cScreenHeigth) -> CGRect {
  351. if editProjectModel == nil {
  352. BFLog(message: "editProjectModel is error")
  353. return CGRect()
  354. }
  355. // UI播放器的最大高度,同时最大宽度为设备宽度
  356. var showRect: CGRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
  357. let canvasType: Int = editProjectModel!.sData!.videoMetaData!.canvasType
  358. if showType == 1 { // 编辑界面
  359. switch canvasType {
  360. case videoCanvasType.origin.rawValue:
  361. // 使用有效素材第一位
  362. var firstModel: PQEditVisionTrackMaterialsModel?
  363. for part in editProjectModel!.sData!.sections {
  364. if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
  365. firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
  366. break
  367. }
  368. }
  369. if firstModel != nil {
  370. if firstModel?.width == 0 || firstModel?.height == 0 {
  371. BFLog(message: "!!!!!!!!!!!素材宽高有问题!!!!!!!!!!!")
  372. }
  373. BFLog(1, message: "第一个有效素材的大小 \(String(describing: firstModel?.width)) \(String(describing: firstModel?.height))")
  374. let ratioMaterial: Float = (firstModel?.width ?? 0) / (firstModel?.height ?? 0)
  375. if ratioMaterial > 1 {
  376. // 横屏
  377. var tempPlayerHeight = cScreenWidth * CGFloat(firstModel!.height / firstModel!.width)
  378. var scale: CGFloat = 1.0
  379. if tempPlayerHeight > playerViewHeight {
  380. scale = CGFloat(playerViewHeight) / CGFloat(tempPlayerHeight)
  381. tempPlayerHeight = tempPlayerHeight * scale
  382. }
  383. showRect = CGRect(x: (cScreenWidth - cScreenWidth * scale) / 2, y: (playerViewHeight - tempPlayerHeight) / 2, width: cScreenWidth * scale, height: tempPlayerHeight)
  384. } else {
  385. // 竖屏
  386. let playerViewWidth = (CGFloat(firstModel!.width) / CGFloat(firstModel!.height)) * playerViewHeight
  387. showRect = CGRect(x: (cScreenWidth - playerViewWidth) / 2, y: 0, width: playerViewWidth, height: playerViewHeight)
  388. }
  389. } else {
  390. // 没有视觉素材时,只有文字,语音时,默认为原始但显示的 VIEW 为 1:1
  391. showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
  392. }
  393. case videoCanvasType.oneToOne.rawValue:
  394. showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
  395. case videoCanvasType.nineToSixteen.rawValue:
  396. showRect = CGRect(x: (cScreenWidth - playerViewHeight * (9.0 / 16.0)) / 2, y: 0, width: playerViewHeight * (9.0 / 16.0), height: playerViewHeight)
  397. case videoCanvasType.sixteenToNine.rawValue:
  398. showRect = CGRect(x: 0, y: 0 + (playerViewHeight - cScreenWidth * (9.0 / 16.0)) / 2, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
  399. default:
  400. break
  401. }
  402. } else if showType == 2 { // 总览界面
  403. switch canvasType {
  404. case videoCanvasType.origin.rawValue:
  405. BFLog(message: "总览时画布的大小 \(String(describing: editProjectModel!.sData!.videoMetaData?.videoWidth)) \(String(describing: editProjectModel!.sData!.videoMetaData?.videoHeight))")
  406. // 画布的宽高 和宽高比值
  407. let materialWidth = editProjectModel!.sData!.videoMetaData?.videoWidth ?? 0
  408. let materialHeight = editProjectModel!.sData!.videoMetaData?.videoHeight ?? 1
  409. let ratioMaterial: Float = Float(materialWidth) / Float(materialHeight)
  410. if ratioMaterial > 1 {
  411. // 横屏
  412. showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * CGFloat(materialHeight) / CGFloat(materialWidth))
  413. } else if ratioMaterial < 1 {
  414. // 竖屏
  415. showRect = CGRect(x: (cScreenWidth - cScreenWidth * CGFloat(materialWidth) / CGFloat(materialHeight)) / 2, y: 0, width: cScreenWidth * (CGFloat(materialWidth) / CGFloat(materialHeight)), height: cScreenWidth)
  416. BFLog(message: "showRect is \(showRect)")
  417. } else {
  418. showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
  419. }
  420. case videoCanvasType.oneToOne.rawValue:
  421. showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
  422. case videoCanvasType.nineToSixteen.rawValue:
  423. showRect = CGRect(x: (cScreenWidth - cScreenWidth * (9.0 / 16.0)) / 2, y: 0, width: cScreenWidth * (9.0 / 16.0), height: cScreenWidth)
  424. case videoCanvasType.sixteenToNine.rawValue:
  425. showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
  426. default:
  427. break
  428. }
  429. }
  430. return showRect
  431. }
  432. /*
  433. 1, 加工入口进入编辑界面 默认画布?默认为 原始
  434. 2,进入编辑界面如果选了一个素材 画布就是实际大小,
  435. 3,没视觉素材时 点击原始显示1:1
  436. 4, 上传入口进入编辑界面 默认画布为原始
  437. 5, 从草稿箱进来时,使用恢复的画布大小
  438. 6, 如果选择了原始,移动素材后都按最新的第一个素材修改画布
  439. */
  440. /// sdata json canvastype 转到 UI 所使用类型
  441. /// - Parameter projectModel: project sdata
  442. /// - Returns: UI 使用类型
  443. public class func videoCanvasTypeToAspectRatio(projectModel: PQEditProjectModel?) -> aspectRatio? {
  444. // add by ak 给素材详情界面传比例参数如果是原始大小的要传 size
  445. var aspectRatioTemp: aspectRatio?
  446. if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.origin.rawValue {
  447. var firstModel: PQEditVisionTrackMaterialsModel?
  448. for part in projectModel!.sData!.sections {
  449. if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
  450. firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
  451. break
  452. }
  453. }
  454. if firstModel != nil {
  455. aspectRatioTemp = .origin(width: CGFloat(firstModel!.width), height: CGFloat(firstModel!.height))
  456. } else {
  457. aspectRatioTemp = .origin(width: CGFloat(projectModel?.sData?.videoMetaData?.videoWidth ?? 0), height: CGFloat(projectModel?.sData?.videoMetaData?.videoHeight ?? 0))
  458. }
  459. } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.oneToOne.rawValue {
  460. aspectRatioTemp = .oneToOne
  461. } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.nineToSixteen.rawValue {
  462. aspectRatioTemp = .nineToSixteen
  463. } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.sixteenToNine.rawValue {
  464. aspectRatioTemp = .sixteenToNine
  465. }
  466. return aspectRatioTemp
  467. }
  468. public class func getCanvasBtnName(canvasType: videoCanvasType) -> (String, String) {
  469. var btnText: String = "自适应"
  470. var btnImageName: String = "settingZoom_origin_h"
  471. if canvasType == .origin {
  472. btnText = "自适应"
  473. btnImageName = "settingZoom_origin_h"
  474. } else if canvasType == .oneToOne {
  475. btnText = "1:1"
  476. btnImageName = "settingZoom_oneToOne_h"
  477. } else if canvasType == .sixteenToNine {
  478. btnText = "16:9"
  479. btnImageName = "settingZoom_sixteenToNine_h"
  480. } else if canvasType == .nineToSixteen {
  481. btnText = "9:16"
  482. btnImageName = "settingZoom_nineToSixteen_h"
  483. }
  484. return (btnText, btnImageName)
  485. }
  486. }
  487. // MARK: - 混音相关
  488. /// 混音相关
  489. extension PQPlayerViewModel {
/// Builds the composite audio mix for playback/export.
/// - Parameters:
///   - originAsset: the main (possibly silent/empty) music asset
///   - bgmData: background music
///   - videoStickers: video materials whose own audio should be mixed in
///   - originMusicDuration: total duration that should be played
///   - clipAudioRange: clip range; when the audio is shorter than the playback time the
///     stitch point is its end (recommended: the second-to-last beat point)
///   - startTime: start position of the clip
/// - Returns: the configured audio mix and the composition that backs it
public class func setupAudioMix(originAsset: AVURLAsset, bgmData: PQVoiceModel?, videoStickers: [PQEditVisionTrackMaterialsModel]?,originMusicDuration:Float = 0,clipAudioRange: CMTimeRange = CMTimeRange.zero,startTime:CMTime = .zero ) -> (AVMutableAudioMix, AVMutableComposition) {
    let composition = AVMutableComposition()
    let audioMix = AVMutableAudioMix()
    var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
    // Handle the selected main music.
    if (originMusicDuration > Float(CMTimeGetSeconds(clipAudioRange.duration))) {
        // Requested playback time exceeds the clip: stitch repeated segments of the source.
        BFLog(message: "要播放的时间长,比原音频要长进行拼接originMusicDuration:\(originMusicDuration) originAsset.duration \(CMTimeGetSeconds(clipAudioRange.duration))")
        let originaParameters = dealWithOriginAssetTrack(originAsset: originAsset, totalDuration: Float64(originMusicDuration), composition: composition,clipAudioRange: clipAudioRange,mStartTime: startTime)
        BFLog(message: "originaParameters count \(originaParameters.count)")
        if originaParameters.count > 0 {
            tempParameters = tempParameters + originaParameters
        }
    } else {
        // The source audio is long enough: insert it once, unstitched.
        BFLog(message: "音频不用拼接:\(CMTimeGetSeconds(originAsset.duration))")
        let parameters = mixAudioTrack(audioAsset: originAsset, trackTimeRange: CMTimeRange(start: .zero, end: originAsset.duration), composition: composition)
        if parameters != nil {
            tempParameters.append(parameters!)
        } else {
            BFLog(message: "parameters is error \(CMTimeGetSeconds(originAsset.duration))")
        }
    }
    // Handle the background music.
    if bgmData != nil, bgmData?.localPath != nil {
        let bgmParameters = dealWithBGMTrack(bgmData: bgmData!, totalDuration: originAsset.duration.seconds, composition: composition)
        if bgmParameters.count > 0 {
            tempParameters = tempParameters + bgmParameters
        }
    }
    // Handle the materials' own audio tracks.
    if videoStickers != nil, (videoStickers?.count ?? 0) > 0 {
        for sticker in videoStickers! {
            if sticker.volumeGain == 0 {
                // Adding a zero-volume track causes crackling noise, so skip it.
                BFLog(message: "音频音量 为0 不添加")
                continue
            }
            let stickerParameters = dealWithMaterialTrack(stickerModel: sticker, composition: composition)
            if stickerParameters.count > 0 {
                tempParameters = tempParameters + stickerParameters
            }
        }
    }
    audioMix.inputParameters = tempParameters
    // Export the mixed audio (debug helper):
    // exportAudio(comosition: composition)
    return (audioMix, composition)
}
/// Stitches the main music track when the requested playback time exceeds the source
/// audio length, e.g. a 30s source that must cover 250s of playback.
/// - Parameters:
///   - originAsset: the source audio asset
///   - totalDuration: total duration that must be covered
///   - composition: the composition the stitched segments are inserted into
///   - clipAudioRange: the clip range repeated for each segment
///   - mStartTime: user-chosen start time of the first segment
/// - Returns: one mix-input parameter set per stitched segment
public class func dealWithOriginAssetTrack(originAsset: AVURLAsset, totalDuration: Float64, composition: AVMutableComposition,clipAudioRange: CMTimeRange = CMTimeRange.zero,mStartTime:CMTime = .zero ) -> [AVMutableAudioMixInputParameters] {
    var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
    let volume:Float = 1.0
    let originaDuration = CMTimeGetSeconds(clipAudioRange.duration)
    BFLog(message: "处理主音频 原始时长startTime = \(originaDuration) 要显示时长totalDuration = \(totalDuration)")
    // Number of whole repeats of the clip.
    let count = Int(totalDuration) / Int(originaDuration)
    // count = count + 1
    // Remainder that needs one final partial clip.
    let row = totalDuration - Double(count) * originaDuration
    // Total duration stitched so far.
    var clipTotalDuration:Float = 0.0
    if count > 0 {
        for index in 0 ..< count {
            BFLog(message: "this is running running")
            // Segment 0 runs from the user-selected start time to the clip end; every later
            // segment repeats the full clip range.
            var startTime = CMTime.zero
            var trackTimeRange = clipAudioRange
            if (index == 0) {
                startTime = mStartTime
                trackTimeRange = CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue(CMTimeGetSeconds(clipAudioRange.end)), timescale: playerTimescaleInt))
                clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
            } else {
                // (clipAudioRange.end - mStartTime) is the length of the user-selected first segment.
                startTime = CMTime(value: CMTimeValue((CMTimeGetSeconds( clipAudioRange.duration) * Double(index) + (CMTimeGetSeconds(clipAudioRange.end) - CMTimeGetSeconds(mStartTime))) * Float64(playerTimescaleInt)), timescale: playerTimescaleInt)
                trackTimeRange = clipAudioRange
                clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
            }
            // BFLog(1, message: "原音频时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
            let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
            if parameters != nil {
                tempParameters.append(parameters!)
            } else {
                BFLog(message: "接拼出现错误!!!!")
            }
        }
    }
    // Append the final partial segment covering the remainder.
    if (row > 0) {
        let startTime = CMTime(value: CMTimeValue(clipTotalDuration * Float(playerTimescaleInt)), timescale: playerTimescaleInt)
        let trackTimeRange = CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue((CMTimeGetSeconds(startTime) + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
        BFLog(1, message: "最后一小段音乐时长短:count = \(count),startTime = \(CMTimeShow(startTime)),trackTimeRange = \(CMTimeRangeShow(trackTimeRange))")
        let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
        if parameters != nil {
            tempParameters.append(parameters!)
        }
        clipTotalDuration = clipTotalDuration + Float(row)
    }
    BFLog(message: "拼接的音频总时长: \(clipTotalDuration)")
    return tempParameters
}
  601. /// 处理背景音乐音轨
  602. /// - Parameters:
  603. /// - stickerModel: <#stickerModel description#>
  604. /// - composition: <#composition description#>
  605. /// - Returns: <#description#>
  606. public class func dealWithBGMTrack(bgmData: PQVoiceModel, totalDuration: Float64, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
  607. var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
  608. let bgmAsset = AVURLAsset(url: URL(fileURLWithPath: bgmData.localPath ?? ""), options: avAssertOptions)
  609. let volume = Float(bgmData.volume) / 100.0
  610. let bgmDuration = (Float64(bgmData.duration ?? "0") ?? 0) - bgmData.startTime
  611. BFLog(message: "处理背景音乐:startTime = \(bgmData.startTime),bgmDuration = \(bgmDuration),totalDuration = \(totalDuration)")
  612. if bgmDuration < totalDuration {
  613. let count = Int(totalDuration) / Int(bgmDuration)
  614. let row = totalDuration - Double(count) * bgmDuration
  615. if count > 0 {
  616. for index in 0 ..< count {
  617. let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(index) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
  618. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + bgmDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  619. BFLog(message: "背景音乐时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
  620. let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  621. if parameters != nil {
  622. tempParameters.append(parameters!)
  623. }
  624. }
  625. }
  626. if row > 0 {
  627. let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(count) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
  628. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  629. BFLog(message: "背景音乐时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
  630. let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  631. if parameters != nil {
  632. tempParameters.append(parameters!)
  633. }
  634. }
  635. } else {
  636. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + totalDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  637. BFLog(message: "背景音乐时长长:trackTimeRange = \(trackTimeRange)")
  638. let bgmParameters = mixAudioTrack(audioAsset: bgmAsset, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  639. if bgmParameters != nil {
  640. tempParameters.append(bgmParameters!)
  641. }
  642. }
  643. return tempParameters
  644. }
  645. /// 处理素材音轨
  646. /// - Parameters:
  647. /// - stickerModel: <#stickerModel description#>
  648. /// - composition: <#composition description#>
  649. /// - Returns: <#description#>
  650. public class func dealWithMaterialTrack(stickerModel: PQEditVisionTrackMaterialsModel, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
  651. var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
  652. let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + stickerModel.locationPath), options: avAssertOptions)
  653. let volume = Float(stickerModel.volumeGain) / 100
  654. let rangeStart = stickerModel.model_in
  655. var rangeEnd = stickerModel.out
  656. if rangeEnd == 0 {
  657. rangeEnd = audioAsset.duration.seconds
  658. }
  659. var originDuration = (rangeEnd - rangeStart)
  660. if stickerModel.aptDuration < originDuration {
  661. originDuration = stickerModel.aptDuration
  662. }
  663. if stickerModel.aptDuration > originDuration, stickerModel.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
  664. let count = originDuration == 0 ? 0 : Int(stickerModel.aptDuration) / Int(originDuration)
  665. let row = stickerModel.aptDuration - Double(count) * originDuration
  666. if count > 0 {
  667. for index in 0 ..< count {
  668. let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(index)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
  669. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  670. let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  671. if parameters != nil {
  672. tempParameters.append(parameters!)
  673. }
  674. }
  675. }
  676. if row > 0 {
  677. let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(count)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
  678. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  679. let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  680. if parameters != nil {
  681. tempParameters.append(parameters!)
  682. }
  683. }
  684. } else {
  685. let startTime = CMTime(value: CMTimeValue(stickerModel.timelineIn * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
  686. let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
  687. let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
  688. if parameters != nil {
  689. tempParameters.append(parameters!)
  690. }
  691. }
  692. return tempParameters
  693. }
  694. /// 混音添加音轨
  695. /// - Parameters:
  696. /// - audioAsset: 素材资源
  697. /// - startTime: 从什么时间开始播放
  698. /// - trackTimeRange: 播放素材范围
  699. /// - volume:音轨音量
  700. /// - composition: <#composition description#>
  701. /// - Returns: <#description#>
  702. public class func mixAudioTrack(audioAsset: AVURLAsset, startTime: CMTime = CMTime.zero, trackTimeRange: CMTimeRange, volume: Float = 1, composition: AVMutableComposition) -> AVMutableAudioMixInputParameters? {
  703. BFLog(message: "startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
  704. // 第一个音轨
  705. // let assetTrack : AVAssetTrack? = audioAsset.tracks(withMediaType: .audio).first
  706. // 所有音轨
  707. let assetTracks: [AVAssetTrack]? = audioAsset.tracks(withMediaType: .audio)
  708. if assetTracks != nil, (assetTracks?.count ?? 0) > 0 {
  709. let audioTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
  710. let mixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
  711. mixInputParameters.setVolume(volume, at: startTime)
  712. do {
  713. // 第一个音轨插入到原音的开始和结束位置
  714. // try audioTrack?.insertTimeRange(trackTimeRange, of: assetTrack!, at: startTime)
  715. // 所有音轨插入到原音的开始和结束位置
  716. let timeRanges = Array(repeating: NSValue(timeRange: trackTimeRange), count: assetTracks!.count)
  717. try audioTrack?.insertTimeRanges(timeRanges, of: assetTracks!, at: startTime)
  718. } catch {
  719. BFLog(message: "error is \(error)")
  720. }
  721. return mixInputParameters
  722. }
  723. return nil
  724. }
  725. // 导出音频
  726. /// - Parameter comosition: <#comosition description#>
  727. /// - Returns: <#description#>
  728. public class func exportAudio(comosition: AVAsset) {
  729. let outPutFilePath = URL(fileURLWithPath: tempDirectory + "/temp.mp4")
  730. // 删除以创建地址
  731. try? FileManager.default.removeItem(at: outPutFilePath)
  732. let assetExport = AVAssetExportSession(asset: comosition, presetName: AVAssetExportPresetMediumQuality)
  733. assetExport?.outputFileType = .mp4
  734. assetExport?.outputURL = outPutFilePath
  735. assetExport?.exportAsynchronously(completionHandler: {
  736. print("assetExport == \(assetExport?.status.rawValue ?? 0),error = \(String(describing: assetExport?.error))")
  737. DispatchQueue.main.async {}
  738. })
  739. }
  740. }