//
//  BFVideoCompositionManager.swift
//  BFRecordScreenKit
//
//  Created by 胡志强 on 2021/12/20.
//

import Foundation
import GPUImage
import BFCommonKit
import Photos

/// Re-encodes a recorded video through a pass-through GPUImage filter into an
/// intermediate movie file, then mixes a slice of that re-encoded video with
/// the current item's original audio track, exports the mix, and saves it to
/// the photo library.
class BFVideoCompositionManager {
    /// GPUImage reader for the asset currently being re-encoded.
    var saveMovie = GPUImageMovie()
    /// Record items; `mix(date:index:)` reads the asset of the item at `currItemModelIndex`.
    var itemModels = [BFRecordItemModel]()
    /// Index into `itemModels` of the item being processed.
    var currItemModelIndex = 0

    /// Intermediate file the GPUImage writer encodes into.
    private var tempMovieURL: URL { URL(fileURLWithPath: exportVideosDirectory + "test.mov") }
    /// Final mixed output file.
    private var exportURL: URL { URL(fileURLWithPath: exportVideosDirectory + "export.mov") }

    /// H.264 QuickTime writer targeting the intermediate file (1080x1920 @ 6 Mbps).
    /// Note: the property initializer cannot reference `tempMovieURL`, so the
    /// path is spelled out here.
    var write = GPUImageMovieWriter(
        movieURL: URL(fileURLWithPath: exportVideosDirectory + "test.mov"),
        size: UIScreen.main.bounds.size,
        fileType: "com.apple.quicktime-movie", // AVFileType.mov raw value
        outputSettings: [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: 1080,
            AVVideoHeightKey: 1920,
            AVVideoCompressionPropertiesKey: [AVVideoAverageBitRateKey: 6 * 1024 * 1024],
        ] as [String: Any])

    /// Runs `avsss` through a pass-through GPUImage filter, recording the
    /// filtered output into the intermediate movie file, then starts `mix`.
    /// - Parameter avsss: The source asset to re-encode.
    func export(avsss: AVURLAsset) {
        let start = Date()
        saveMovie = GPUImageMovie(asset: avsss)
        guard let saveWrite = write else { return }

        let filter = GPUImageFilter()
        saveMovie.addTarget(filter)
        saveWrite.encodingLiveVideo = false
        filter.addTarget(saveWrite)
        saveMovie.enableSynchronizedEncoding(using: saveWrite)

        // FIX: install the completion block BEFORE recording starts so a fast
        // finish cannot fire while the block is still unset.
        saveWrite.completionBlock = { [weak self] in
            BFLog(1, message: "导出完毕:\(Date().timeIntervalSince(start))")
            self?.mix(date: start, index: 0)
        }

        saveMovie.startProcessing()
        saveWrite.startRecording()

        // NOTE(review): recording is stopped after a fixed 2 s delay, which
        // only captures the first 2 s of the asset — confirm this is intended
        // rather than waiting for the reader to finish processing.
        DispatchQueue.global().asyncAfter(deadline: .now() + 2) { [weak self] in
            saveWrite.finishRecording()
            self?.saveMovie.endProcessing()
        }
    }

    /// Builds a composition containing one 2-second video window (starting at
    /// `2*index` seconds) from the intermediate movie plus the current item's
    /// original audio, exports it to `export.mov`, and saves the result to
    /// the photo library.
    /// - Parameters:
    ///   - date: Start timestamp, used only for duration logging.
    ///   - index: Zero-based 2-second window index into the source video.
    func mix(date: Date, index: Double) {
        // FIX: bounds-check `currItemModelIndex` before subscripting to avoid
        // an out-of-range crash; also unwrap `videoAsset` once instead of twice.
        guard itemModels.indices.contains(currItemModelIndex),
              let asset = itemModels[currItemModelIndex].videoAsset else {
            return
        }
        // Stop once the requested window starts past the end of the asset.
        if 2 * index > asset.duration.seconds { return }

        let comp = AVMutableComposition()
        let audio = comp.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
        let video = comp.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

        let bsset = AVURLAsset(url: tempMovieURL)
        if let assetVideoTrack = bsset.tracks(withMediaType: .video).first {
            // NOTE(review): every window is inserted at .zero and `index`
            // never advances, so only one 2 s slice ever ends up in the
            // composition — confirm whether iteration was intended.
            try? video?.insertTimeRange(
                CMTimeRange(start: CMTime(seconds: 2 * index, preferredTimescale: 1000),
                            end: CMTime(seconds: 2 * index + 2, preferredTimescale: 1000)),
                of: assetVideoTrack,
                at: .zero)
        }
        if let assetAudioTrack = asset.tracks(withMediaType: .audio).first {
            try? audio?.insertTimeRange(CMTimeRange(start: .zero, end: bsset.duration),
                                        of: assetAudioTrack,
                                        at: .zero)
        }

        // Best-effort removal of a stale output file; the export session
        // fails if the destination already exists.
        try? FileManager.default.removeItem(at: exportURL)

        let export = AVAssetExportSession(asset: comp, presetName: AVAssetExportPreset1920x1080)
        export?.outputURL = exportURL
        export?.outputFileType = .mov
        export?.exportAsynchronously { [weak self] in
            guard let self = self else { return }
            // FIX: only save to the photo library when the export actually
            // succeeded; previously a failed export still logged success and
            // tried to save a file that was never written.
            guard export?.status == .completed else {
                BFLog(1, message: "合成失败:\(String(describing: export?.error))")
                return
            }
            BFLog(1, message: "合成完毕:\(Date().timeIntervalSince(date))")
            PHPhotoLibrary.shared().performChanges {
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.exportURL)
            } completionHandler: { isFinished, err in
                BFLog(1, message: "save \(Date().timeIntervalSince(date)) , \(err)")
            }
        }
    }
}