@@ -12,7 +12,7 @@

/*
AVAssetReader can be used to read track data from an AVAsset media resource, with support for decoding, format conversion, mixing, and more. But there are quite a few caveats:
-
+
startReading cannot be called repeatedly on an AVAssetReader, nor called again once the reader has entered the failed or completed state;
While an AVAssetReader is decoding, switching to the background or taking an incoming call revokes GPU access, decoding fails, and the reader also enters the failed state. After the interruption ends, the reader must be restarted, and you must confirm the restart succeeded, otherwise retry;
When seek is called after an AVAssetReader has started, it does not land precisely on the target point; it generally lands a few frames earlier than the requested time (AVPlayer's precise seek has the same problem). Record the seek target time, and if a buffer read after the seek carries a pts smaller than that target, discard it;
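A minimal sketch of that drop-early-frames check (assuming an AVAssetReaderTrackOutput named output and a recorded seekTarget; the names are illustrative, not from this file):

var sampleBuffer = output.copyNextSampleBuffer()
while let buffer = sampleBuffer,
      CMTimeCompare(CMSampleBufferGetPresentationTimeStamp(buffer), seekTarget) < 0 {
    // pts is still earlier than the seek target: discard and read the next buffer
    sampleBuffer = output.copyNextSampleBuffer()
}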
@@ -23,30 +23,30 @@
AVAssetReader does not support m3u8 files and will fail to read any track information; to parse HLS video you need FFmpeg for demuxing and VideoToolbox for decoding;
AVAssetReader creates decoders and buffer queues internally, but the number of decoders is limited (same as AVPlayerItem).
Of course, when AVAssetReader only does demuxing and no decoding, some of the problems above can be avoided, but then you must do hardware decoding with VideoToolbox yourself, and pixel format conversion also has to be handled separately.
-
+
There are several ways to create a CMTime instance, but the most common is the CMTimeMake function, which takes a 64-bit value and a 32-bit timescale. For example, a CMTime representing 5s can be written in any of the following ways:
CMTime t1 =CMTimeMake(5, 1);
CMTime t2 =CMTimeMake(3000, 600);
CMTime t3 =CMTimeMake(5000, 1000);
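Equivalently, CMTimeMakeWithSeconds builds a CMTime straight from seconds plus a preferred timescale:

CMTime t4 = CMTimeMakeWithSeconds(5, 600);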
-
+
2. seek operations
-
+
AVAssetReader is not well suited to frequent random reads; if you need to seek frequently, another approach may be required.
The read range can be configured before reading starts; once reading has begun it cannot be changed, and reading proceeds strictly forward.
There are two ways to adjust the read range:
-
+
You can set supportsRandomAccess on the output; when it is true the read range can be reset, but the caller must first keep calling copyNextSampleBuffer until it returns NULL.
Alternatively, re-initialize a new AVAssetReader to set the read time range.
If you try the first scheme, where seek is needed, consider setting a fairly short range each time so that reading through the whole range never takes too long, and the intervals are best split on keyframes.
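A rough sketch of the first scheme (assuming an AVAssetReaderTrackOutput named output on a reader that is already reading; names are illustrative):

output.supportsRandomAccess = true // must be set before startReading()
// ... later, to jump: drain the current range until copyNextSampleBuffer returns nil
while output.copyNextSampleBuffer() != nil {}
let newRange = CMTimeRange(start: seekTarget, duration: rangeDuration)
output.reset(forReadingTimeRanges: [NSValue(timeRange: newRange)])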
-
+
Frame timestamps at common fps values:
-
+
25.0 fps : 0.0000 0.0400 0.0800 0.1200 0.1600 0.2000 0.2400 0.2800 0.3200 0.3600 0.4000 0.4400 0.4800 0.5200 0.5600 0.6000 0.6400 0.6800 0.7200 0.7600 0.8000 0.8400 0.8800 0.9200 0.9600 1.0000 1.0400 1.0800 1.1200 1.1600 1.2000
30.0 fps : 0.0000 0.0333 0.0667 0.1000 0.1333 0.1667 0.2000 0.2333 0.2667 0.3000 0.3333 0.3667 0.4000 0.4333 0.4667 0.5000 0.5333 0.5667 0.6000 0.6333 0.6667 0.7000 0.7333 0.7667 0.8000 0.8333 0.8667 0.9000 0.9333 0.9667 1.0000
60.0 fps : 0.0000 0.0167 0.0333 0.0500 0.0667 0.0833 0.1000 0.1167 0.1333 0.1500 0.1667 0.1833 0.2000 0.2167 0.2333 0.2500 0.2667 0.2833 0.3000 0.3167 0.3333 0.3500 0.3667 0.3833 0.4000 0.4167 0.4333 0.4500 0.4667 0.4833 0.5000
80.0 fps : 0.0000 0.0125 0.0250 0.0375 0.0500 0.0625 0.0750 0.0875 0.1000 0.1125 0.1250 0.1375 0.1500 0.1625 0.1750 0.1875 0.2000 0.2125 0.2250 0.2375 0.2500 0.2625 0.2750 0.2875 0.3000 0.3125 0.3250 0.3375 0.3500 0.3625 0.3750
120.0 fps : 0.0000 0.0083 0.0167 0.0250 0.0333 0.0417 0.0500 0.0583 0.0667 0.0750 0.0833 0.0917 0.1000 0.1083 0.1167 0.1250 0.1333 0.1417 0.1500 0.1583 0.1667 0.1750 0.1833 0.1917 0.2000 0.2083 0.2167 0.2250 0.2333 0.2417 0.2500
-
+
*/

import Foundation
@@ -55,36 +55,36 @@ import BFUIKit
public class PQMovieFilter: PQBaseFilter {
public var runBenchmark = false
-
+
public weak var delegate: MovieInputDelegate?
-
+
public var yuvConversionShader: ShaderProgram?
public var asset: AVAsset?
public var videoComposition: AVVideoComposition?
// Play at the original speed
public var playAtActualSpeed: Bool = true
-
+
// Time in the video where it should start.
public var requestedStartTime: CMTime?
-
+
// Last sample time that played.
public private(set) var currentTime: CMTime = .zero
-
+
// Progress block of the video with a parameter value of 0-1.
// Can be used to check video encoding progress. Not called from main thread.
public var progress: ((Double) -> Void)?
-
+
public var audioSettings: [String: Any]?
-
+
public var movieFramebuffer: Framebuffer?
public var framebufferUserInfo: [AnyHashable: Any]?
-
+
@Atomic var assetReader: AVAssetReader?
-
+
public var moveSticker: PQEditVisionTrackMaterialsModel?
-
+
public var videoSize: CGSize = .zero
-
+
// Image data of the last frame; CMSampleBuffer does not deep copy, so a CVImageBuffer variable is used instead
public var lastImageBuffer: CVImageBuffer?
//
@@ -93,18 +93,18 @@ public class PQMovieFilter: PQBaseFilter {
public var currentRenderSampleBuffer: CMSampleBuffer?
// Rotation angle value
public var mImageOrientation: ImageOrientation = .portrait
-
+
public var inputSize: GLSize = GLSize(width: 0, height: 0)
-
+
public var timebaseInfo = mach_timebase_info_data_t()
-
+
public var currentThread: Thread?
/// Use serial queue to ensure that the picture is smooth
-// var seekQueue: DispatchQueue!
-
+ // var seekQueue: DispatchQueue!
+
// FPS of the original video material
public var stickerFPS: Float = 0
-
+
// Start time; present when the filter is created and displayed
public var startTimeStamp: CMTime?
// Timestamp of the last displayed frame
@@ -114,12 +114,15 @@ public class PQMovieFilter: PQBaseFilter {
public var framebufferIndex:Int = 0

public var imageVertexBuffer: GLuint = 0
-
+
+ // Temporary solution: whether this is beat-sync (卡点) mode
+ public var isPointModel:Bool = false
+
deinit {
FilterLog(1, message: "movie filter release")
clearData()
}
-
+
public override func clearData() {
super.clearData()
if assetReader != nil {
@@ -131,18 +134,18 @@ public class PQMovieFilter: PQBaseFilter {
imageVertexBuffer = 0
}
}
-
+
public init(url: URL) {
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
-
+
do {
try loadAsset(url: url, videoComposition: nil)
-
+
} catch {
NXLog(message: "load asset with error: \(error)")
}
}
-
+
public init(movieSticker: PQEditVisionTrackMaterialsModel) {
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
moveSticker = movieSticker
@@ -151,100 +154,117 @@ public class PQMovieFilter: PQBaseFilter {
if moveSticker!.videoIsCrop() {
requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in) * Int64(BASE_FILTER_TIMESCALE), timescale: BASE_FILTER_TIMESCALE)
}
-
+
do {
// Test code
// try loadAsset(url:URL(fileURLWithPath:"22222.MP4", relativeTo:Bundle.main.resourceURL!), videoComposition: nil)
/* locationPath may point directly at a system photo-library path; different iOS versions use different paths. 1. e.g. a video path such as var/mobile/Media/DCIM/125APPLE/IMG_5189.MOV needs no sandbox prefix
- 2. try to find the movie file in the bfframework bundle, e.g. the bundle path "/var/containers/Bundle/Application/AD663220-6AF2-4841-AF82-071C10D78959/MusicVideoPlus.app/BFFramework.bundle/endMovieA.mp4"
- */
+ 2. try to find the movie file in the bfframework bundle, e.g. the bundle path "/var/containers/Bundle/Application/AD663220-6AF2-4841-AF82-071C10D78959/MusicVideoPlus.app/BFFramework.bundle/endMovieA.mp4"
+ */
var videoFilePath = movieSticker.locationPath
if (!videoFilePath.contains("var/mobile/Media")) && (!videoFilePath.contains("BFFramework_Resources.bundle")) {
videoFilePath = documensDirectory + videoFilePath
}
FilterLog(2, message: "视频地址 \(String(describing: videoFilePath))")
try loadAsset(url: URL(fileURLWithPath: videoFilePath), videoComposition: nil)
-
+
} catch {
NXLog(message: "load asset with error: \(error)")
}
-
+
FilterLog(2, message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut) 裁剪开始时间:\(movieSticker.model_in) 裁剪结束时间:\(movieSticker.out) 路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
-
+
startReading()
-//
-// if #available(iOS 10.0, *) {
-// seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
-// } else {
-// seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
-// }
-// if #available(iOS 10.0, *) {
-// seekQueue.activate()
-// }
+ //
+ // if #available(iOS 10.0, *) {
+ // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
+ // } else {
+ // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
+ // }
+ // if #available(iOS 10.0, *) {
+ // seekQueue.activate()
+ // }
}
-
+
public override func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) {
super.newFramebufferAvailable(framebuffer, fromSourceIndex: fromSourceIndex)
-
-// let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
+
+ // let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
}
-
+
public override func renderFrame() {
let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
-
+
currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
FilterLog(2, message: "wwwwwwwww duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
-
+
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: mImageOrientation, size: inputSize, stencil: false)
-
+
let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(mImageOrientation).textureCoordinates(), texture: inputFramebuffer.texture)
-
+
renderFramebuffer.activateFramebufferForRendering()
clearFramebufferWithColor(backgroundColor)
renderQuadWithShader(shader, uniformSettings: uniformSettings,
vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties])
releaseIncomingFramebuffers()
-
+
FilterLog(2, message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
-
+
if enableSeek {
FilterLog(2, message: "seek 到 \(CMTimeGetSeconds(currentTime)) ")
resetRangeTime(startTime: currentTime)
enableSeek = false
}

-
+
if startTimeStamp == nil {
startTimeStamp = currentTime
}
- if CMTimeGetSeconds(currentTime) >= stickerInfo!.timelineIn && CMTimeGetSeconds(currentTime) <= stickerInfo!.timelineOut {
-
- // Crop start time of the video material
- let stickerModelIn = CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE)
-
- // Time of the frame to display
- let targetTime = CMTimeValue(Int(Float( 1.0 / 30.0 * Float64(framebufferIndex) * Float64(BASE_FILTER_TIMESCALE)) * Float(stickerInfo?.speedRate ?? 1.0)))
-
- // Timestamp of the frame to display
- var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
- showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
-
- FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
- readNextVideoFrame(showTimeStamp: showtimeStamp)
-
- framebufferIndex = framebufferIndex + 1
+ if(isPointModel){
+ // Crop start time of the video material
+ let stickerModelIn = CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE)
+
+ // Time of the frame to display
+ let targetTime = CMTimeValue(Int(Float( 1.0 / 30.0 * Float64(framebufferIndex) * Float64(BASE_FILTER_TIMESCALE)) * Float(stickerInfo?.speedRate ?? 1.0)))
+
+ // Timestamp of the frame to display
+ var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
+ showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
+
+ FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
+ readNextVideoFrame(showTimeStamp: showtimeStamp)
+
+ framebufferIndex = framebufferIndex + 1
+ }else{
+
+ if CMTimeGetSeconds(currentTime) >= stickerInfo!.timelineIn && CMTimeGetSeconds(currentTime) <= stickerInfo!.timelineOut {
+
+ // Crop start time of the video material
+ let stickerModelIn = CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE)
+
+ // Time of the frame to display
+ let targetTime = CMTimeValue(Int(Float( 1.0 / 30.0 * Float64(framebufferIndex) * Float64(BASE_FILTER_TIMESCALE)) * Float(stickerInfo?.speedRate ?? 1.0)))
+
+ // Timestamp of the frame to display
+ var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
+ showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
+
+ FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
+ readNextVideoFrame(showTimeStamp: showtimeStamp)
+
+ framebufferIndex = framebufferIndex + 1
+ }
}
-
+
+
}
-
+
// Rotation type of the original video
public func moveAssetRotation() -> NXGPUImageRotationMode {
let Angle: Int = PQPHAssetVideoParaseUtil.videoRotationAngle(assert: asset!)
-// FilterLog(2, message: "原视频素材Angle is \(Angle)")
+ // FilterLog(2, message: "原视频素材Angle is \(Angle)")
// see https://my.oschina.net/NycoWang/blog/904105
switch Angle {
case -90, 270:
@@ -259,42 +279,42 @@ public class PQMovieFilter: PQBaseFilter {
return .noRotationTextureCoordinates
}
}
-
+
// MARK: -
-
+
public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, audioSettings: [String: Any]? = nil) throws {
asset = AVURLAsset(url: url, options: avAssertOptions)
-
+
if asset != nil {
stickerFPS = asset!.tracks(withMediaType: .video).first?.nominalFrameRate ?? 0.0
let bitRate = asset!.tracks(withMediaType: .video).first?.estimatedDataRate
-
+
FilterLog(2, message: "move filter asset fps is \(String(describing: stickerFPS)) bit rate is \(bitRate ?? 0)")
-
+
self.videoComposition = videoComposition
self.playAtActualSpeed = playAtActualSpeed
-
+
yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
self.audioSettings = audioSettings
} else { FilterLog(2, message: "asset is nil") }
}
-
+
// MARK: -
-
+
// MARK: Internal processing functions
-
+
public func createReader() -> AVAssetReader? {
do {
let outputSettings: [String: AnyObject] =
[kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))]
-
+
assetReader = try AVAssetReader(asset: asset!)
-
+
let videoTrack: AVAssetTrack = asset!.tracks(withMediaType: .video).first!
-
+
videoSize = videoTrack.naturalSize
FilterLog(2, message: "视频大小为 : \(videoSize)")
-
+
if videoComposition == nil {
let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset!.tracks(withMediaType: .video).first!, outputSettings: outputSettings)
readerVideoTrackOutput.alwaysCopiesSampleData = false
@@ -306,31 +326,31 @@ public class PQMovieFilter: PQBaseFilter {
assetReader!.add(readerVideoTrackOutput)
}
assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE))
-
+
FilterLog(2, message: "set assetReader!.timeRange is \(assetReader!.timeRange)")
-
+
return assetReader
} catch {
debugPrint("ERROR: Unable to create asset reader: \(error)")
}
return nil
}
-
+
public func startReading() {
FilterLog(2, message: "开始初始化")
mach_timebase_info(&timebaseInfo)
-
+
assetReader?.cancelReading()
-
+
guard let assetReader = createReader() else {
return // A return statement in this frame will end thread execution.
}
-
+
do {
try NSObject.catchException {
guard assetReader.startReading() else {
#if DEBUG
- cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
+ cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
#endif
debugPrint("ERROR: Unable to start reading: \(String(describing: assetReader.error))")
return
@@ -341,14 +361,14 @@ public class PQMovieFilter: PQBaseFilter {
return
}
}
-
+
// Set the decode start time
public func resetRangeTime(startTime: CMTime = .zero) {
FilterLog(2, message: "\(String(describing: moveSticker?.locationPath)) 取帧的时间 \(CMTimeGetSeconds(requestedStartTime ?? .zero))")
requestedStartTime = startTime
startReading()
}
-
+
// Fetch the first frame of data
public func readNextVideoFrame(showTimeStamp: CMTime) {
// XXXX Sometimes the frame fetched for rendering is a black screen, so it is rendered once more; the data itself is fine (verified by saving it to the sandbox). This is not the best solution!
@@ -357,29 +377,29 @@ public class PQMovieFilter: PQBaseFilter {
}

if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) && CMTimeGetSeconds(targetTimeStamp) != 0 {
-
- // If the last frame's PTS > the target display time, don't request data from the decoder; return directly without refreshing the view. Only triggered during slow-speed playback
-// if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) + (stickerInfo?.model_in ?? 0) && CMTimeGetSeconds(targetTimeStamp) != 0 {
+
+ // If the last frame's PTS > the target display time, don't request data from the decoder; return directly without refreshing the view. Only triggered during slow-speed playback
+ // if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) + (stickerInfo?.model_in ?? 0) && CMTimeGetSeconds(targetTimeStamp) != 0 {
FilterLog(2, message: "28797speedRate 目标显示时间 \(String(format: "%.6f", (CMTimeGetSeconds(showTimeStamp)))) 最后显示的时间 \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) speedRate is \(stickerInfo!.speedRate)")
return
}
-
+
if assetReader == nil {
FilterLog(2, message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
return
}
-
+
var videoTrackOutput: AVAssetReaderOutput?
for output in assetReader!.outputs {
if output.mediaType == AVMediaType.video {
videoTrackOutput = output
}
}
-
+
let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
-
+
var sampleBuffer: CMSampleBuffer?
-
+
// count used for logging
var count: Int = 0
while assetReader?.status == .reading {
@@ -390,30 +410,30 @@ public class PQMovieFilter: PQBaseFilter {
return
}
targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer!)
-
+
// Target frame time
if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) {
let endDecoderTime: TimeInterval = Date().timeIntervalSince1970

FilterLog(2, message: " 28797speedRate is \(stickerInfo!.speedRate) 当前主线时间为:\(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 要命中时间:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 差值\(CMTimeGetSeconds(targetTimeStamp) - (stickerInfo?.model_in ?? 0)) 查找耗时为:\(String(format: "%.6f", TimeInterval(endDecoderTime - beginDecoderTime))) 查找次数\(count) 进场时间: \(String(describing: moveSticker?.timelineIn)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) 裁剪结束时间:\(String(describing: moveSticker?.out)) 原视频时长: \(CMTimeGetSeconds(asset?.duration ?? .zero))")
break
-
+
}
-// else {
-// FilterLog(2, message: "不丢帧显示 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
-//// usleep(2)
-//// sharedImageProcessingContext.runOperationSynchronously {
-//// self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
-//// }
-// break
-// }
+ // else {
+ // FilterLog(2, message: "不丢帧显示 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
+ //// usleep(2)
+ //// sharedImageProcessingContext.runOperationSynchronously {
+ //// self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
+ //// }
+ // break
+ // }
}
// 1. Display the matched frame data
if sampleBuffer != nil {
-// if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+ // if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
-// }
-
+ // }
+
sharedImageProcessingContext.runOperationSynchronously { [weak self] in
self?.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
}
@@ -427,9 +447,9 @@ public class PQMovieFilter: PQBaseFilter {
// 1. Auto-loop mode: loop again from the beginning
if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
FilterLog(2, message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
-
+
startReading()
-
+
} else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
// 2. Freeze-frame handling
if lastImageBuffer != nil {
@@ -445,7 +465,7 @@ public class PQMovieFilter: PQBaseFilter {
}
}
}
-
+
/// Render frame data
/// - Parameters:
/// - movieFrame: frame data
@@ -453,33 +473,33 @@ public class PQMovieFilter: PQBaseFilter {
public func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
// NV12 returns 2 (Y plane and UV plane); if the buffer is BGRA it returns 0
FilterLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
-
+
let bufferHeight = CVPixelBufferGetHeight(movieFrame)
let bufferWidth = CVPixelBufferGetWidth(movieFrame)
CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
-
+
let conversionMatrix = colorConversionMatrix601FullRangeDefault
-
+
// 1 Y-plane
var luminanceGLTexture: CVOpenGLESTexture?
-
+
// Activate the texture
glActiveTexture(GLenum(GL_TEXTURE0))
-
+
let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture)
-
+
if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil {
debugPrint("ERROR: Could not create LuminanceGLTexture")
return
}
-
+
let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!)
-
+
// Bind the texture
glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture)
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE))
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE))
-
+
let luminanceFramebuffer: Framebuffer
do {
luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture)
@@ -488,25 +508,25 @@ public class PQMovieFilter: PQBaseFilter {
return
}
luminanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
-
+
// 2 UV-plane.
var chrominanceGLTexture: CVOpenGLESTexture?
-
+
glActiveTexture(GLenum(GL_TEXTURE1))
-
+
let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture)
-
+
if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil {
debugPrint("ERROR: Could not create ChrominanceGLTexture")
return
}
-
+
let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!)
-
+
glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture)
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE))
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE))
-
+
let chrominanceFramebuffer: Framebuffer
do {
chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture)
@@ -515,21 +535,21 @@ public class PQMovieFilter: PQBaseFilter {
return
}
chrominanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
-
+
self.movieFramebuffer?.unlock()
let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true)
movieFramebuffer.lock()
-
+
convertYUVToRGBAK(shader: yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix)
CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
-
+
FilterLog(2, message: "mp4 render process time is \(CMTimeGetSeconds(withSampleTime))")
movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
-
+
movieFramebuffer.userInfo = framebufferUserInfo
self.movieFramebuffer = movieFramebuffer
}
-
+
public func convertYUVToRGBAK(shader: ShaderProgram, luminanceFramebuffer: Framebuffer, chrominanceFramebuffer: Framebuffer, secondChrominanceFramebuffer: Framebuffer? = nil, resultFramebuffer: Framebuffer, colorConversionMatrix: Matrix3x3) {
let textureProperties: [InputTextureProperties]
if let secondChrominanceFramebuffer = secondChrominanceFramebuffer {
@@ -537,22 +557,22 @@ public class PQMovieFilter: PQBaseFilter {
} else {
textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)]
}
-
+
var cropTextureProperties: [InputTextureProperties] = Array()
for texture in textureProperties {
let textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: moveSticker!, textureSize: videoSize, rotationMode: moveAssetRotation(), cannvasSize: inputSize)
-
+
let texturePropertiesimagetwo = InputTextureProperties(textureCoordinates: textureCoordinates, texture: texture.texture)
cropTextureProperties.append(texturePropertiesimagetwo)
}
-
+
// Clear the background color
// clearFramebufferWithColor(Color(red:0, green:0, blue:0, alpha:1.0))
let uniformSettings = ShaderUniformSettings()
uniformSettings["colorConversionMatrix"] = colorConversionMatrix
-
+
let verticesPoint: [GLfloat] = PQGPUImageTools.getVerticesPoint(sticker: moveSticker!, textureSize: (moveAssetRotation() == .rotateLeftTextureCoordinates || moveAssetRotation() == .rotateRightTextureCoordinates) ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize, cannvasSize: inputSize)
-
+
imageVertexBuffer = PQGPUImageTools.NXGenerateVBO(for: verticesPoint)
renderQuadWithShader(shader,
uniformSettings: uniformSettings,
@@ -564,12 +584,12 @@ public class PQMovieFilter: PQBaseFilter {
PQGPUImageTools.deleteVBO(imageVertexBuffer)
imageVertexBuffer = 0
}
-
+
luminanceFramebuffer.unlock()
chrominanceFramebuffer.unlock()
secondChrominanceFramebuffer?.unlock()
}
-
+
public func nanosToAbs(_ nanos: UInt64) -> UInt64 {
return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
}
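// Note: nanosToAbs converts a nanosecond interval into mach absolute-time ticks
// (ticks = nanos * denom / numer, the inverse of the usual ticks-to-nanoseconds
// conversion). An illustrative use, assuming timebaseInfo was already filled by
// mach_timebase_info above, would be pacing decode to roughly 30 fps:
// mach_wait_until(mach_absolute_time() + nanosToAbs(33_333_333))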
@@ -579,21 +599,21 @@ public class PQMovieFilter: PQBaseFilter {
import VideoToolbox

extension UIImage {
-// public convenience init?(pixelBuffer: CVPixelBuffer) {
-// var cgImage: CGImage?
-// VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
-//
-// guard let cgImage = cgImage else {
-// return nil
-// }
-//
-// self.init(cgImage: cgImage)
-// }
-
- public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
+ // public convenience init?(pixelBuffer: CVPixelBuffer) {
+ // var cgImage: CGImage?
+ // VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
+ //
+ // guard let cgImage = cgImage else {
+ // return nil
+ // }
+ //
+ // self.init(cgImage: cgImage)
+ // }
+
+ public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
if let imageData = currentImage.jpegData(compressionQuality: persent) {
let fullPath = NSHomeDirectory().appending("/Documents/").appending(imageName)
-
+
try? imageData.write(to: URL(fileURLWithPath: fullPath))
print("fullPath=\(fullPath)")
}
|