|
@@ -12,7 +12,7 @@
|
|
|
|
|
|
/*
|
|
|
AVAssetReader can be used to read track data out of an AVAsset media resource, with support for decoding, format conversion, mixing, and similar operations. It comes with quite a few caveats, though:
|
|
|
-
|
|
|
+
|
|
|
startReading cannot be called repeatedly on the same AVAssetReader, and it cannot be called again once the reader has entered the failed or completed state;
|
|
|
While an AVAssetReader is decoding, going to the background or an incoming call revokes GPU access, decoding fails, and the reader also moves to the failed state. After the interruption ends, the reader must be restarted and the restart confirmed to have succeeded, otherwise a retry is needed;
|
|
|
Seeking after an AVAssetReader has started is not frame-accurate; it usually lands a few frames earlier than the requested time (AVPlayer's precise seek has the same issue). Record the seek target time, and if a buffer read after the seek carries a pts smaller than that target, discard it (see the sketch after this list);
|
|
@@ -23,30 +23,30 @@
|
|
|
AVAssetReader does not support m3u8 files and will fail to read any track information from them; to parse HLS video, use FFmpeg for demuxing and VideoToolbox for decoding;
|
|
|
AVAssetReader creates decoders and buffer queues internally, and the number of decoders is limited (the same limit applies to AVPlayerItem).
|
|
|
Of course, when AVAssetReader only demuxes and does no decoding, some of the problems above can be avoided, but you then have to do hardware decoding yourself with VideoToolbox, and pixel format conversion also has to be handled separately.
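As a rough illustration of the seek caveat above, a minimal sketch (reader, output and seekTarget are illustrative names; the real setup lives in createReader()/startReading() below) that discards buffers whose pts falls before the seek target:

func copyFrame(atOrAfter seekTarget: CMTime, from output: AVAssetReaderOutput, reader: AVAssetReader) -> CMSampleBuffer? {
    while reader.status == .reading {
        guard let buffer = output.copyNextSampleBuffer() else { return nil }
        let pts = CMSampleBufferGetPresentationTimeStamp(buffer)
        if CMTimeCompare(pts, seekTarget) >= 0 { return buffer }   // first frame at or after the target
        // pts is earlier than the seek target: drop the buffer and keep reading
    }
    return nil
}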
|
|
|
-
|
|
|
+
|
|
|
There are several ways to create a CMTime instance, but the most common is the CMTimeMake function, which takes a 64-bit value and a 32-bit timescale. For example, a CMTime representing 5 seconds can be written in several equivalent ways:
|
|
|
CMTime t1 = CMTimeMake(5, 1);
|
|
|
CMTime t2 = CMTimeMake(3000, 600);
|
|
|
CMTime t3 = CMTimeMake(5000, 1000);
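In Swift the same values are written with labeled arguments (the lines above use the C/Objective-C form), for example:

let t1 = CMTimeMake(value: 5, timescale: 1)
let t2 = CMTime(value: 3000, timescale: 600)
let t3 = CMTime(seconds: 5, preferredTimescale: 1000)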
|
|
|
-
|
|
|
+
|
|
|
2. Seek operations
|
|
|
-
|
|
|
+
|
|
|
AVAssetReader is not well suited to frequent random reads; if frequent seeking is needed, another approach may be required.
|
|
|
The read range can be configured before reading starts; once reading has begun it cannot be changed, and samples can only be read sequentially forward.
|
|
|
There are two ways to adjust the read range:
|
|
|
-
|
|
|
+
|
|
|
Set supportsRandomAccess on the output; when it is true the read range can be reset, but the caller must first keep calling copyNextSampleBuffer until that method returns NULL.
|
|
|
Or re-initialize a new AVAssetReader to set the read time range.
|
|
|
If you go with the first option and need to seek, try setting a fairly short range each time so that reading through the whole range does not take too long, and preferably split the ranges at keyframes (a sketch follows below).
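A minimal sketch of that first option (reader, output, seekTime and windowDuration are illustrative names):

output.supportsRandomAccess = true                  // must be set before reader.startReading()
reader.startReading()
// ... consume samples for the current range ...
while output.copyNextSampleBuffer() != nil {}       // drain the output until it returns nil
let newRange = CMTimeRange(start: seekTime, duration: windowDuration)
output.reset(forReadingTimeRanges: [NSValue(timeRange: newRange)])
// Once no further resets are needed, mark the configuration final and finish reading normally:
output.markConfigurationAsFinal()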
|
|
|
-
|
|
|
+
|
|
|
Frame presentation times (in seconds) at various frame rates:
|
|
|
-
|
|
|
+
|
|
|
25.0 fps : 0.0000 0.0400 0.0800 0.1200 0.1600 0.2000 0.2400 0.2800 0.3200 0.3600 0.4000 0.4400 0.4800 0.5200 0.5600 0.6000 0.6400 0.6800 0.7200 0.7600 0.8000 0.8400 0.8800 0.9200 0.9600 1.0000 1.0400 1.0800 1.1200 1.1600 1.2000
|
|
|
30.0 fps : 0.0000 0.0333 0.0667 0.1000 0.1333 0.1667 0.2000 0.2333 0.2667 0.3000 0.3333 0.3667 0.4000 0.4333 0.4667 0.5000 0.5333 0.5667 0.6000 0.6333 0.6667 0.7000 0.7333 0.7667 0.8000 0.8333 0.8667 0.9000 0.9333 0.9667 1.0000
|
|
|
60.0 fps : 0.0000 0.0167 0.0333 0.0500 0.0667 0.0833 0.1000 0.1167 0.1333 0.1500 0.1667 0.1833 0.2000 0.2167 0.2333 0.2500 0.2667 0.2833 0.3000 0.3167 0.3333 0.3500 0.3667 0.3833 0.4000 0.4167 0.4333 0.4500 0.4667 0.4833 0.5000
|
|
|
80.0 fps : 0.0000 0.0125 0.0250 0.0375 0.0500 0.0625 0.0750 0.0875 0.1000 0.1125 0.1250 0.1375 0.1500 0.1625 0.1750 0.1875 0.2000 0.2125 0.2250 0.2375 0.2500 0.2625 0.2750 0.2875 0.3000 0.3125 0.3250 0.3375 0.3500 0.3625 0.3750
|
|
|
120.0 fps : 0.0000 0.0083 0.0167 0.0250 0.0333 0.0417 0.0500 0.0583 0.0667 0.0750 0.0833 0.0917 0.1000 0.1083 0.1167 0.1250 0.1333 0.1417 0.1500 0.1583 0.1667 0.1750 0.1833 0.1917 0.2000 0.2083 0.2167 0.2250 0.2333 0.2417 0.2500
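Each row above is simply i / fps; a one-line sketch of how such a row can be generated:

let fps = 30.0
let frameTimes = (0...30).map { Double($0) / fps }  // 0.0000, 0.0333, 0.0667, ...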
|
|
|
-
|
|
|
+
|
|
|
*/
|
|
|
|
|
|
import Foundation
|
|
@@ -54,36 +54,36 @@ import UIKit
|
|
|
|
|
|
public class PQMovieFilter: PQBaseFilter {
|
|
|
public var runBenchmark = false
|
|
|
-
|
|
|
+
|
|
|
public weak var delegate: MovieInputDelegate?
|
|
|
-
|
|
|
+
|
|
|
public var yuvConversionShader: ShaderProgram?
|
|
|
public var asset: AVAsset?
|
|
|
public var videoComposition: AVVideoComposition?
|
|
|
// Play at the original speed
|
|
|
public var playAtActualSpeed: Bool = true
|
|
|
-
|
|
|
+
|
|
|
// Time in the video where it should start.
|
|
|
public var requestedStartTime: CMTime?
|
|
|
-
|
|
|
+
|
|
|
// Last sample time that played.
|
|
|
public private(set) var currentTime: CMTime = .zero
|
|
|
-
|
|
|
+
|
|
|
// Progress block of the video with a parameter value of 0-1.
|
|
|
// Can be used to check video encoding progress. Not called from main thread.
|
|
|
public var progress: ((Double) -> Void)?
|
|
|
-
|
|
|
+
|
|
|
public var audioSettings: [String: Any]?
|
|
|
-
|
|
|
+
|
|
|
public var movieFramebuffer: Framebuffer?
|
|
|
public var framebufferUserInfo: [AnyHashable: Any]?
|
|
|
-
|
|
|
+
|
|
|
@Atomic var assetReader: AVAssetReader?
|
|
|
-
|
|
|
+
|
|
|
public var moveSticker: PQEditVisionTrackMaterialsModel?
|
|
|
-
|
|
|
+
|
|
|
public var videoSize: CGSize = .zero
|
|
|
-
|
|
|
+
|
|
|
// Image data of the last frame; CMSampleBuffer is not deep-copied, so a CVImageBuffer variable is used instead
|
|
|
public var lastImageBuffer: CVImageBuffer?
|
|
|
//
|
|
@@ -92,18 +92,18 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
public var currentRenderSampleBuffer: CMSampleBuffer?
|
|
|
// Rotation angle value
|
|
|
public var mImageOrientation: ImageOrientation = .portrait
|
|
|
-
|
|
|
+
|
|
|
public var inputSize: GLSize = GLSize(width: 0, height: 0)
|
|
|
-
|
|
|
+
|
|
|
public var timebaseInfo = mach_timebase_info_data_t()
|
|
|
-
|
|
|
+
|
|
|
public var currentThread: Thread?
|
|
|
/// Use a serial queue to ensure that playback stays smooth
|
|
|
-// var seekQueue: DispatchQueue!
|
|
|
-
|
|
|
+ // var seekQueue: DispatchQueue!
|
|
|
+
|
|
|
// FPS of the original video material
|
|
|
public var stickerFPS: Float = 0
|
|
|
-
|
|
|
+
|
|
|
// Start time; set when the filter is created and first displayed
|
|
|
public var startTimeStamp: CMTime?
|
|
|
// Timestamp of the last displayed frame
|
|
@@ -113,12 +113,12 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
public var framebufferIndex:Int = 0
|
|
|
|
|
|
public var imageVertexBuffer: GLuint = 0
|
|
|
-
|
|
|
+
|
|
|
deinit {
|
|
|
FilterLog(1, message: "movie filter release")
|
|
|
clearData()
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public override func clearData() {
|
|
|
super.clearData()
|
|
|
if assetReader != nil {
|
|
@@ -130,18 +130,18 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
imageVertexBuffer = 0
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public init(url: URL) {
|
|
|
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
|
|
|
-
|
|
|
+
|
|
|
do {
|
|
|
try loadAsset(url: url, videoComposition: nil)
|
|
|
-
|
|
|
+
|
|
|
} catch {
|
|
|
NXLog(message: "load asset with error: \(error)")
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public init(movieSticker: PQEditVisionTrackMaterialsModel) {
|
|
|
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
|
|
|
moveSticker = movieSticker
|
|
@@ -150,100 +150,98 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
if moveSticker!.videoIsCrop() {
|
|
|
requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in) * Int64(BASE_FILTER_TIMESCALE), timescale: BASE_FILTER_TIMESCALE)
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
do {
|
|
|
// Test code
|
|
|
// try loadAsset(url:URL(fileURLWithPath:"22222.MP4", relativeTo:Bundle.main.resourceURL!), videoComposition: nil)
|
|
|
/* locationPath may already be a system photo-library path; handling differs across iOS versions. 1. e.g. a video path such as var/mobile/Media/DCIM/125APPLE/IMG_5189.MOV needs no sandbox path prepended
|
|
|
- 2. try to find the movie file in the BFFramework bundle, e.g. the library bundle path "/var/containers/Bundle/Application/AD663220-6AF2-4841-AF82-071C10D78959/MusicVideoPlus.app/BFFramework.bundle/endMovieA.mp4"
|
|
|
- */
|
|
|
+ 2. try to find the movie file in the BFFramework bundle, e.g. the library bundle path "/var/containers/Bundle/Application/AD663220-6AF2-4841-AF82-071C10D78959/MusicVideoPlus.app/BFFramework.bundle/endMovieA.mp4"
|
|
|
+ */
|
|
|
var videoFilePath = movieSticker.locationPath
|
|
|
if (!videoFilePath.contains("var/mobile/Media")) && (!videoFilePath.contains("BFFramework_Resources.bundle")) {
|
|
|
videoFilePath = documensDirectory + videoFilePath
|
|
|
}
|
|
|
FilterLog(2, message: "视频地址 \(String(describing: videoFilePath))")
|
|
|
try loadAsset(url: URL(fileURLWithPath: videoFilePath), videoComposition: nil)
|
|
|
-
|
|
|
+
|
|
|
} catch {
|
|
|
NXLog(message: "load asset with error: \(error)")
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
FilterLog(2, message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut) 裁剪开始时间:\(movieSticker.model_in) 裁剪结束时间:\(movieSticker.out) 路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
|
|
|
-
|
|
|
+
|
|
|
startReading()
|
|
|
-//
|
|
|
-// if #available(iOS 10.0, *) {
|
|
|
-// seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
|
|
|
-// } else {
|
|
|
-// seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
|
|
|
-// }
|
|
|
-// if #available(iOS 10.0, *) {
|
|
|
-// seekQueue.activate()
|
|
|
-// }
|
|
|
+ //
|
|
|
+ // if #available(iOS 10.0, *) {
|
|
|
+ // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
|
|
|
+ // } else {
|
|
|
+ // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
|
|
|
+ // }
|
|
|
+ // if #available(iOS 10.0, *) {
|
|
|
+ // seekQueue.activate()
|
|
|
+ // }
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public override func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) {
|
|
|
super.newFramebufferAvailable(framebuffer, fromSourceIndex: fromSourceIndex)
|
|
|
-
|
|
|
-// let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
|
|
|
+
|
|
|
+ // let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public override func renderFrame() {
|
|
|
let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
|
|
|
inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
|
|
|
-
|
|
|
+
|
|
|
currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
|
|
|
FilterLog(2, message: "wwwwwwwww duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
|
|
|
-
|
|
|
+
|
|
|
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: mImageOrientation, size: inputSize, stencil: false)
|
|
|
-
|
|
|
+
|
|
|
let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(mImageOrientation).textureCoordinates(), texture: inputFramebuffer.texture)
|
|
|
-
|
|
|
+
|
|
|
renderFramebuffer.activateFramebufferForRendering()
|
|
|
clearFramebufferWithColor(backgroundColor)
|
|
|
renderQuadWithShader(shader, uniformSettings: uniformSettings,
|
|
|
vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties])
|
|
|
releaseIncomingFramebuffers()
|
|
|
-
|
|
|
+
|
|
|
FilterLog(2, message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
|
|
|
-
|
|
|
+
|
|
|
if enableSeek {
|
|
|
FilterLog(2, message: "seek 到 \(CMTimeGetSeconds(currentTime)) ")
|
|
|
resetRangeTime(startTime: currentTime)
|
|
|
enableSeek = false
|
|
|
}
|
|
|
|
|
|
-
|
|
|
+
|
|
|
if startTimeStamp == nil {
|
|
|
startTimeStamp = currentTime
|
|
|
}
|
|
|
|
|
|
if CMTimeGetSeconds(currentTime) >= stickerInfo!.timelineIn && CMTimeGetSeconds(currentTime) <= stickerInfo!.timelineOut {
|
|
|
-
|
|
|
-
|
|
|
-
|
|
|
- // Trim start time of the video material
|
|
|
- let stickerModelIn = CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE)
|
|
|
-
|
|
|
- // Time of the frame to display
|
|
|
- let targetTime = CMTimeValue(Int(Float( 1.0 / 30.0 * Float64(framebufferIndex) * Float64(BASE_FILTER_TIMESCALE)) * Float(stickerInfo?.speedRate ?? 1.0)))
|
|
|
-
|
|
|
- // Timestamp of the frame to display
|
|
|
- var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
|
|
|
- showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
|
|
|
-
|
|
|
- FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
|
|
|
- readNextVideoFrame(showTimeStamp: showtimeStamp)
|
|
|
-
|
|
|
- framebufferIndex = framebufferIndex + 1
|
|
|
+
|
|
|
+ // Trim start time of the video material
|
|
|
+ let stickerModelIn = CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE)
|
|
|
+
|
|
|
+ // Time of the frame to display
|
|
|
+ let targetTime = CMTimeValue(Int(Float( 1.0 / 30.0 * Float64(framebufferIndex) * Float64(BASE_FILTER_TIMESCALE)) * Float(stickerInfo?.speedRate ?? 1.0)))
|
|
|
+
|
|
|
+ // Timestamp of the frame to display
|
|
|
+ var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
|
|
|
+ showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
|
|
|
+
|
|
|
+ FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
|
|
|
+ readNextVideoFrame(showTimeStamp: showtimeStamp)
|
|
|
+
|
|
|
+ framebufferIndex = framebufferIndex + 1
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Rotation type of the original video
|
|
|
public func moveAssetRotation() -> NXGPUImageRotationMode {
|
|
|
let Angle: Int = PQPHAssetVideoParaseUtil.videoRotationAngle(assert: asset!)
|
|
|
-// FilterLog(2, message: "原视频素材Angle is \(Angle)")
|
|
|
+ // FilterLog(2, message: "原视频素材Angle is \(Angle)")
|
|
|
// see https://my.oschina.net/NycoWang/blog/904105
|
|
|
switch Angle {
|
|
|
case -90, 270:
|
|
@@ -258,42 +256,42 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
return .noRotationTextureCoordinates
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// MARK: -
|
|
|
-
|
|
|
+
|
|
|
public func loadAsset(url: URL, videoComposition: AVVideoComposition?, playAtActualSpeed: Bool = true, audioSettings: [String: Any]? = nil) throws {
|
|
|
asset = AVURLAsset(url: url, options: avAssertOptions)
|
|
|
-
|
|
|
+
|
|
|
if asset != nil {
|
|
|
stickerFPS = asset!.tracks(withMediaType: .video).first?.nominalFrameRate ?? 0.0
|
|
|
let bitRate = asset!.tracks(withMediaType: .video).first?.estimatedDataRate
|
|
|
-
|
|
|
+
|
|
|
FilterLog(2, message: "move filter asset fps is \(String(describing: stickerFPS)) bit rate is \(bitRate ?? 0)")
|
|
|
-
|
|
|
+
|
|
|
self.videoComposition = videoComposition
|
|
|
self.playAtActualSpeed = playAtActualSpeed
|
|
|
-
|
|
|
+
|
|
|
yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
|
|
|
self.audioSettings = audioSettings
|
|
|
} else { FilterLog(2, message: "asset is nil") }
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// MARK: -
|
|
|
-
|
|
|
+
|
|
|
// MARK: Internal processing functions
|
|
|
-
|
|
|
+
|
|
|
public func createReader() -> AVAssetReader? {
|
|
|
do {
|
|
|
let outputSettings: [String: AnyObject] =
|
|
|
[kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))]
|
|
|
-
|
|
|
+
|
|
|
assetReader = try AVAssetReader(asset: asset!)
|
|
|
-
|
|
|
+
|
|
|
let videoTrack: AVAssetTrack = asset!.tracks(withMediaType: .video).first!
|
|
|
-
|
|
|
+
|
|
|
videoSize = videoTrack.naturalSize
|
|
|
FilterLog(2, message: "视频大小为 : \(videoSize)")
|
|
|
-
|
|
|
+
|
|
|
if videoComposition == nil {
|
|
|
let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset!.tracks(withMediaType: .video).first!, outputSettings: outputSettings)
|
|
|
readerVideoTrackOutput.alwaysCopiesSampleData = false
|
|
@@ -305,31 +303,31 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
assetReader!.add(readerVideoTrackOutput)
|
|
|
}
|
|
|
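// Restrict reading to the sticker's trim range [model_in, out), expressed in BASE_FILTER_TIMESCALE units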
assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE))
|
|
|
-
|
|
|
+
|
|
|
FilterLog(2, message: "set assetReader!.timeRange is \(assetReader!.timeRange)")
|
|
|
-
|
|
|
+
|
|
|
return assetReader
|
|
|
} catch {
|
|
|
debugPrint("ERROR: Unable to create asset reader: \(error)")
|
|
|
}
|
|
|
return nil
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public func startReading() {
|
|
|
FilterLog(2, message: "开始初始化")
|
|
|
mach_timebase_info(&timebaseInfo)
|
|
|
-
|
|
|
+
|
|
|
assetReader?.cancelReading()
|
|
|
-
|
|
|
+
|
|
|
guard let assetReader = createReader() else {
|
|
|
return // A return statement in this frame will end thread execution.
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
do {
|
|
|
try NSObject.catchException {
|
|
|
guard assetReader.startReading() else {
|
|
|
#if DEBUG
|
|
|
- cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
|
|
|
+ cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
|
|
|
#endif
|
|
|
debugPrint("ERROR: Unable to start reading: \(String(describing: assetReader.error))")
|
|
|
return
|
|
@@ -340,14 +338,14 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
return
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Set the decoding start time
|
|
|
public func resetRangeTime(startTime: CMTime = .zero) {
|
|
|
FilterLog(2, message: "\(String(describing: moveSticker?.locationPath)) 取帧的时间 \(CMTimeGetSeconds(requestedStartTime ?? .zero))")
|
|
|
requestedStartTime = startTime
|
|
|
startReading()
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Read out the next frame of data
|
|
|
public func readNextVideoFrame(showTimeStamp: CMTime) {
|
|
|
// XXXX Sometimes the frame fetched for rendering is black, so it is rendered once more. The data itself is fine (verified by saving it to the sandbox); this is not the best solution!
|
|
@@ -356,29 +354,29 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
}
|
|
|
|
|
|
if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) && CMTimeGetSeconds(targetTimeStamp) != 0 {
|
|
|
-
|
|
|
- // If the last frame's PTS is greater than the target display time, do not request data from the decoder; return directly and the view is not refreshed. Only hit during slow playback
|
|
|
-// if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) + (stickerInfo?.model_in ?? 0) && CMTimeGetSeconds(targetTimeStamp) != 0 {
|
|
|
+
|
|
|
+ // If the last frame's PTS is greater than the target display time, do not request data from the decoder; return directly and the view is not refreshed. Only hit during slow playback
|
|
|
+ // if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) + (stickerInfo?.model_in ?? 0) && CMTimeGetSeconds(targetTimeStamp) != 0 {
|
|
|
FilterLog(2, message: "28797speedRate 目标显示时间 \(String(format: "%.6f", (CMTimeGetSeconds(showTimeStamp)))) 最后显示的时间 \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) speedRate is \(stickerInfo!.speedRate)")
|
|
|
return
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
if assetReader == nil {
|
|
|
FilterLog(2, message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
|
|
|
return
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
var videoTrackOutput: AVAssetReaderOutput?
|
|
|
for output in assetReader!.outputs {
|
|
|
if output.mediaType == AVMediaType.video {
|
|
|
videoTrackOutput = output
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
|
|
|
-
|
|
|
+
|
|
|
var sampleBuffer: CMSampleBuffer?
|
|
|
-
|
|
|
+
|
|
|
// Counter used for logging
|
|
|
var count: Int = 0
|
|
|
while assetReader?.status == .reading {
|
|
@@ -389,30 +387,30 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
return
|
|
|
}
|
|
|
targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer!)
|
|
|
-
|
|
|
+
|
|
|
// Target frame time
|
|
|
if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) {
|
|
|
let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
|
|
|
|
|
|
FilterLog(2, message: " 28797speedRate is \(stickerInfo!.speedRate) 当前主线时间为:\(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 要命中时间:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 差值\(CMTimeGetSeconds(targetTimeStamp) - (stickerInfo?.model_in ?? 0)) 查找耗时为:\(String(format: "%.6f", TimeInterval(endDecoderTime - beginDecoderTime))) 查找次数\(count) 进场时间: \(String(describing: moveSticker?.timelineIn)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) 裁剪结束时间:\(String(describing: moveSticker?.out)) 原视频时长: \(CMTimeGetSeconds(asset?.duration ?? .zero))")
|
|
|
break
|
|
|
-
|
|
|
+
|
|
|
}
|
|
|
-// else {
|
|
|
-// FilterLog(2, message: "不丢帧显示 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
|
|
|
-//// usleep(2)
|
|
|
-//// sharedImageProcessingContext.runOperationSynchronously {
|
|
|
-//// self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
|
|
|
-//// }
|
|
|
-// break
|
|
|
-// }
|
|
|
+ // else {
|
|
|
+ // FilterLog(2, message: "不丢帧显示 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
|
|
|
+ //// usleep(2)
|
|
|
+ //// sharedImageProcessingContext.runOperationSynchronously {
|
|
|
+ //// self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
|
|
|
+ //// }
|
|
|
+ // break
|
|
|
+ // }
|
|
|
}
|
|
|
// 1. Display the matched frame data
|
|
|
if sampleBuffer != nil {
|
|
|
-// if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
|
|
|
+ // if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
|
|
|
lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
|
|
|
-// }
|
|
|
-
|
|
|
+ // }
|
|
|
+
|
|
|
sharedImageProcessingContext.runOperationSynchronously { [weak self] in
|
|
|
self?.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
|
|
|
}
|
|
@@ -426,9 +424,9 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
// 1) Auto-loop mode: restart from the beginning
|
|
|
if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
|
|
|
FilterLog(2, message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
|
|
|
-
|
|
|
+
|
|
|
startReading()
|
|
|
-
|
|
|
+
|
|
|
} else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
|
|
|
// 2) Freeze-frame handling
|
|
|
if lastImageBuffer != nil {
|
|
@@ -444,7 +442,7 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
}
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
/// Render frame data
|
|
|
/// - Parameters:
|
|
|
/// - movieFrame: frame data
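/// - withSampleTime: presentation timestamp attached to the rendered framebuffer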
|
|
@@ -452,33 +450,33 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
public func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
|
|
|
// For NV12 this returns 2 (the Y plane and the UV plane); if the buffer is BGRA it returns 0
|
|
|
FilterLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
|
|
|
-
|
|
|
+
|
|
|
let bufferHeight = CVPixelBufferGetHeight(movieFrame)
|
|
|
let bufferWidth = CVPixelBufferGetWidth(movieFrame)
|
|
|
CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
|
|
|
-
|
|
|
+
|
|
|
let conversionMatrix = colorConversionMatrix601FullRangeDefault
|
|
|
-
|
|
|
+
|
|
|
// 1 Y-plane
|
|
|
var luminanceGLTexture: CVOpenGLESTexture?
|
|
|
-
|
|
|
+
|
|
|
// Activate texture unit 0
|
|
|
glActiveTexture(GLenum(GL_TEXTURE0))
|
|
|
-
|
|
|
+
|
|
|
let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture)
|
|
|
-
|
|
|
+
|
|
|
if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil {
|
|
|
debugPrint("ERROR: Could not create LuminanceGLTexture")
|
|
|
return
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!)
|
|
|
-
|
|
|
+
|
|
|
// Bind the texture
|
|
|
glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture)
|
|
|
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE))
|
|
|
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE))
|
|
|
-
|
|
|
+
|
|
|
let luminanceFramebuffer: Framebuffer
|
|
|
do {
|
|
|
luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture)
|
|
@@ -487,25 +485,25 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
return
|
|
|
}
|
|
|
luminanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
|
|
|
-
|
|
|
+
|
|
|
// 2 UV-plane.
|
|
|
var chrominanceGLTexture: CVOpenGLESTexture?
|
|
|
-
|
|
|
+
|
|
|
glActiveTexture(GLenum(GL_TEXTURE1))
|
|
|
-
|
|
|
+
|
|
|
let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture)
|
|
|
-
|
|
|
+
|
|
|
if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil {
|
|
|
debugPrint("ERROR: Could not create ChrominanceGLTexture")
|
|
|
return
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
let chrominanceTexture = CVOpenGLESTextureGetName(chrominanceGLTexture!)
|
|
|
-
|
|
|
+
|
|
|
glBindTexture(GLenum(GL_TEXTURE_2D), chrominanceTexture)
|
|
|
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE))
|
|
|
glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE))
|
|
|
-
|
|
|
+
|
|
|
let chrominanceFramebuffer: Framebuffer
|
|
|
do {
|
|
|
chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture)
|
|
@@ -514,21 +512,21 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
return
|
|
|
}
|
|
|
chrominanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
|
|
|
-
|
|
|
+
|
|
|
self.movieFramebuffer?.unlock()
|
|
|
let movieFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true)
|
|
|
movieFramebuffer.lock()
|
|
|
-
|
|
|
+
|
|
|
convertYUVToRGBAK(shader: yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix)
|
|
|
CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
|
|
|
-
|
|
|
+
|
|
|
FilterLog(2, message: "mp4 render process time is \(CMTimeGetSeconds(withSampleTime))")
|
|
|
movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
|
|
|
-
|
|
|
+
|
|
|
movieFramebuffer.userInfo = framebufferUserInfo
|
|
|
self.movieFramebuffer = movieFramebuffer
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
public func convertYUVToRGBAK(shader: ShaderProgram, luminanceFramebuffer: Framebuffer, chrominanceFramebuffer: Framebuffer, secondChrominanceFramebuffer: Framebuffer? = nil, resultFramebuffer: Framebuffer, colorConversionMatrix: Matrix3x3) {
|
|
|
let textureProperties: [InputTextureProperties]
|
|
|
if let secondChrominanceFramebuffer = secondChrominanceFramebuffer {
|
|
@@ -536,22 +534,22 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
} else {
|
|
|
textureProperties = [luminanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation), chrominanceFramebuffer.texturePropertiesForTargetOrientation(resultFramebuffer.orientation)]
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
var cropTextureProperties: [InputTextureProperties] = Array()
|
|
|
for texture in textureProperties {
|
|
|
let textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: moveSticker!, textureSize: videoSize, rotationMode: moveAssetRotation(), cannvasSize: inputSize)
|
|
|
-
|
|
|
+
|
|
|
let texturePropertiesimagetwo = InputTextureProperties(textureCoordinates: textureCoordinates, texture: texture.texture)
|
|
|
cropTextureProperties.append(texturePropertiesimagetwo)
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Clear the background color
|
|
|
// clearFramebufferWithColor(Color(red:0, green:0, blue:0, alpha:1.0))
|
|
|
let uniformSettings = ShaderUniformSettings()
|
|
|
uniformSettings["colorConversionMatrix"] = colorConversionMatrix
|
|
|
-
|
|
|
+
|
|
|
let verticesPoint: [GLfloat] = PQGPUImageTools.getVerticesPoint(sticker: moveSticker!, textureSize: (moveAssetRotation() == .rotateLeftTextureCoordinates || moveAssetRotation() == .rotateRightTextureCoordinates) ? CGSize(width: videoSize.height, height: videoSize.width) : videoSize, cannvasSize: inputSize)
|
|
|
-
|
|
|
+
|
|
|
imageVertexBuffer = PQGPUImageTools.NXGenerateVBO(for: verticesPoint)
|
|
|
renderQuadWithShader(shader,
|
|
|
uniformSettings: uniformSettings,
|
|
@@ -563,12 +561,12 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
PQGPUImageTools.deleteVBO(imageVertexBuffer)
|
|
|
imageVertexBuffer = 0
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
luminanceFramebuffer.unlock()
|
|
|
chrominanceFramebuffer.unlock()
|
|
|
secondChrominanceFramebuffer?.unlock()
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
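// Converts a nanosecond interval into mach absolute time ticks (the inverse of the usual numer/denom scaling).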
public func nanosToAbs(_ nanos: UInt64) -> UInt64 {
|
|
|
return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
|
|
|
}
|
|
@@ -578,21 +576,21 @@ public class PQMovieFilter: PQBaseFilter {
|
|
|
import VideoToolbox
|
|
|
|
|
|
extension UIImage {
|
|
|
-// public convenience init?(pixelBuffer: CVPixelBuffer) {
|
|
|
-// var cgImage: CGImage?
|
|
|
-// VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
|
|
|
-//
|
|
|
-// guard let cgImage = cgImage else {
|
|
|
-// return nil
|
|
|
-// }
|
|
|
-//
|
|
|
-// self.init(cgImage: cgImage)
|
|
|
-// }
|
|
|
-
|
|
|
- public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
|
|
|
+ // public convenience init?(pixelBuffer: CVPixelBuffer) {
|
|
|
+ // var cgImage: CGImage?
|
|
|
+ // VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
|
|
|
+ //
|
|
|
+ // guard let cgImage = cgImage else {
|
|
|
+ // return nil
|
|
|
+ // }
|
|
|
+ //
|
|
|
+ // self.init(cgImage: cgImage)
|
|
|
+ // }
|
|
|
+
|
|
|
+ public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
|
|
|
if let imageData = currentImage.jpegData(compressionQuality: persent) {
|
|
|
let fullPath = NSHomeDirectory().appending("/Documents/").appending(imageName)
|
|
|
-
|
|
|
+
|
|
|
try? imageData.write(to: URL(fileURLWithPath: fullPath))
|
|
|
print("fullPath=\(fullPath)")
|
|
|
}
|