|
@@ -75,10 +75,7 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
// Progress block of the video with a parameter value of 0-1.
|
|
|
// Can be used to check video encoding progress. Not called from main thread.
|
|
|
public var progress: ((Double) -> Void)?
|
|
|
-
|
|
|
- // 开始绘制
|
|
|
- var isStartReading: Bool = false
|
|
|
-
|
|
|
+
|
|
|
var audioSettings: [String: Any]?
|
|
|
|
|
|
var movieFramebuffer: Framebuffer?
|
|
@@ -93,7 +90,7 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
// 最后一帧图像数据 CMSampleBuffer 不会 deep copy 所以使用一个CVImageBuffer变量
|
|
|
var lastImageBuffer: CVImageBuffer?
|
|
|
//
|
|
|
- var currentRenderImageBuffer: CVPixelBuffer?
|
|
|
+ @Atomic var currentRenderImageBuffer: CVPixelBuffer?
|
|
|
var currentRenderImageBufferTimeStamp: CMTime = .zero
|
|
|
var currentRenderSampleBuffer: CMSampleBuffer?
|
|
|
// 旋转角度值
|
|
@@ -122,7 +119,19 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
var startTimeStamp: CMTime?
|
|
|
|
|
|
|
|
|
deinit {
    BFLog(message: "movie filter deinit")
    clearData()
}

/// Releases decoding resources held by this filter.
///
/// Called from `deinit` and whenever the filter is torn down; cancels any
/// in-flight `AVAssetReader` session after letting the superclass clean up.
/// Safe to call repeatedly — `cancelReading()` is only reached when a reader
/// exists (optional chaining; the previous explicit nil check was redundant).
public override func clearData() {
    super.clearData()
    assetReader?.cancelReading()
}
|
|
|
|
|
|
public init(url: URL) {
|
|
|
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
|
|
@@ -138,6 +147,7 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
public init(movieSticker: PQEditVisionTrackMaterialsModel) {
|
|
|
super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
|
|
|
moveSticker = movieSticker
|
|
|
+ stickerInfo = movieSticker
|
|
|
FilterLog(message: "资源裁剪的 开始时间\(moveSticker!.model_in) 结束时间: \(moveSticker!.out)")
|
|
|
if moveSticker!.videoIsCrop() {
|
|
|
requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in * 600), timescale: 600)
|
|
@@ -204,11 +214,6 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
enableSeek = false
|
|
|
}
|
|
|
|
|
|
- if !isStartReading {
|
|
|
- isStartReading = false
|
|
|
- startReading()
|
|
|
- }
|
|
|
-
|
|
|
if(startTimeStamp == nil){
|
|
|
startTimeStamp = currentTime
|
|
|
}
|
|
@@ -216,16 +221,12 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
let stickerTime = CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600)
|
|
|
|
|
|
let PTSTime = CMTimeAdd(stickerTime, CMTimeSubtract(currentTime, startTimeStamp ?? .zero))
|
|
|
-
|
|
|
- let showBuffer = getNextSampleBuffer(showTimeStamp: CMTime.init(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
|
|
|
-
|
|
|
- if(showBuffer != nil){
|
|
|
- process(movieFrame:CMSampleBufferGetImageBuffer(showBuffer!)!, withSampleTime: currentTime)
|
|
|
- }
|
|
|
+
|
|
|
+ readNextVideoFrame(showTimeStamp: CMTime.init(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
|
|
|
|
|
|
} else {
|
|
|
- isStartReading = false
|
|
|
- FilterLog(message: "开始显示 movefilter 了 结束了\(String(describing: moveSticker?.timelineIn)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
|
|
|
+
|
|
|
+ FilterLog(message: "movefilter 了 结束了 timelineIN\(String(describing: moveSticker?.timelineIn)) timelineOut\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪in:\(String(describing: moveSticker?.model_in)) 裁剪out:\(String(describing: moveSticker?.out)) ")
|
|
|
FilterLog(message: "不显示 movefilter 了")
|
|
|
|
|
|
assetReader?.cancelReading()
|
|
@@ -312,8 +313,7 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
FilterLog(message: "开始初始化")
|
|
|
|
|
|
assetReader?.cancelReading()
|
|
|
-
|
|
|
- isStartReading = true
|
|
|
+
|
|
|
guard let assetReader = createReader() else {
|
|
|
return // A return statement in this frame will end thread execution.
|
|
|
}
|
|
@@ -342,6 +342,167 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
startReading()
|
|
|
}
|
|
|
|
|
|
/// Decodes forward from the current `assetReader` until it reaches the first
/// frame whose output presentation timestamp is at or past `showTimeStamp`,
/// then caches that frame's image buffer in `lastImageBuffer` and hands the
/// sample buffer to `processCurrentBuffer(sampleBuffer:)` for rendering.
///
/// - Parameter showTimeStamp: Target presentation time to seek the decoder to.
///
/// Runs synchronously on the caller's thread; returns early (with a log) when
/// the reader is missing, has no video output, or decoding yields no buffer.
func readNextVideoFrame(showTimeStamp: CMTime) {
    guard let reader = assetReader else {
        FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
        return
    }

    // Find the reader's video output. The old code force-unwrapped the
    // result of this scan and would crash if no video output existed.
    guard let videoTrackOutput = reader.outputs.first(where: { $0.mediaType == AVMediaType.video }) else {
        FilterLog(message: "assetReader has no video output 出现严重错误!!!!!!!!!!!!!!")
        return
    }

    let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970

    var sampleBuffer: CMSampleBuffer?

    // Number of buffers copied while seeking — logging only.
    var count: Int = 0
    while reader.status == .reading {
        count = count + 1
        sampleBuffer = videoTrackOutput.copyNextSampleBuffer()
        guard let buffer = sampleBuffer else {
            BFLog(message: " copyNextSampleBuffer is nil error!!!")
            return
        }

        // Stop at the first decoded frame whose timestamp reaches the target.
        let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(buffer)
        if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) {
            let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
            FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(CMTimeGetSeconds(targetTimeStamp)) 查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)) 查找次数\(count) 进场时间: \(String(describing: moveSticker?.timelineIn)) 裁剪开始时间:\(String(describing: moveSticker?.model_in))")
            break
        }
    }

    if let buffer = sampleBuffer {
        // Cache the image buffer for freeze-frame use (CMSampleBuffer is not
        // deep-copied, so the CVImageBuffer is kept separately). Assign the
        // optional directly instead of force-unwrapping: an image-less sample
        // buffer previously crashed here.
        lastImageBuffer = CMSampleBufferGetImageBuffer(buffer)
        processCurrentBuffer(sampleBuffer: buffer)
    } else {
        FilterLog(message: "sampleBuffer is nil data is error")
    }
}
|
|
|
+
|
|
|
+ /*
|
|
|
+ func getNextSampleBufferQueueAsync(showTimeStamp:CMTime){
|
|
|
+
|
|
|
+ BFLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp))")
|
|
|
+
|
|
|
+ seekQueue.async {
|
|
|
+
|
|
|
+ let beginDecoderTime: TimeInterval = Date().timeIntervalSince1970
|
|
|
+ if self.assetReader == nil {
|
|
|
+ FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
|
|
|
+// return
|
|
|
+ }
|
|
|
+ var videoTrackOutput: AVAssetReaderOutput?
|
|
|
+ for output in self.assetReader!.outputs {
|
|
|
+ if output.mediaType == AVMediaType.video {
|
|
|
+ videoTrackOutput = output
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ var targetSampleBuffer: CMSampleBuffer?
|
|
|
+
|
|
|
+ while self.assetReader?.status == .reading {
|
|
|
+
|
|
|
+ targetSampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
|
|
|
+ if(targetSampleBuffer == nil){
|
|
|
+ BFLog(message: " copyNextSampleBuffer is nil error!!!")
|
|
|
+ return
|
|
|
+ }
|
|
|
+ let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
|
|
|
+
|
|
|
+ //目标帧 时间
|
|
|
+ if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) > CMTimeGetSeconds(showTimeStamp){
|
|
|
+
|
|
|
+ let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
|
|
|
+ FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(CMTimeGetSeconds(targetTimeStamp)) 查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)))")
|
|
|
+
|
|
|
+
|
|
|
+ sharedImageProcessingContext.runOperationSynchronously {
|
|
|
+ self.currentRenderImageBuffer = CMSampleBufferGetImageBuffer(targetSampleBuffer!)!
|
|
|
+// self.process(movieFrame:self.currentRenderImageBuffer!, withSampleTime: self.currentTime)
|
|
|
+ self.processCurrentBuffer(sampleBuffer: targetSampleBuffer!)
|
|
|
+ }
|
|
|
+ }
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
+
|
|
|
+ }
|
|
|
+
|
|
|
|
|
|
func getNextSampleBuffer(showTimeStamp:CMTime) -> CMSampleBuffer? {
|
|
|
|
|
@@ -371,9 +532,13 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
let targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(targetSampleBuffer!)
|
|
|
|
|
|
//目标帧 时间
|
|
|
- if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) > CMTimeGetSeconds(showTimeStamp){
|
|
|
+ if targetSampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) - CMTimeGetSeconds(showTimeStamp) > 0.01{
|
|
|
+
|
|
|
let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
|
|
|
- FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(CMTimeGetSeconds(targetTimeStamp)) 查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)))")
|
|
|
+
|
|
|
+// if(TimeInterval(endDecoderTime - beginDecoderTime) > 0.033){
|
|
|
+ FilterLog(message: "查找的帧时间为:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(CMTimeGetSeconds(targetTimeStamp)) 查找时长为\(TimeInterval(endDecoderTime - beginDecoderTime)) filter 显示时长\(moveSticker?.timelineIn)")
|
|
|
+// }
|
|
|
|
|
|
return targetSampleBuffer
|
|
|
|
|
@@ -384,13 +549,20 @@ class PQMovieFilter: PQBaseFilter {
|
|
|
return nil
|
|
|
|
|
|
}
|
|
|
-
|
|
|
+*/
|
|
|
/// Extracts the image buffer from `frame` and renders it at `currentTime`,
/// falling back to `lastImageBuffer` when the sample buffer carries no image.
/// Does nothing when neither source yields a buffer.
func process(movieFrame frame: CMSampleBuffer) {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(frame) ?? lastImageBuffer else {
        return
    }
    process(movieFrame: imageBuffer, withSampleTime: currentTime)
}
|
|
|
func processCurrentBuffer(sampleBuffer: CMSampleBuffer) {
|
|
|
let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
|
|
|
let duration = asset!.duration // Only used for the progress block so its acuracy is not critical
|
|
|
|
|
|
sharedImageProcessingContext.runOperationSynchronously {
|
|
|
-// self.process(movieFrame: sampleBuffer)
|
|
|
+ self.process(movieFrame: sampleBuffer)
|
|
|
|
|
|
FilterLog(message: "seek 时间为: 素材 \(String(describing: self.moveSticker?.locationPath)) 取出每一帧 显示时间: \(CMTimeGetSeconds(self.currentTime)) 帧时间 \(CMTimeGetSeconds(currentSampleTime))")
|
|
|
|