@@ -174,15 +174,7 @@ public class PQMovieFilter: PQBaseFilter {
FilterLog(2, message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut) 裁剪开始时间:\(movieSticker.model_in) 裁剪结束时间:\(movieSticker.out) 路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
startReading()
- //
- // if #available(iOS 10.0, *) {
- // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
- // } else {
- // seekQueue = DispatchQueue(label: "PQ.moveFiler.seeking", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
- // }
- // if #available(iOS 10.0, *) {
- // seekQueue.activate()
- // }
+
}
public override func newFramebufferAvailable(_ framebuffer: Framebuffer, fromSourceIndex: UInt) {
@@ -199,6 +191,9 @@ public class PQMovieFilter: PQBaseFilter {
FilterLog(2, message: "wwwwwwwww duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: mImageOrientation, size: inputSize, stencil: false)
+
+ FilterLog(message: "maxTextureSize is \(renderFramebuffer.context.maximumTextureSizeForThisDevice)")
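+ // Sketch (assumption): a framebuffer larger than this limit cannot be backed by a GL
+ // texture, so inputSize would need clamping before the request above, e.g.:
+ //     let maxSide = renderFramebuffer.context.maximumTextureSizeForThisDevice
+ //     let clampedWidth = min(inputSize.width, maxSide)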
+
let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(mImageOrientation).textureCoordinates(), texture: inputFramebuffer.texture)
@@ -330,7 +325,7 @@ public class PQMovieFilter: PQBaseFilter {
return assetReader
} catch {
- debugPrint("ERROR: Unable to create asset reader: \(error)")
+ FilterLog(message: "movie filter ERROR: Unable to create asset reader: \(error)")
}
return nil
}
@@ -342,6 +337,7 @@ public class PQMovieFilter: PQBaseFilter {
assetReader?.cancelReading()
guard let assetReader = createReader() else {
+ FilterLog(message: "ERROR: createReader failed")
return // A return statement in this frame will end thread execution.
}
@@ -351,12 +347,12 @@ public class PQMovieFilter: PQBaseFilter {
#if DEBUG
cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
#endif
- debugPrint("ERROR: Unable to start reading: \(String(describing: assetReader.error))")
+ FilterLog(message: "ERROR: Unable to start reading: \(String(describing: assetReader.error))")
return
}
}
} catch {
- debugPrint("ERROR: Unable to start reading: \(error)")
+ FilterLog(message: "ERROR: Unable to start reading: \(error)")
return
}
}
@@ -470,6 +466,8 @@ public class PQMovieFilter: PQBaseFilter {
/// - movieFrame: frame data
/// - withSampleTime: the render timestamp; this is the render time, not the frame's PTS
public func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
+// let image = UIImage.init(pixelBuffer: movieFrame)
+// image!.saveImage(currentImage: image!, persent: 0.5, imageName: "\(CMTimeGetSeconds(withSampleTime))")
// NV12 returns 2 (a Y plane and a UV plane); if the buffer is BGRA this returns 0
FilterLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
@@ -488,7 +486,8 @@ public class PQMovieFilter: PQBaseFilter {
let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture)
if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil {
- debugPrint("ERROR: Could not create LuminanceGLTexture")
+
+ FilterLog(message: "ERROR: Could not create LuminanceGLTexture")
return
}
@@ -503,7 +502,7 @@ public class PQMovieFilter: PQBaseFilter {
do {
luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture)
} catch {
- debugPrint("ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
+ FilterLog(message: "ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
return
}
luminanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
@@ -516,7 +515,7 @@ public class PQMovieFilter: PQBaseFilter {
let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture)
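// Note: NV12 stores chroma subsampled at 4:2:0, so the UV plane (plane index 1) is half
// the luma resolution in each dimension; hence bufferWidth / 2 by bufferHeight / 2 with
// a two-channel (GL_LUMINANCE_ALPHA) texture in the call above.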
if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil {
- debugPrint("ERROR: Could not create ChrominanceGLTexture")
+ FilterLog(message: "ERROR: Could not create ChrominanceGLTexture")
return
}
@@ -530,7 +529,7 @@ public class PQMovieFilter: PQBaseFilter {
do {
chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture)
} catch {
- debugPrint("ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
+ FilterLog(message: "ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
return
}
chrominanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
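// Note: this matches the timestamp set on luminanceFramebuffer above; presumably both
// planes carry the same render time so downstream stages can pair them into one frame.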
@@ -598,16 +597,16 @@ public class PQMovieFilter: PQBaseFilter {
import VideoToolbox
extension UIImage {
- // public convenience init?(pixelBuffer: CVPixelBuffer) {
- // var cgImage: CGImage?
- // VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
- //
- // guard let cgImage = cgImage else {
- // return nil
- // }
- //
- // self.init(cgImage: cgImage)
- // }
+ public convenience init?(pixelBuffer: CVPixelBuffer) {
+ var cgImage: CGImage?
+ VTCreateCGImageFromCVPixelBuffer(pixelBuffer, options: nil, imageOut: &cgImage)
+
+ guard let cgImage = cgImage else {
+ return nil
+ }
+
+ self.init(cgImage: cgImage)
+ }
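+
+ // Usage sketch (debug only, mirroring the commented-out calls in renderPixelBuffler):
+ //     if let image = UIImage(pixelBuffer: movieFrame) {
+ //         image.saveImage(currentImage: image, persent: 0.5, imageName: "\(CMTimeGetSeconds(withSampleTime))")
+ //     }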
public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
if let imageData = currentImage.jpegData(compressionQuality: persent) {