
1. Use BASE_FILTER_TIMESCALE. 2. Disable the enter/exit (timeline in/out) check and rely on the filter cache for unified control. 3. Add comments and logging. 4. Fix the issue where frames fetched from the rendered video are sometimes black.

jsonwang · 3 years ago · commit fd43cad8f1

1 changed file with 30 additions and 16 deletions
      BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift


@@ -140,7 +140,7 @@ class PQMovieFilter: PQBaseFilter {
         stickerInfo = movieSticker
         FilterLog(message: "资源裁剪的 开始时间\(moveSticker!.model_in)  结束时间: \(moveSticker!.out)")
         if moveSticker!.videoIsCrop() {
-            requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in * 600), timescale: 600)
+            requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in) * Int64(BASE_FILTER_TIMESCALE), timescale: BASE_FILTER_TIMESCALE)
         }
 
         do {
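
Note: this hunk replaces the hard-coded 600 with the shared BASE_FILTER_TIMESCALE constant. One caveat visible in the diff itself: the new line converts model_in to Int64 before multiplying, which drops any sub-second fraction, whereas the timeRange hunk further down multiplies first. A minimal sketch of the multiply-first form, assuming BASE_FILTER_TIMESCALE is an Int32 (the declaration and the value 600 are assumptions, not shown in this commit):

    // Assumed declaration; not shown in this commit.
    let BASE_FILTER_TIMESCALE: Int32 = 600

    // model_in is a trim-in point in seconds; multiplying before truncating
    // keeps sub-second precision.
    let modelIn: Float64 = 1.25 // hypothetical value
    let requestedStartTime = CMTimeMake(value: Int64(modelIn * Float64(BASE_FILTER_TIMESCALE)),
                                        timescale: BASE_FILTER_TIMESCALE)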
@@ -195,7 +195,7 @@ class PQMovieFilter: PQBaseFilter {
                              vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties])
         releaseIncomingFramebuffers()
 
-        if CMTimeGetSeconds(currentTime) >= moveSticker!.timelineIn, CMTimeGetSeconds(currentTime) <= moveSticker!.timelineOut {
+//        if CMTimeGetSeconds(currentTime) >= moveSticker!.timelineIn, CMTimeGetSeconds(currentTime) <= moveSticker!.timelineOut {
             FilterLog(message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
 
             if enableSeek {
@@ -208,20 +208,20 @@ class PQMovieFilter: PQBaseFilter {
                 startTimeStamp = currentTime
             }
 
-            let stickerTime = CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600)
+            let stickerTime = CMTime(value: Int64(moveSticker?.model_in ?? 0) * Int64(BASE_FILTER_TIMESCALE), timescale: BASE_FILTER_TIMESCALE)
 
             let PTSTime = CMTimeAdd(stickerTime, CMTimeSubtract(currentTime, startTimeStamp ?? .zero))
 
             readNextVideoFrame(showTimeStamp: CMTime(value: CMTimeValue(Int(Float(PTSTime.value) * speedRate)), timescale: PTSTime.timescale))
 
-        } else {
-            FilterLog(message: "movefilter finished; timelineIn: \(String(describing: moveSticker?.timelineIn)) timelineOut: \(String(describing: moveSticker?.timelineOut)) currentTime: \(CMTimeGetSeconds(currentTime)) trim-in: \(String(describing: moveSticker?.model_in)) trim-out: \(String(describing: moveSticker?.out))")
-            FilterLog(message: "movefilter no longer displayed")
-
-            assetReader?.cancelReading()
-            // Reinitialize the decoder
-            startReading()
-        }
+//        } else {
+//            FilterLog(message: "movefilter finished; timelineIn: \(String(describing: moveSticker?.timelineIn)) timelineOut: \(String(describing: moveSticker?.timelineOut)) currentTime: \(CMTimeGetSeconds(currentTime)) trim-in: \(String(describing: moveSticker?.model_in)) trim-out: \(String(describing: moveSticker?.out))")
+//            FilterLog(message: "movefilter no longer displayed")
+//
+//            assetReader?.cancelReading()
+//            // Reinitialize the decoder
+//            startReading()
+//        }
     }
 
     // Original video rotation type
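
Note: with the timeline in/out gate commented out above, every frame now goes through the PTS mapping: source PTS = trim-in + (currentTime - startTimeStamp), with the value scaled by the playback rate. A sketch of that mapping as a pure function (the name sourcePTS is hypothetical; speedRate and currentTime live on the enclosing class):

    import CoreMedia

    // Maps timeline time to a presentation timestamp inside the source clip.
    func sourcePTS(stickerTime: CMTime, currentTime: CMTime,
                   startTimeStamp: CMTime?, speedRate: Float) -> CMTime {
        // How long the sticker has been on screen
        let elapsed = CMTimeSubtract(currentTime, startTimeStamp ?? .zero)
        // Position inside the source clip at normal speed
        let pts = CMTimeAdd(stickerTime, elapsed)
        // Scale the value only; the timescale is unchanged
        return CMTime(value: CMTimeValue(Float(pts.value) * speedRate),
                      timescale: pts.timescale)
    }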
@@ -288,8 +288,7 @@ class PQMovieFilter: PQBaseFilter {
                 readerVideoTrackOutput.alwaysCopiesSampleData = false
                 assetReader!.add(readerVideoTrackOutput)
             }
-
-            assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * 600), timescale: 600), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * 600.0), timescale: 600))
+            assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE))
 
             FilterLog(message: "set   assetReader!.timeRange is \(assetReader!.timeRange)")
 
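For readability, the long timeRange one-liner above amounts to the following, assuming model_in and out are Float64 seconds on the sticker model:

    let inSec  = moveSticker?.model_in ?? 0
    let outSec = moveSticker?.out ?? 0
    assetReader!.timeRange = CMTimeRange(
        start:    CMTime(value: Int64(inSec * Float64(BASE_FILTER_TIMESCALE)),
                         timescale: BASE_FILTER_TIMESCALE),
        duration: CMTime(value: Int64((outSec - inSec) * Float64(BASE_FILTER_TIMESCALE)),
                         timescale: BASE_FILTER_TIMESCALE))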
@@ -335,6 +334,12 @@ class PQMovieFilter: PQBaseFilter {
 
     // Fetch the next frame of data
     func readNextVideoFrame(showTimeStamp: CMTime) {
+        
+        // XXXX Sometimes the frame fetched from the rendered video is black, so render the previous frame once more. The data itself is fine (verified by saving it to the sandbox); this is not the best solution!
+        if(lastImageBuffer != nil){
+            self.renderPixelBuffler(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
+        }
+
         if assetReader == nil {
             FilterLog(message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
             return
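
Note: the black-frame workaround added above replays the last cached pixel buffer before every read, so a transient black frame from the reader is covered by the previous good frame, at the cost of one extra draw per frame (the comment itself flags this as not the best solution). The pattern, reduced to a sketch:

    var lastImageBuffer: CVPixelBuffer?

    func readNextVideoFrame(showTimeStamp: CMTime) {
        // Replay the last good frame first; a valid new frame simply
        // renders over it in the same pass.
        if let cached = lastImageBuffer {
            renderPixelBuffler(movieFrame: cached, withSampleTime: currentTime)
        }
        // ... then decode and render the frame for showTimeStamp as before
    }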
@@ -373,9 +378,9 @@ class PQMovieFilter: PQBaseFilter {
         }
         // 1. Display the matched frame data
         if sampleBuffer != nil {
-            if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
+//            if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
                 lastImageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer!)!
-            }
+//            }
 
             sharedImageProcessingContext.runOperationSynchronously {
                 self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
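
Caching the buffer on every hit (rather than only when the adapter mode is staticFrame) is what keeps the workaround in readNextVideoFrame supplied with a frame to replay. A variant of the same assignment without the force unwraps might look like:

    if let sample = sampleBuffer,
       let image = CMSampleBufferGetImageBuffer(sample) {
        lastImageBuffer = image
        sharedImageProcessingContext.runOperationSynchronously {
            self.renderPixelBuffler(movieFrame: image, withSampleTime: self.currentTime)
        }
    }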
@@ -413,14 +418,20 @@ class PQMovieFilter: PQBaseFilter {
     ///   - movieFrame: the frame data
     ///   - withSampleTime: the render timestamp; this is the render time, not the frame's PTS
     func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
+        
+        // NV12 returns 2 (a Y plane and a UV plane); a BGRA buffer returns 0
+        FilterLog(message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
+        
         let bufferHeight = CVPixelBufferGetHeight(movieFrame)
         let bufferWidth = CVPixelBufferGetWidth(movieFrame)
         CVPixelBufferLockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
 
         let conversionMatrix = colorConversionMatrix601FullRangeDefault
 
+        // 1: Y plane
         var luminanceGLTexture: CVOpenGLESTexture?
 
+        // Activate texture unit 0
         glActiveTexture(GLenum(GL_TEXTURE0))
 
         let luminanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE, GLsizei(bufferWidth), GLsizei(bufferHeight), GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &luminanceGLTexture)
@@ -432,6 +443,7 @@ class PQMovieFilter: PQBaseFilter {
 
         let luminanceTexture = CVOpenGLESTextureGetName(luminanceGLTexture!)
 
+        // Bind the luminance texture
         glBindTexture(GLenum(GL_TEXTURE_2D), luminanceTexture)
         glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GLfloat(GL_CLAMP_TO_EDGE))
         glTexParameterf(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GLfloat(GL_CLAMP_TO_EDGE))
@@ -445,6 +457,8 @@ class PQMovieFilter: PQBaseFilter {
         }
         luminanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
 
+        
+        // 2: UV plane
         var chrominanceGLTexture: CVOpenGLESTexture?
 
         glActiveTexture(GLenum(GL_TEXTURE1))
@@ -478,7 +492,7 @@ class PQMovieFilter: PQBaseFilter {
         convertYUVToRGBAK(shader: yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix)
         CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
 
-        FilterLog(message: "process time is \(withSampleTime)")
+        FilterLog(message: "mp4 render process time is \(CMTimeGetSeconds(withSampleTime))")
         movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
 
         movieFramebuffer.userInfo = framebufferUserInfo
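
Note on the plane-count log added above: a biplanar YUV buffer such as NV12 (kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) reports two planes, the Y plane uploaded as GL_LUMINANCE and the interleaved CbCr plane as GL_LUMINANCE_ALPHA, while a packed BGRA buffer reports zero and needs no YUV-to-RGB pass. A small helper illustrating the check (the function name is hypothetical):

    import CoreVideo

    // Describe how a CVPixelBuffer's planes are laid out.
    func planeLayout(of buffer: CVPixelBuffer) -> String {
        switch CVPixelBufferGetPlaneCount(buffer) {
        case 2:
            // Biplanar YUV (NV12): plane 0 = Y, plane 1 = interleaved CbCr
            let y  = "\(CVPixelBufferGetWidthOfPlane(buffer, 0))x\(CVPixelBufferGetHeightOfPlane(buffer, 0))"
            let uv = "\(CVPixelBufferGetWidthOfPlane(buffer, 1))x\(CVPixelBufferGetHeightOfPlane(buffer, 1))"
            return "NV12: Y \(y), CbCr \(uv)"
        case 0:
            return "packed (e.g. BGRA), no separate planes"
        default:
            return "planar, \(CVPixelBufferGetPlaneCount(buffer)) planes"
        }
    }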