
1. Add logging of the player's real playback time

jsonwang 3 years ago
parent
commit
f9c7a46ca8

+ 24 - 24
BFFramework/Classes/PQGPUImage/akfilters/PQMovieInput.swift
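
The heart of this commit is a wall-clock timing pattern: record CFAbsoluteTimeGetCurrent() in start(...) as debugStartTime, then subtract it in later log lines to report real elapsed playback time. A minimal standalone sketch of that pattern follows, assuming a print-based stand-in for the project's FilterLog; PlaybackClock is a hypothetical wrapper for illustration, not part of the diff:

import Foundation

// Stand-in for the framework's FilterLog helper; here it just prints.
func FilterLog(message: String) { print(message) }

// Hypothetical wrapper illustrating the commit's timing pattern.
final class PlaybackClock {
    // Wall-clock time captured when playback starts; mirrors debugStartTime in the diff.
    private var debugStartTime: CFTimeInterval?

    func start() {
        debugStartTime = CFAbsoluteTimeGetCurrent()
        FilterLog(message: "playback started at \(String(describing: debugStartTime))")
    }

    // Seconds elapsed since start(); falls back to 0.0 like the diff's `?? 0.0`.
    var elapsed: CFTimeInterval {
        CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)
    }
}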

@@ -117,10 +117,14 @@ public class PQMovieInput: ImageSource {
 
     // Whether running in export mode
     public   var mIsExport: Bool = false
+    
+    // Start time for debug logging
+    var debugStartTime:CFTimeInterval?
+
      // Canvas size. Note: it must be even, otherwise a green edge appears on iOS 13; it is automatically enlarged to an even number
-   public var mShowVidoSize: CGSize = cVideoCannvasSizeOneToOne {
+    public var mShowVidoSize: CGSize = cVideoCannvasSizeOneToOne {
         didSet {
-            BFLog(message: "mShowVidoSize is move input  \(mShowVidoSize)")
+            FilterLog(message: "mShowVidoSize is move input  \(mShowVidoSize)")
             do {
                 displayLink?.isPaused = true
                 imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(mShowVidoSize.width), height: GLint(mShowVidoSize.height)), textureOnly: true)
@@ -131,8 +135,6 @@ public class PQMovieInput: ImageSource {
             }
         }
     }
-
-    // TODO: Someone will have to add back in the AVPlayerItem logic, because I don't know how that works
     // Initializer
     public init(asset: AVAsset, videoComposition: AVVideoComposition?, audioMix: AVAudioMix?, playAtActualSpeed: Bool = false, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
         self.asset = asset
@@ -191,8 +193,8 @@ public class PQMovieInput: ImageSource {
         isPlay = false
         beginTime = 0
         currentTime = .zero
-        BFLog(message: "Playback start time \(CMTimeGetSeconds(timeRange.start)) end time \(CMTimeGetSeconds(timeRange.end))")
-      
+        FilterLog(message: "Init: playback start time \(CMTimeGetSeconds(timeRange.start)) end time \(CMTimeGetSeconds(timeRange.end)) total playback duration: \(CMTimeGetSeconds(timeRange.end) - CMTimeGetSeconds(timeRange.start))")
+  
         playeTimeRange = timeRange
         startTime = playeTimeRange.start
 
@@ -250,8 +252,10 @@ public class PQMovieInput: ImageSource {
 
             let prgressValue = currTime / duration
 
+            FilterLog(message: "Playback in progress, total elapsed: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) current playback time: \(currTime)")
             if prgressValue > 1 {
-                FilterLog(message: "Playback finished")
+                FilterLog(message: "All playback finished, total elapsed: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
+ 
                 self.delegate?.didFinishMovie()
                 self.completion?()
  
@@ -276,6 +280,9 @@ public class PQMovieInput: ImageSource {
     // Start. isFreeBuffer indicates the freeze-first-frame feature; when freezing the first frame, audio is not processed (it would otherwise play out loud)
     @objc public func start(isFreeBuffer: Bool, isExport: Bool = false,timeRange:CMTimeRange = CMTimeRange.init()) {
         
+        debugStartTime = CFAbsoluteTimeGetCurrent()
+        FilterLog(message: "开始播放的系统时钟时间 \(String(describing: debugStartTime))")
+        
         playeTimeRange = timeRange
         readerAudioTrackOutput = nil
         FilterLog(message: "PQMoveInput开始")
@@ -286,7 +293,7 @@ public class PQMovieInput: ImageSource {
 
         isPlay = true
         if assetReader == nil {
-            BFLog(message: "assetReader is null!!!!!")
+            FilterLog(message: "assetReader is null!!!!!")
             return
         }
 
@@ -411,7 +418,7 @@ public class PQMovieInput: ImageSource {
             }
 
             assetReader.timeRange = playeTimeRange
-            BFLog(message: "播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end))")
+            FilterLog(message: "播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end))")
        
 
             actualStartTime = nil
@@ -446,7 +453,7 @@ public class PQMovieInput: ImageSource {
 
     func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput: AVAssetReaderOutput) {
         if !isPlay {
-            BFLog(message: "自动停到首帧的不处理音频")
+            FilterLog(message: "自动停到首帧的不处理音频")
             return
         }
         /*
@@ -490,18 +497,7 @@ public class PQMovieInput: ImageSource {
 
             // Set the timestamp of the current frame
             imageFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime))
-//        print(" Framebuffer \(imageFramebuffer.texture)")
-
-//        glBindTexture(GLenum(GL_TEXTURE_2D), imageFramebuffer.texture)
-//        if true {
-//            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR_MIPMAP_LINEAR)
-//        }
-//
-//        if true {
-//            glGenerateMipmap(GLenum(GL_TEXTURE_2D))
-//        }
-//        glBindTexture(GLenum(GL_TEXTURE_2D), 0)
-
+ 
             updateTargetsWithFramebuffer(imageFramebuffer)
 
             imageFramebuffer.unlock()
@@ -513,8 +509,12 @@ public class PQMovieInput: ImageSource {
             if runBenchmark {
                 let currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime)
                 totalFrameTimeDuringCapture += currentFrameTime
-                print("Average frame time : \(1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent)) ms")
-                print("Current frame time : \(1000.0 * currentFrameTime) ms")
+             
+                FilterLog(message:"currentTime is \(CMTimeGetSeconds(currentTime))")
+                FilterLog(message:"Average frame time : \(1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent)) ms")
+                FilterLog(message:"Current frame time : \(1000.0 * currentFrameTime) ms")
+                FilterLog(message:"totalFrameTimeDuringCapture is \(totalFrameTimeDuringCapture)")
+
             }
 
             if mAutoPause {
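
For reference, the benchmark block in the final hunk boils down to the arithmetic below: per-frame wall-clock cost is accumulated and averaged over all frames sent, then reported in milliseconds. A sketch under the assumption that it runs once per rendered frame; the counters mirror the diff's totalFramesSent / totalFrameTimeDuringCapture bookkeeping, and print stands in for FilterLog:

import Foundation

var totalFramesSent = 0
var totalFrameTimeDuringCapture = 0.0

// Call once per processed frame, passing the time captured before processing began.
func logFrameTiming(since startTime: CFAbsoluteTime) {
    let currentFrameTime = CFAbsoluteTimeGetCurrent() - startTime
    totalFramesSent += 1
    totalFrameTimeDuringCapture += currentFrameTime
    // The 1000.0 factor converts seconds to milliseconds, as in the diff's log lines.
    print("Average frame time : \(1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent)) ms")
    print("Current frame time : \(1000.0 * currentFrameTime) ms")
}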