فهرست منبع

Merge branch 'master' of https://git.yishihui.com/iOS/BFFramework
合并代码

jsonwang 3 سال پیش
والد
کامیت
2ed15b5685
24 فایل‌های تغییر یافته به همراه 444 افزوده شده و 461 حذف شده
  1. 1 2
      BFFramework.podspec
  2. 16 16
      BFFramework/Classes/EventTrack/ViewModel/PQEventTrackViewModel.swift
  3. 1 0
      BFFramework/Classes/PModels/PQVideoListModel.swift
  4. 9 2
      BFFramework/Classes/PQGPUImage/Source/iOS/MovieOutput.swift
  5. 12 12
      BFFramework/Classes/PQGPUImage/akfilters/PQBaseFilter.swift
  6. 21 21
      BFFramework/Classes/PQGPUImage/akfilters/PQGPUImageTools.swift
  7. 12 12
      BFFramework/Classes/PQGPUImage/akfilters/PQGifFilter.swift
  8. 18 18
      BFFramework/Classes/PQGPUImage/akfilters/PQImageFilter.swift
  9. 38 38
      BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift
  10. 47 47
      BFFramework/Classes/PQGPUImage/akfilters/PQMovieInput.swift
  11. 6 6
      BFFramework/Classes/PQGPUImage/akfilters/PQTextFilter.swift
  12. 4 1
      BFFramework/Classes/SelectImage/PQImageCropVC.swift
  13. 0 0
      BFFramework/Classes/SelectImage/PQImageSelectedController.swift
  14. 0 0
      BFFramework/Classes/SelectImage/PQSelecteVideoItemCell.swift
  15. 52 60
      BFFramework/Classes/SelectImage/PQUploadController.swift
  16. 2 0
      BFFramework/Classes/Stuckpoint/Controller/PQStuckPointMaterialController.swift
  17. 8 15
      BFFramework/Classes/Stuckpoint/Controller/PQStuckPointMusicContentController.swift
  18. 161 173
      BFFramework/Classes/Stuckpoint/Controller/PQStuckPointPublicController.swift
  19. 2 2
      BFFramework/Classes/Stuckpoint/View/PQStuckPointMusicContentCell.swift
  20. 29 29
      BFFramework/Classes/Stuckpoint/ViewModel/PQGPUImagePlayerView.swift
  21. 2 4
      BFFramework/Classes/Utils/PQRequestURLUtil.swift
  22. 2 2
      BFFramework/Classes/Utils/PQSingletoMemoryUtil.swift
  23. 0 0
      BFFramework/Classes/Utils/PQSingletoVideoPlayer.swift
  24. 1 1
      Example/Podfile.lock

+ 1 - 2
BFFramework.podspec

@@ -59,7 +59,6 @@ TODO: Add long description of the pod here.
   s.dependency 'WechatOpenSDK-Swift'      ,'1.8.7.1'  # 微信组件
   s.dependency 'MJRefresh'                ,'3.7.2'    # 刷新组件
   s.dependency 'LMJHorizontalScrollText'  ,'2.0.2'
-  s.dependency 'TXLiteAVSDK_Player'       ,'9.2.10637' # 腾讯播放器组件
+  s.dependency 'TXLiteAVSDK_Player'       ,'9.3.10765' # 腾讯播放器组件
   s.dependency 'Bugly'                    ,'2.5.90'   #crash log 收集
-
 end

+ 16 - 16
BFFramework/Classes/EventTrack/ViewModel/PQEventTrackViewModel.swift

@@ -200,21 +200,21 @@ public class PQEventTrackViewModel: NSObject {
         }
     }
 
-//    / 分享上报
-//    / - Parameters:
-//    /   - isShareVideo: 是否是分享视频
-//    /   - screenType: 分享场景 1-分享视频/用户 2-分享视频到朋友圈 3-分享视频到好友
-//    /   - videoId: 视频Id
-//    /   - pageSource: 页面枚举
-//    /   - recommendId: <#recommendId description#>
-//    /   - recommendLogVO: <#recommendLogVO description#>
-//    /   - abInfoData: <#abInfoData description#>
-//    /   - measureType: <#measureType description#>
-//    /   - measureId: <#measureId description#>
-//    /   - businessType: <#businessType description#>
-//    /   - targetUid: <#targetUid description#>
-//    /   - shareId: <#shareId description#>
-    public class func shareReportUpload(isShareVideo: Bool = true, screenType: Int = 1, videoId: String, pageSource: PAGESOURCE, recommendId: String?, recommendLogVO: String?,flowPool:String?, abInfoData: String?, measureType: Int?, measureId: Int?, businessType: businessType?, targetUid: Int?, shareId: String = "",extParams: [String: Any]? = nil) {
+    // 分享上报
+    // - Parameters:
+    //   - isShareVideo: 是否是分享视频
+    //   - screenType: 分享场景 1-分享视频/用户 2-分享视频到朋友圈 3-分享视频到好友
+    //   - videoId: 视频Id
+    //   - pageSource: 页面枚举
+    //   - recommendId: <#recommendId description#>
+    //   - recommendLogVO: <#recommendLogVO description#>
+    //   - abInfoData: <#abInfoData description#>
+    //   - measureType: <#measureType description#>
+    //   - measureId: <#measureId description#>
+    //   - businessType: <#businessType description#>
+    //   - targetUid: <#targetUid description#>
+    //   - shareId: <#shareId description#>
+    public class func shareReportUpload(isShareVideo: Bool = true, screenType: Int = 1, videoId: String, pageSource: PAGESOURCE, recommendId: String?, recommendLogVO: String?,flowPool:String?, abInfoData: String?, measureType: Int?, measureId: Int?, businessType: businessType?, targetUid: Int?, shareId: String = "",playId:String?,extParams: [String: Any]? = nil) {
         DispatchQueue.global().async {
             var url: String = PQENVUtil.shared.longvideoapi
             switch screenType {
@@ -227,7 +227,7 @@ public class PQEventTrackViewModel: NSObject {
             default:
                 break
             }
-            var params: [String: Any] = ["type": isShareVideo ? "1" : "2", "videoId": videoId, "pageSource": pageSource.rawValue, "playId": PQSingletoVideoPlayer.shared.playId, "targetUid": targetUid ?? 0, "shareDepth": "0"]
+            var params: [String: Any] = ["type": isShareVideo ? "1" : "2", "videoId": videoId, "pageSource": pageSource.rawValue, "playId": playId ?? "", "targetUid": targetUid ?? 0, "shareDepth": "0"]
             if extParams != nil {
                 params["extParams"] = dictionaryToJsonString(extParams!)
             }

+ 1 - 0
BFFramework/Classes/PModels/PQVideoListModel.swift

@@ -120,6 +120,7 @@ public protocol BFVideoItemProtocol {
     var extParams:String { get set }
     // 话题信息
     var topicData: [String: Any]? { get set }
+    var uplpadImage: UIImage? { get set } // 上传的图片封面
 }
 
 open class PQVideoListModel: BFBaseModel, BFVideoItemProtocol {

+ 9 - 2
BFFramework/Classes/PQGPUImage/Source/iOS/MovieOutput.swift

@@ -63,7 +63,9 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
         imageProcessingShareGroup = sharedImageProcessingContext.context.sharegroup
         let movieProcessingContext = OpenGLContext()
 
-        self.size = size
+        
+        self.size = Size(width: size.width == 0 ? 1080 : size.width,
+                           height: size.height == 0 ? 1080 : size.height)
 
         assetWriter = try AVAssetWriter(url: URL, fileType: fileType)
 
@@ -288,8 +290,13 @@ public class MovieOutput: ImageConsumer, AudioEncodingTarget {
         let bufferSize = GLSize(size)
         var cachedTextureRef: CVOpenGLESTexture?
         _ = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, movieProcessingContext.coreVideoTextureCache, pixelBuffer, nil, GLenum(GL_TEXTURE_2D), GL_RGBA, bufferSize.width, bufferSize.height, GLenum(GL_BGRA), GLenum(GL_UNSIGNED_BYTE), 0, &cachedTextureRef)
-        let cachedTexture = CVOpenGLESTextureGetName(cachedTextureRef!)
+        
+        //cachedTextureRef可能为空,导致使用 cachedTextureRef! 语法崩溃
+        guard let newCachedTextureRef = cachedTextureRef else {
+            return
+        }
 
+        let cachedTexture = CVOpenGLESTextureGetName(newCachedTextureRef)
         renderFramebuffer = try Framebuffer(context: movieProcessingContext, orientation: .portrait, size: bufferSize, textureOnly: false, overriddenTexture: cachedTexture)
 
         renderFramebuffer.activateFramebufferForRendering()

+ 12 - 12
BFFramework/Classes/PQGPUImage/akfilters/PQBaseFilter.swift

@@ -12,18 +12,18 @@ import BFCommonKit
 // 时间精度
 public let BASE_FILTER_TIMESCALE:Int32 = 1_000_000
 
-// 是否打印 filter 相关 LOG 默认为关闭状态 (0 为关闭) FilterLog 会影响性能不查看时一定要关闭
-let BASE_FILTER_ENABLE_LOG: Int = 0
-/** 打印 */
-func FilterLog<T>( _ type : Int = 0,message: T) {
-    
-    if(type == 0){
-        if BASE_FILTER_ENABLE_LOG == 1 {
-            print(message)
-        }
-    }
-  
-}
+//// 是否打印 filter 相关 LOG 默认为关闭状态 (0 为关闭) BFLog 会影响性能不查看时一定要关闭
+//let BASE_FILTER_ENABLE_LOG: Int = 0
+///** 打印 */
+//func FilterLog<T>( _ type : Int = 0,message: T) {
+//    
+//    if(type == 0){
+//        if BASE_FILTER_ENABLE_LOG == 1 {
+//            print(message)
+//        }
+//    }
+//  
+//}
 
 open class PQBaseFilter: BasicOperation {
     

+ 21 - 21
BFFramework/Classes/PQGPUImage/akfilters/PQGPUImageTools.swift

@@ -48,9 +48,9 @@ open class PQGPUImageTools: NSObject {
         glDeleteBuffers(1, &deletedVBO)
     }
 
-    // FilterLog XXXXX 使用方法后 返回的 imageTexture 要清空纹理,注意不清空显存会暴增     glDeleteTextures(1,&imageTexture)
+    // BFLog XXXXX 使用方法后 返回的 imageTexture 要清空纹理,注意不清空显存会暴增     glDeleteTextures(1,&imageTexture)
     class func setupTexture(image: CGImage) -> GLuint {
-        FilterLog(2, message: "FilterLog =====width :\(image.width) height: \(image.height)")
+        BFLog(2, message: "BFLog =====width :\(image.width) height: \(image.height)")
 
         let widthOfImage = GLint(image.width)
         let heightOfImage = GLint(image.height)
@@ -119,7 +119,7 @@ open class PQGPUImageTools: NSObject {
                     }
                 }
             }
-            FilterLog(2, message: "FilterLog2222 =====")
+            BFLog(2, message: "BFLog2222 =====")
 
             //        sharedImageProcessingContext.runOperationSynchronously{
             //    CFAbsoluteTime elapsedTime, startTime = CFAbsoluteTimeGetCurrent();
@@ -142,10 +142,10 @@ open class PQGPUImageTools: NSObject {
                 dataFromImageDataProvider = data
                 imageData = UnsafeMutablePointer<GLubyte>(mutating: CFDataGetBytePtr(dataFromImageDataProvider))
             }
-            FilterLog(2, message: "FilterLog333333 =====")
+            BFLog(2, message: "BFLog333333 =====")
 
             glEnable(GLenum(GL_TEXTURE_2D))
-            FilterLog(2, message: "FilterLog44444 =====")
+            BFLog(2, message: "BFLog44444 =====")
 
             /**
              *  GL_TEXTURE_2D表示操作2D纹理
@@ -154,10 +154,10 @@ open class PQGPUImageTools: NSObject {
              */
 
             glGenTextures(1, &textureID)
-            FilterLog(2, message: "FilterLog5555 =====\(textureID)")
+            BFLog(2, message: "BFLog5555 =====\(textureID)")
 
             glBindTexture(GLenum(GL_TEXTURE_2D), textureID)
-            FilterLog(2, message: "FilterLog6666 =====\(textureID)")
+            BFLog(2, message: "BFLog6666 =====\(textureID)")
 
             /**
              *  纹理过滤函数
@@ -187,29 +187,29 @@ open class PQGPUImageTools: NSObject {
              * 参数8:type
              * 参数9:纹理数据
              */
-            FilterLog(2, message: "载入纹理 =====")
-            FilterLog(2, message: "GL_TEXTURE_2D =====\(GL_TEXTURE_2D)")
-            FilterLog(2, message: "GL_RGBA =====\(GL_RGBA)")
-            FilterLog(2, message: "widthOfImage =====\(widthOfImage)")
-            FilterLog(2, message: "heightOfImage =====\(heightOfImage)")
-            FilterLog(2, message: "GL_UNSIGNED_BYTE =====\(GL_UNSIGNED_BYTE)")
-            FilterLog(2, message: "imageData =====\(String(describing: imageData))")
-            FilterLog(2, message: "GLenum(GL_TEXTURE_2D) =====\(GLenum(GL_TEXTURE_2D))")
-            FilterLog(2, message: "GLenum(format) =====\(GLenum(format))")
-            FilterLog(2, message: "GLenum(GL_UNSIGNED_BYTE) =====\(GLenum(GL_UNSIGNED_BYTE))")
+            BFLog(2, message: "载入纹理 =====")
+            BFLog(2, message: "GL_TEXTURE_2D =====\(GL_TEXTURE_2D)")
+            BFLog(2, message: "GL_RGBA =====\(GL_RGBA)")
+            BFLog(2, message: "widthOfImage =====\(widthOfImage)")
+            BFLog(2, message: "heightOfImage =====\(heightOfImage)")
+            BFLog(2, message: "GL_UNSIGNED_BYTE =====\(GL_UNSIGNED_BYTE)")
+            BFLog(2, message: "imageData =====\(String(describing: imageData))")
+            BFLog(2, message: "GLenum(GL_TEXTURE_2D) =====\(GLenum(GL_TEXTURE_2D))")
+            BFLog(2, message: "GLenum(format) =====\(GLenum(format))")
+            BFLog(2, message: "GLenum(GL_UNSIGNED_BYTE) =====\(GLenum(GL_UNSIGNED_BYTE))")
 
             glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, widthToUseForTexture, heightToUseForTexture, 0, GLenum(format), GLenum(GL_UNSIGNED_BYTE), imageData)
 
             // 结束后要做清理
-            FilterLog(2, message: "结束后要做清理 =====")
-            FilterLog(2, message: "GLenum(GL_TEXTURE_2D) =====\(GLenum(GL_TEXTURE_2D))")
+            BFLog(2, message: "结束后要做清理 =====")
+            BFLog(2, message: "GLenum(GL_TEXTURE_2D) =====\(GLenum(GL_TEXTURE_2D))")
 
             glBindTexture(GLenum(GL_TEXTURE_2D), 0) // 解绑
 
-            FilterLog(2, message: "结束后要做清理1111111 =====")
+            BFLog(2, message: "结束后要做清理1111111 =====")
             imageData.deallocate()
 
-            FilterLog(2, message: "textureID =====\(textureID)")
+            BFLog(2, message: "textureID =====\(textureID)")
         }
 
         return textureID

+ 12 - 12
BFFramework/Classes/PQGPUImage/akfilters/PQGifFilter.swift

@@ -33,7 +33,7 @@ open class PQGifFilter: PQBaseFilter {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
         mSticker = sticker
 
-        FilterLog(2, message: "gif file path \(mSticker?.locationPath ?? "")")
+        BFLog(2, message: "gif file path \(mSticker?.locationPath ?? "")")
         var gifData: Data!
         autoreleasepool {
             do {
@@ -45,7 +45,7 @@ open class PQGifFilter: PQBaseFilter {
                     if (!gifFilePath.contains("var/mobile/Media")) {
                         gifFilePath = documensDirectory + gifFilePath
                     }
-                    FilterLog(message: "gifFilePath is \(gifFilePath)")
+                    BFLog(message: "gifFilePath is \(gifFilePath)")
                     gifData = try Data(contentsOf: URL(fileURLWithPath: gifFilePath))
                 }
                
@@ -56,14 +56,14 @@ open class PQGifFilter: PQBaseFilter {
         }
 
         if gifData == nil {
-            FilterLog(message: "gif数据有问题!")
+            BFLog(message: "gif数据有问题!")
             return
         }
         PQPHAssetVideoParaseUtil.parasGIFImage(data: gifData) { [weak self] _, images, duration in
 
             self?.mGifImages = images ?? []
             self?.mDuration = duration ?? 0
-            FilterLog(2, message: "gif 原始时长 \(String(describing: duration)) 逻辑时长: \(String(describing: self?.mSticker?.aptDuration)) 帧数:\(String(describing: images?.count))")
+            BFLog(2, message: "gif 原始时长 \(String(describing: duration)) 逻辑时长: \(String(describing: self?.mSticker?.aptDuration)) 帧数:\(String(describing: images?.count))")
 
             if images!.count > 0 {
                 self?.gifSize = images!.first!.size
@@ -75,7 +75,7 @@ open class PQGifFilter: PQBaseFilter {
 
             if self?.mSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue || self?.mSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
                 self?.delayTime = (self?.mDuration ?? 1) / Double(self?.mGifImages.count ?? 1)
-                FilterLog(message: "正常速度每一帧的时间:\(String(describing: self?.delayTime))")
+                BFLog(message: "正常速度每一帧的时间:\(String(describing: self?.delayTime))")
             }
             sharedImageProcessingContext.runOperationSynchronously {
                 // ( 提前渲染一帧
@@ -91,23 +91,23 @@ open class PQGifFilter: PQBaseFilter {
     func updateImages(_ currTime: Float64) {
         autoreleasepool {
             if mGifImages.count == 0 {
-                FilterLog(2, message: "gif 文件有问题 一帧都没有")
+                BFLog(2, message: "gif 文件有问题 一帧都没有")
                 return
             }
             if delayTime <= 0 {
-                FilterLog(2, message: "gif 时间计算出错")
+                BFLog(2, message: "gif 时间计算出错")
                 return
             }
             // 判断显示哪一帧
             var gifIndex = delayTime <= 0 ? 0 : Int(((currTime >= beginTime) ? currTime - beginTime : currTime) / delayTime)
             if mSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue && gifIndex >= mGifImages.count { // 定帧
-                FilterLog(2, message: "定帧效果 \(gifIndex)")
+                BFLog(2, message: "定帧效果 \(gifIndex)")
                 imageTexture = PQGPUImageTools.setupTexture(image: (mGifImages.last?.cgImage)!)
                 return
             }
 
             if gifIndex >= mGifImages.count && mSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue { // 自动循环重新计算开始时间 达到循环效果
-                FilterLog(2, message: "自动循环效果")
+                BFLog(2, message: "自动循环效果")
                 gifIndex = gifIndex % mGifImages.count
             }
 
@@ -143,7 +143,7 @@ open class PQGifFilter: PQBaseFilter {
             let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
 
             let currTime = CMTimeGetSeconds(CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale))
-            FilterLog(2, message: "gif filter 当前时间: \(currTime) ")
+            BFLog(2, message: "gif filter 当前时间: \(currTime) ")
 
             // 原有画布
             renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: inputSize, stencil: false)
@@ -157,7 +157,7 @@ open class PQGifFilter: PQBaseFilter {
             releaseIncomingFramebuffers()
 
             if currTime >= mSticker!.timelineIn, currTime <= mSticker!.timelineOut {
-                FilterLog(2, message: " 显示gif当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  ")
+                BFLog(2, message: " 显示gif当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  ")
 
                 // 绘制 image
                 let textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: mSticker!, textureSize: gifSize, cannvasSize: inputSize)
@@ -179,7 +179,7 @@ open class PQGifFilter: PQBaseFilter {
                 // XXXXXXX 清空纹理,注意不清空显存会暴增
                 glDeleteTextures(1, &imageTexture)
             } else {
-                FilterLog(2, message: " 不显示gif时: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)")
+                BFLog(2, message: " 不显示gif时: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)")
             }
         }
     }

+ 18 - 18
BFFramework/Classes/PQGPUImage/akfilters/PQImageFilter.swift

@@ -20,7 +20,7 @@ open class PQImageFilter: PQBaseFilter {
     //临时方案,是否是卡点模式
     public var isPointModel:Bool = false
     deinit {
-        FilterLog(1, message: "image filter deinit 析构掉~")
+        BFLog(1, message: "image filter deinit 析构掉~")
         newImage = nil
 
         if imageTexture != 0 {
@@ -41,7 +41,7 @@ open class PQImageFilter: PQBaseFilter {
             newImage = UIImage(data: sticker.originalData!)
         } else {
             if mSticker!.locationPath.count == 0 {
-                FilterLog(2, message: "图片数据为空,创建失败")
+                BFLog(2, message: "图片数据为空,创建失败")
                 
                 return
             }
@@ -49,7 +49,7 @@ open class PQImageFilter: PQBaseFilter {
             if !imageFilePath.contains("var/mobile/Media") {
                 imageFilePath = documensDirectory + imageFilePath
             }
-            FilterLog(message: "imageFilePath is \(imageFilePath)")
+            BFLog(message: "imageFilePath is \(imageFilePath)")
             newImage = UIImage(contentsOfFile: imageFilePath)
         }
         // 保证是正方向
@@ -75,7 +75,7 @@ open class PQImageFilter: PQBaseFilter {
             let maxLength = max(showUISize.width, showUISize.height)
             newImage = newImage?.nx_scaleWithMaxLength(maxLength: CGFloat(maxLength * UIScreen.main.scale))
 
-            FilterLog(message: "newImage is \(newImage?.size.width ?? 0) \(newImage?.size.height ?? 0)")
+            BFLog(message: "newImage is \(newImage?.size.width ?? 0) \(newImage?.size.height ?? 0)")
         }
 
     
@@ -85,30 +85,30 @@ open class PQImageFilter: PQBaseFilter {
                 if !imageFilePath.contains("var/mobile/Media") {
                     imageFilePath = documensDirectory + imageFilePath
                 }
-                FilterLog(message: "imageFilePath is \(imageFilePath)")
+                BFLog(message: "imageFilePath is \(imageFilePath)")
                 if FileManager.default.fileExists(atPath: imageFilePath) {
                     // 有可能是 WEBP
                     let fileData: Data = try! Data(contentsOf: URL(fileURLWithPath: imageFilePath))
                     if fileData.count != 0, fileData.isWebPFormat {
                         newImage = WebPProcessor.default.process(item: ImageProcessItem.data(fileData), options: KingfisherParsedOptionsInfo([.onlyLoadFirstFrame, .scaleFactor(1)]))
                     }
-                } else { FilterLog(2, message: "文件不存在") }
+                } else { BFLog(2, message: "文件不存在") }
             }
         }
 
         if newImage?.cgImage != nil {
-            FilterLog(message: "提前加载图片。。。。timelineIn : \(String(describing: mSticker?.timelineIn)) timelineOut :\(String(describing: mSticker?.timelineOut)) \(String(describing: mSticker?.locationPath))")
+            BFLog(message: "提前加载图片。。。。timelineIn : \(String(describing: mSticker?.timelineIn)) timelineOut :\(String(describing: mSticker?.timelineOut)) \(String(describing: mSticker?.locationPath))")
 
             //            imageTexture = PQGPUImageTools.setupTexture(image: newImage!.cgImage!)
 
-        } else { FilterLog(2, message: "image filter init error image data is nil!") }
+        } else { BFLog(2, message: "image filter init error image data is nil!") }
 
         // 保证是16的公倍数
         let aptImageSize = NXAVUtil.aptSize(newImage?.size ?? CGSize.zero)
         if !__CGSizeEqualToSize(aptImageSize, newImage?.size ?? CGSize.zero) {
-            FilterLog(2, message: "原图大小宽度不是16的倍数 \(newImage!.size)")
+            BFLog(2, message: "原图大小宽度不是16的倍数 \(newImage!.size)")
             //            newImage = newImage?.nx_scaleToSize(size: aptImageSize)
-            FilterLog(2, message: "归16后大小 \(newImage!.size)")
+            BFLog(2, message: "归16后大小 \(newImage!.size)")
         }
     }
  
@@ -132,23 +132,23 @@ open class PQImageFilter: PQBaseFilter {
         releaseIncomingFramebuffers()
 
         if newImage == nil {
-            FilterLog(2, message: "图片数据有错误!!!! 检查数据\(mSticker!.locationPath)")
+            BFLog(2, message: "图片数据有错误!!!! 检查数据\(mSticker!.locationPath)")
             return
         }
 
-        FilterLog(2, message: " image filter 当前时间: \(currTime) \(newImage!.size)")
+        BFLog(2, message: " image filter 当前时间: \(currTime) \(newImage!.size)")
         
         if(isPointModel){
             
-            FilterLog(2, message: " 显示图片当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  \(String(describing: newImage?.size))")
+            BFLog(2, message: " 显示图片当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  \(String(describing: newImage?.size))")
             // 取纹理坐标
             var textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: mSticker!, textureSize: newImage!.size, cannvasSize: inputSize)
 
-            FilterLog(2, message: "textureCoordinates is \(textureCoordinates) image size :\(newImage!.size) cannvasSize:\(inputSize)  files path is \(mSticker?.locationPath)")
+            BFLog(2, message: "textureCoordinates is \(textureCoordinates) image size :\(newImage!.size) cannvasSize:\(inputSize)  files path is \(mSticker?.locationPath)")
 
             // imageTexture 有可能被析构导致黑屏
             if imageTexture == 0 && newImage?.cgImage != nil {
-                FilterLog(2, message: "imageTexture is error !!!!!重新创建")
+                BFLog(2, message: "imageTexture is error !!!!!重新创建")
                 imageTexture = PQGPUImageTools.setupTexture(image: newImage!.cgImage!)
             }
 
@@ -189,15 +189,15 @@ open class PQImageFilter: PQBaseFilter {
             }
         }else{
             if currTime >= mSticker!.timelineIn && currTime <= mSticker!.timelineOut {
-                FilterLog(2, message: " 显示图片当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  \(String(describing: newImage?.size))")
+                BFLog(2, message: " 显示图片当前时间: \(currTime) 开始时间:\(mSticker!.timelineIn) 结束时间:\(mSticker!.timelineOut)  \(String(describing: newImage?.size))")
                 // 取纹理坐标
                 var textureCoordinates = PQGPUImageTools.getTextureCoordinates(sticker: mSticker!, textureSize: newImage!.size, cannvasSize: inputSize)
 
-                FilterLog(2, message: "textureCoordinates is \(textureCoordinates) image size :\(newImage!.size) cannvasSize:\(inputSize)  files path is \(mSticker?.locationPath)")
+                BFLog(2, message: "textureCoordinates is \(textureCoordinates) image size :\(newImage!.size) cannvasSize:\(inputSize)  files path is \(mSticker?.locationPath)")
 
                 // imageTexture 有可能被析构导致黑屏
                 if imageTexture == 0 && newImage?.cgImage != nil {
-                    FilterLog(2, message: "imageTexture is error !!!!!重新创建")
+                    BFLog(2, message: "imageTexture is error !!!!!重新创建")
                     imageTexture = PQGPUImageTools.setupTexture(image: newImage!.cgImage!)
                 }
 

+ 38 - 38
BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift

@@ -119,7 +119,7 @@ public class PQMovieFilter: PQBaseFilter {
     public var isPointModel:Bool = false
     
     deinit {
-        FilterLog(1, message: "movie filter release")
+        BFLog(1, message: "movie filter release")
         clearData()
     }
     
@@ -150,7 +150,7 @@ public class PQMovieFilter: PQBaseFilter {
         super.init(fragmentShader: PassthroughFragmentShader, numberOfInputs: 1)
         moveSticker = movieSticker
         stickerInfo = movieSticker
-        FilterLog(2, message: "资源裁剪的 开始时间\(moveSticker!.model_in)  结束时间: \(moveSticker!.out)")
+        BFLog(2, message: "资源裁剪的 开始时间\(moveSticker!.model_in)  结束时间: \(moveSticker!.out)")
         if moveSticker!.videoIsCrop() {
             requestedStartTime = CMTimeMake(value: Int64(moveSticker!.model_in) * Int64(BASE_FILTER_TIMESCALE), timescale: BASE_FILTER_TIMESCALE)
         }
@@ -165,14 +165,14 @@ public class PQMovieFilter: PQBaseFilter {
             if (!videoFilePath.contains("var/mobile/Media")) && (!videoFilePath.contains("BFFramework_Resources.bundle")) {
                 videoFilePath = documensDirectory + videoFilePath
             }
-            FilterLog(2, message: "视频地址 \(String(describing: videoFilePath))")
+            BFLog(2, message: "视频地址 \(String(describing: videoFilePath))")
             try loadAsset(url: URL(fileURLWithPath: videoFilePath), videoComposition: nil)
             
         } catch {
             NXLog(message: "load asset  with error: \(error)")
         }
         
-        FilterLog(2, message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut)  裁剪开始时间:\(movieSticker.model_in)  裁剪结束时间:\(movieSticker.out)  路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
+        BFLog(2, message: " move FILTER 初始化 开始显示时间:\(movieSticker.timelineIn) 结束显示时间:\(movieSticker.timelineOut)  裁剪开始时间:\(movieSticker.model_in)  裁剪结束时间:\(movieSticker.out)  路径:\(String(describing: movieSticker.locationPath)) 时长 \(CMTimeGetSeconds(asset?.duration ?? .zero))")
         
         startReading()
  
@@ -189,11 +189,11 @@ public class PQMovieFilter: PQBaseFilter {
         inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
         
         currentTime = CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale)
-        FilterLog(2, message: "wwwwwwwww duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
+        BFLog(2, message: "wwwwwwwww duration is currentSampleTime is \(CMTimeGetSeconds(currentTime))")
         
         renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: mImageOrientation, size: inputSize, stencil: false)
  
-        FilterLog(message: "maxTextureSize is\(        renderFramebuffer.context.maximumTextureSizeForThisDevice)")
+        BFLog(message: "maxTextureSize is\(        renderFramebuffer.context.maximumTextureSizeForThisDevice)")
       
         
         let textureProperties = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(mImageOrientation).textureCoordinates(), texture: inputFramebuffer.texture)
@@ -204,10 +204,10 @@ public class PQMovieFilter: PQBaseFilter {
                              vertexBufferObject: sharedImageProcessingContext.standardImageVBO, inputTextures: [textureProperties])
         releaseIncomingFramebuffers()
         
-        FilterLog(2, message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
+        BFLog(2, message: "开始显示 movefilter 了 开始\(String(describing: moveSticker?.timelineIn)) 结束 :\(String(describing: moveSticker?.timelineOut)) currentTime \(CMTimeGetSeconds(currentTime)) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) ")
         
         if enableSeek {
-            FilterLog(2, message: "seek 到 \(CMTimeGetSeconds(currentTime))  ")
+            BFLog(2, message: "seek 到 \(CMTimeGetSeconds(currentTime))  ")
             resetRangeTime(startTime: currentTime)
             enableSeek = false
         }
@@ -228,7 +228,7 @@ public class PQMovieFilter: PQBaseFilter {
             var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
             showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
             
-            FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
+            BFLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
             readNextVideoFrame(showTimeStamp: showtimeStamp)
             
             framebufferIndex = framebufferIndex + 1
@@ -246,12 +246,12 @@ public class PQMovieFilter: PQBaseFilter {
                 var showtimeStamp = CMTime(value:targetTime, timescale: BASE_FILTER_TIMESCALE)
                 showtimeStamp = CMTimeAdd(showtimeStamp, stickerModelIn)
                 
-                FilterLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
+                BFLog(message: "showtimeStamp is \(CMTimeGetSeconds(showtimeStamp))")
                 readNextVideoFrame(showTimeStamp: showtimeStamp)
                 
                 framebufferIndex = framebufferIndex + 1
             }else{
-                FilterLog(message: "movefilter 已经不显示了")
+                BFLog(message: "movefilter 已经不显示了")
             }
         }
        
@@ -261,7 +261,7 @@ public class PQMovieFilter: PQBaseFilter {
     // 原视频角度类型
     public func moveAssetRotation() -> NXGPUImageRotationMode {
         let Angle: Int = PQPHAssetVideoParaseUtil.videoRotationAngle(assert: asset!)
-        //        FilterLog(2, message: "原视频素材Angle is \(Angle)")
+        //        BFLog(2, message: "原视频素材Angle is \(Angle)")
         // see https://my.oschina.net/NycoWang/blog/904105
         switch Angle {
         case -90, 270:
@@ -286,14 +286,14 @@ public class PQMovieFilter: PQBaseFilter {
             stickerFPS = asset!.tracks(withMediaType: .video).first?.nominalFrameRate ?? 0.0
             let bitRate = asset!.tracks(withMediaType: .video).first?.estimatedDataRate
             
-            FilterLog(2, message: "move filter asset  fps is \(String(describing: stickerFPS))  bit rate is \(bitRate ?? 0)")
+            BFLog(2, message: "move filter asset  fps is \(String(describing: stickerFPS))  bit rate is \(bitRate ?? 0)")
             
             self.videoComposition = videoComposition
             self.playAtActualSpeed = playAtActualSpeed
             
             yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
             self.audioSettings = audioSettings
-        } else { FilterLog(2, message: "asset is nil") }
+        } else { BFLog(2, message: "asset is nil") }
     }
     
     // MARK: -
@@ -310,7 +310,7 @@ public class PQMovieFilter: PQBaseFilter {
             let videoTrack: AVAssetTrack = asset!.tracks(withMediaType: .video).first!
             
             videoSize = videoTrack.naturalSize
-            FilterLog(2, message: "视频大小为 : \(videoSize)")
+            BFLog(2, message: "视频大小为 : \(videoSize)")
             
             if videoComposition == nil {
                 let readerVideoTrackOutput = AVAssetReaderTrackOutput(track: asset!.tracks(withMediaType: .video).first!, outputSettings: outputSettings)
@@ -324,23 +324,23 @@ public class PQMovieFilter: PQBaseFilter {
             }
             assetReader!.timeRange = CMTimeRange(start: CMTime(value: Int64((moveSticker?.model_in ?? 0) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE), duration: CMTimeMake(value: Int64(((moveSticker?.out ?? 0) - (moveSticker?.model_in ?? 0)) * Float64(BASE_FILTER_TIMESCALE)), timescale: BASE_FILTER_TIMESCALE))
             
-            FilterLog(2, message: "set   assetReader!.timeRange is \(assetReader!.timeRange)")
+            BFLog(2, message: "set   assetReader!.timeRange is \(assetReader!.timeRange)")
             
             return assetReader
         } catch {
-            FilterLog(message:"movie filter ERROR: Unable to create asset reader: \(error)")
+            BFLog(message:"movie filter ERROR: Unable to create asset reader: \(error)")
         }
         return nil
     }
     
     public func startReading() {
-        FilterLog(2, message: "开始初始化")
+        BFLog(2, message: "开始初始化")
         mach_timebase_info(&timebaseInfo)
         
         assetReader?.cancelReading()
         
         guard let assetReader = createReader() else {
-            FilterLog(message: "createReader is error")
+            BFLog(message: "createReader is error")
             return // A return statement in this frame will end thread execution.
         }
         
@@ -350,19 +350,19 @@ public class PQMovieFilter: PQBaseFilter {
                     #if DEBUG
                     cShowHUB(superView: nil, msg: "\(String(describing: assetReader.error))")
                     #endif
-                    FilterLog(message:"ERROR: Unable to start reading: \(String(describing: assetReader.error))")
+                    BFLog(message:"ERROR: Unable to start reading: \(String(describing: assetReader.error))")
                     return
                 }
             }
         } catch {
-            FilterLog(message:"ERROR: Unable to start reading: \(error)")
+            BFLog(message:"ERROR: Unable to start reading: \(error)")
             return
         }
     }
     
     // 设置解码开始时间
     public func resetRangeTime(startTime: CMTime = .zero) {
-        FilterLog(2, message: "\(String(describing: moveSticker?.locationPath)) 取帧的时间 \(CMTimeGetSeconds(requestedStartTime ?? .zero))")
+        BFLog(2, message: "\(String(describing: moveSticker?.locationPath)) 取帧的时间 \(CMTimeGetSeconds(requestedStartTime ?? .zero))")
         requestedStartTime = startTime
         startReading()
     }
@@ -378,12 +378,12 @@ public class PQMovieFilter: PQBaseFilter {
             
             // 最后一帧的PTS > 要显示的目标时间 就不从解码器要数据,直接返回 view 不刷新 只有慢速时会调用
             //        if CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) + (stickerInfo?.model_in ?? 0) && CMTimeGetSeconds(targetTimeStamp) != 0 {
-            FilterLog(2, message: "28797speedRate  目标显示时间 \(String(format: "%.6f", (CMTimeGetSeconds(showTimeStamp)))) 最后显示的时间 \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) speedRate is \(stickerInfo!.speedRate)")
+            BFLog(2, message: "28797speedRate  目标显示时间 \(String(format: "%.6f", (CMTimeGetSeconds(showTimeStamp)))) 最后显示的时间 \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 裁剪开始时间:\(String(describing: moveSticker?.model_in)) speedRate is \(stickerInfo!.speedRate)")
             return
         }
         
         if assetReader == nil {
-            FilterLog(2, message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
+            BFLog(2, message: "assetReader is error 出现严重错误!!!!!!!!!!!!!!")
             return
         }
         
@@ -404,7 +404,7 @@ public class PQMovieFilter: PQBaseFilter {
             count = count + 1
             sampleBuffer = videoTrackOutput!.copyNextSampleBuffer()
             if sampleBuffer == nil {
-                FilterLog(2, message: " copyNextSampleBuffer is nil error!!!")
+                BFLog(2, message: " copyNextSampleBuffer is nil error!!!")
                 return
             }
             targetTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer!)
@@ -413,12 +413,12 @@ public class PQMovieFilter: PQBaseFilter {
             if sampleBuffer != nil && CMTimeGetSeconds(targetTimeStamp) >= CMTimeGetSeconds(showTimeStamp) {
                 let endDecoderTime: TimeInterval = Date().timeIntervalSince1970
                 
-                FilterLog(2, message: " 28797speedRate is \(stickerInfo!.speedRate) 当前主线时间为:\(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 要命中时间:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 差值\(CMTimeGetSeconds(targetTimeStamp) - (stickerInfo?.model_in ?? 0)) 查找耗时为:\(String(format: "%.6f", TimeInterval(endDecoderTime - beginDecoderTime))) 查找次数\(count)  进场时间: \(String(describing: moveSticker?.timelineIn))  裁剪开始时间:\(String(describing: moveSticker?.model_in)) 裁剪结束时间:\(String(describing: moveSticker?.out)) 原视频时长: \(CMTimeGetSeconds(asset?.duration ?? .zero))")
+                BFLog(1, message: " 28797speedRate is \(stickerInfo!.speedRate) 当前主线时间为:\(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out))) 要命中时间:\(CMTimeGetSeconds(showTimeStamp)) 命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp))) 差值\(CMTimeGetSeconds(targetTimeStamp) - (stickerInfo?.model_in ?? 0)) 查找耗时为:\(String(format: "%.6f", TimeInterval(endDecoderTime - beginDecoderTime))) 查找次数\(count)  进场时间: \(String(describing: moveSticker?.timelineIn))  裁剪开始时间:\(String(describing: moveSticker?.model_in)) 裁剪结束时间:\(String(describing: moveSticker?.out)) 原视频时长: \(CMTimeGetSeconds(asset?.duration ?? .zero))")
                 break
                 
             }
             //            else {
-            //                FilterLog(2, message: "不丢帧显示  查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out)))  命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
+            //                BFLog(2, message: "不丢帧显示  查找的帧时间为:\(String(format: "%.6f", CMTimeGetSeconds(showTimeStamp).truncatingRemainder(dividingBy: moveSticker!.out)))  命中时间为: \(String(format: "%.6f", CMTimeGetSeconds(targetTimeStamp)))")
             ////                usleep(2)
             ////                sharedImageProcessingContext.runOperationSynchronously {
             ////                    self.renderPixelBuffler(movieFrame: CMSampleBufferGetImageBuffer(sampleBuffer!)!, withSampleTime: currentTime)
@@ -437,23 +437,23 @@ public class PQMovieFilter: PQBaseFilter {
             }
             return
         } else {
-            FilterLog(2, message: "sampleBuffer is  nil data is error self.assetReader?.status is \(String(describing: assetReader?.status))")
+            BFLog(2, message: "sampleBuffer is  nil data is error self.assetReader?.status is \(String(describing: assetReader?.status))")
         }
         // 二, 已经播放完一次
         if assetReader?.status == .completed {
-            FilterLog(message: "已经播放完一次")
+            BFLog(message: "已经播放完一次")
             // 1 自动循环模式 重头开始循环
             if moveSticker?.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
-                FilterLog(2, message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
+                BFLog(2, message: "自动循环模式 重头开始循环 \(CMTimeGetSeconds(currentTime))")
                 
                 startReading()
                 
             } else if moveSticker?.materialDurationFit?.fitType == adapterMode.staticFrame.rawValue {
                 // 2),定帧处理
                 if lastImageBuffer != nil {
-                    FilterLog(2, message: "处理显示定帧")
+                    BFLog(2, message: "处理显示定帧")
                     let currTime = CMTimeGetSeconds(currentTime)
-                    FilterLog(2, message: "process time is \(currTime)")
+                    BFLog(2, message: "process time is \(currTime)")
                     sharedImageProcessingContext.runOperationSynchronously { [weak self] in
                         if let imgBuffer = self?.lastImageBuffer {
                             renderPixelBuffler(movieFrame: imgBuffer, withSampleTime: currentTime)
@@ -472,7 +472,7 @@ public class PQMovieFilter: PQBaseFilter {
 //        let image = UIImage.init(pixelBuffer: movieFrame)
 //        image!.saveImage(currentImage: image!, persent: 0.5, imageName: "\(CMTimeGetSeconds(withSampleTime))")
         // NV12 会返回 2,Y分量和UV 分量, 如果buffer 是BGRA 则返回0
-        FilterLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
+        BFLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
         
         let bufferHeight = CVPixelBufferGetHeight(movieFrame)
         let bufferWidth = CVPixelBufferGetWidth(movieFrame)
@@ -490,7 +490,7 @@ public class PQMovieFilter: PQBaseFilter {
         
         if luminanceGLTextureResult != kCVReturnSuccess || luminanceGLTexture == nil {
         
-            FilterLog(message:"ERROR: Could not create LuminanceGLTexture")
+            BFLog(message:"ERROR: Could not create LuminanceGLTexture")
             return
         }
         
@@ -505,7 +505,7 @@ public class PQMovieFilter: PQBaseFilter {
         do {
             luminanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: luminanceTexture)
         } catch {
-            FilterLog(message:"ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
+            BFLog(message:"ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
             return
         }
         luminanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
@@ -518,7 +518,7 @@ public class PQMovieFilter: PQBaseFilter {
         let chrominanceGLTextureResult = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, sharedImageProcessingContext.coreVideoTextureCache, movieFrame, nil, GLenum(GL_TEXTURE_2D), GL_LUMINANCE_ALPHA, GLsizei(bufferWidth / 2), GLsizei(bufferHeight / 2), GLenum(GL_LUMINANCE_ALPHA), GLenum(GL_UNSIGNED_BYTE), 1, &chrominanceGLTexture)
         
         if chrominanceGLTextureResult != kCVReturnSuccess || chrominanceGLTexture == nil {
-            FilterLog(message:"ERROR: Could not create ChrominanceGLTexture")
+            BFLog(message:"ERROR: Could not create ChrominanceGLTexture")
             return
         }
         
@@ -532,7 +532,7 @@ public class PQMovieFilter: PQBaseFilter {
         do {
             chrominanceFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(bufferWidth), height: GLint(bufferHeight)), textureOnly: true, overriddenTexture: chrominanceTexture)
         } catch {
-            FilterLog(message:"ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
+            BFLog(message:"ERROR: Could not create a framebuffer of the size (\(bufferWidth), \(bufferHeight)), error: \(error)")
             return
         }
         chrominanceFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
@@ -544,7 +544,7 @@ public class PQMovieFilter: PQBaseFilter {
         convertYUVToRGBAK(shader: yuvConversionShader!, luminanceFramebuffer: luminanceFramebuffer, chrominanceFramebuffer: chrominanceFramebuffer, resultFramebuffer: movieFramebuffer, colorConversionMatrix: conversionMatrix)
         CVPixelBufferUnlockBaseAddress(movieFrame, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
         
-        FilterLog(2, message: "mp4 render process time is \(CMTimeGetSeconds(withSampleTime))")
+        BFLog(2, message: "mp4 render process time is \(CMTimeGetSeconds(withSampleTime))")
         movieFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(withSampleTime))
         
         movieFramebuffer.userInfo = framebufferUserInfo

+ 47 - 47
BFFramework/Classes/PQGPUImage/akfilters/PQMovieInput.swift

@@ -141,7 +141,7 @@ public class PQMovieInput: ImageSource {
      // 画布的大小 注意要是偶数 要不在 IOS 13上会有绿边 自动放大到偶数
     public var mShowVidoSize: CGSize = cVideoCannvasSizeOneToOne {
         didSet {
-            FilterLog(2, message: "mShowVidoSize is move input  \(mShowVidoSize)")
+            BFLog(2, message: "mShowVidoSize is move input  \(mShowVidoSize)")
             do {
                 displayLink?.isPaused = true
                 imageFramebuffer = try Framebuffer(context: sharedImageProcessingContext, orientation: .portrait, size: GLSize(width: GLint(mShowVidoSize.width), height: GLint(mShowVidoSize.height)), textureOnly: true)
@@ -155,7 +155,7 @@ public class PQMovieInput: ImageSource {
     // 初始化方法
     public init(asset: AVAsset, videoComposition: AVVideoComposition?, audioMix: AVAudioMix?, playAtActualSpeed: Bool = false, loop: Bool = false, audioSettings: [String: Any]? = nil) throws {
         self.asset = asset
-        FilterLog(2, message: "asset 资源的总时长\(asset.duration.seconds) \(asset.duration)")
+        BFLog(2, message: "asset 资源的总时长\(asset.duration.seconds) \(asset.duration)")
         self.audioMix = audioMix
         self.audioSettings = audioSettings
         self.videoComposition = videoComposition
@@ -163,7 +163,7 @@ public class PQMovieInput: ImageSource {
         self.loop = loop
         yuvConversionShader = crashOnShaderCompileFailure("MovieInput") { try sharedImageProcessingContext.programForVertexShader(defaultVertexShaderForInputs(2), fragmentShader: YUVConversionFullRangeFragmentShader) }
         if asset.duration.seconds <= 0 {
-            FilterLog(2, message: "asset 资源的总时长为0,返回")
+            BFLog(2, message: "asset 资源的总时长为0,返回")
             return
         }
   
@@ -200,7 +200,7 @@ public class PQMovieInput: ImageSource {
         self.audioInputStatusObserver?.invalidate()
         self.avPlayerTimeObserver?.invalidate()
 
-        FilterLog(1, message: "movieinput release")
+        BFLog(1, message: "movieinput release")
     }
 
     // MARK: -
@@ -212,7 +212,7 @@ public class PQMovieInput: ImageSource {
         isPlay = false
         beginTime = 0
         currentTime = .zero
-        FilterLog(2, message: "初始化播放开始时间、\(CMTimeGetSeconds(timeRange.start)) 结束时间\(CMTimeGetSeconds(timeRange.end)) 播放总时长:\(CMTimeGetSeconds(timeRange.end) - CMTimeGetSeconds(timeRange.start))")
+        BFLog(2, message: "初始化播放开始时间、\(CMTimeGetSeconds(timeRange.start)) 结束时间\(CMTimeGetSeconds(timeRange.end)) 播放总时长:\(CMTimeGetSeconds(timeRange.end) - CMTimeGetSeconds(timeRange.start))")
   
         playeTimeRange = timeRange
         startTime = playeTimeRange.start
@@ -222,7 +222,7 @@ public class PQMovieInput: ImageSource {
 
     @objc func displayLinkClick(_ displayLink: CADisplayLink) {
         if assetReader == nil {
-            FilterLog(2, message: "self.assetReader is null !!!!!!!!!!")
+            BFLog(2, message: "self.assetReader is null !!!!!!!!!!")
             displayLink.isPaused = true
             return
         }
@@ -245,7 +245,7 @@ public class PQMovieInput: ImageSource {
                 if(midTime < 0.0001){
                     midTime = 0
                 }
-                FilterLog(message: "CFAbsoluteTimeGetCurrent()\(CFAbsoluteTimeGetCurrent()) - self.beginTime  is:::::\(self.beginTime) 差值 \(midTime)")
+                BFLog(message: "CFAbsoluteTimeGetCurrent()\(CFAbsoluteTimeGetCurrent()) - self.beginTime  is:::::\(self.beginTime) 差值 \(midTime)")
               
                 self.currentTime = CMTimeMakeWithSeconds(midTime +  CMTimeGetSeconds(startTime ?? CMTime.zero), preferredTimescale: BASE_FILTER_TIMESCALE)
             }else {
@@ -257,12 +257,12 @@ public class PQMovieInput: ImageSource {
   
             let prgressValue = currTime / Float64(duration)
             
-            FilterLog(2, message: "\(mIsExport) MovieOutput total frames appended:播放进行中 总用时: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) 播放器开始时间:\(CMTimeGetSeconds(playeTimeRange.start)) 播放器原始结束时间:\(CMTimeGetSeconds(playeTimeRange.end))    总时间:\(CMTimeGetSeconds(playeTimeRange.end) - CMTimeGetSeconds(playeTimeRange.start)) 播放进度当前时间:\(currTime) 进度:\(prgressValue) 帧id \(totalFramesSent)")
+            BFLog(2, message: "\(mIsExport) MovieOutput total frames appended:播放进行中 总用时: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) 播放器开始时间:\(CMTimeGetSeconds(playeTimeRange.start)) 播放器原始结束时间:\(CMTimeGetSeconds(playeTimeRange.end))    总时间:\(CMTimeGetSeconds(playeTimeRange.end) - CMTimeGetSeconds(playeTimeRange.start)) 播放进度当前时间:\(currTime) 进度:\(prgressValue) 帧id \(totalFramesSent)")
             totalFramesSent += 1
     
-            FilterLog(2, message: "2222222222播放进行中 总用时: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) 播放进度当前时间:\(currTime) 总时间为:\(duration)进度:\(prgressValue) 音频时长:\(    CMTimeGetSeconds(asset.duration) )")
+            BFLog(2, message: "2222222222播放进行中 总用时: \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) 播放进度当前时间:\(currTime) 总时间为:\(duration)进度:\(prgressValue) 音频时长:\(    CMTimeGetSeconds(asset.duration) )")
             if currTime / duration > 1{
-                FilterLog(2, message: "全部播放完成 总用时为:\(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
+                BFLog(2, message: "全部播放完成 总用时为:\(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
                 if(self.completion != nil){
                     self.completion?()
                 }
@@ -276,7 +276,7 @@ public class PQMovieInput: ImageSource {
                 if !mIsExport {
                     self.start(isFreeBuffer: true,timeRange: playeTimeRange)
                 }else{
-                    FilterLog(message: "强制停止!!!!")
+                    BFLog(message: "强制停止!!!!")
                     displayLink.isPaused = true
                     return
                 }
@@ -286,9 +286,9 @@ public class PQMovieInput: ImageSource {
                 self.conditionLock.lock()
                 while self.readingShouldWait {
                     self.synchronizedEncodingDebugPrint("Disable reading")
-                    FilterLog(2, message: "Disable reading 开始等待 \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) ")
+                    BFLog(2, message: "Disable reading 开始等待 \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0)) ")
                     self.conditionLock.wait()
-                    FilterLog(2, message: "Enable reading  停止等待 \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
+                    BFLog(2, message: "Enable reading  停止等待 \(CFAbsoluteTimeGetCurrent() - (debugStartTime ?? 0.0))")
                 }
                 self.conditionLock.unlock()
 
@@ -321,11 +321,11 @@ public class PQMovieInput: ImageSource {
     @objc public func start(isFreeBuffer: Bool, isExport: Bool = false,timeRange:CMTimeRange = CMTimeRange.init()) {
         
         debugStartTime = CFAbsoluteTimeGetCurrent()
-        FilterLog(2, message: "开始播放的系统时钟时间 \(String(describing: debugStartTime))")
+        BFLog(2, message: "开始播放的系统时钟时间 \(String(describing: debugStartTime))")
         
         playeTimeRange = timeRange
         readerAudioTrackOutput = nil
-        FilterLog(2, message: "PQMoveInput开始")
+        BFLog(2, message: "PQMoveInput开始")
         mFreeBuffer = isFreeBuffer
         assetReader = createReader()
     
@@ -333,7 +333,7 @@ public class PQMovieInput: ImageSource {
 
         isPlay = true
         if assetReader == nil {
-            FilterLog(2, message: "assetReader is null!!!!!")
+            BFLog(2, message: "assetReader is null!!!!!")
             return
         }
 
@@ -342,7 +342,7 @@ public class PQMovieInput: ImageSource {
             try NSObject.catchException { [self] in
                 if(!isUsedAVPlayer){
                     guard self.assetReader.startReading() else {
-                        FilterLog(2, message: "ERROR: Unable to start reading: \(String(describing: self.assetReader.error))")
+                        BFLog(2, message: "ERROR: Unable to start reading: \(String(describing: self.assetReader.error))")
                         return
                     }
                 }else{
@@ -354,19 +354,19 @@ public class PQMovieInput: ImageSource {
               
             }
         } catch {
-            FilterLog(2, message: "ERROR: Unable to start reading: \(error)")
+            BFLog(2, message: "ERROR: Unable to start reading: \(error)")
             return
         }
 
-        FilterLog(2, message: "assetReader.outputs count is \(assetReader.outputs)")
+        BFLog(2, message: "assetReader.outputs count is \(assetReader.outputs)")
         for output in assetReader.outputs {
             if output.mediaType == AVMediaType.video {
                 readerVideoTrackOutput = output
-                FilterLog(2, message: " set  readerVideoTrackOutput")
+                BFLog(2, message: " set  readerVideoTrackOutput")
             }
             if output.mediaType == AVMediaType.audio {
                 readerAudioTrackOutput = output
-                FilterLog(2, message: " set  readerAudioTrackOutput")
+                BFLog(2, message: " set  readerAudioTrackOutput")
             }
         }
 
@@ -383,7 +383,7 @@ public class PQMovieInput: ImageSource {
     }
 
     public func cancel() {
-        FilterLog(2, message: "PQMoveInput取消")
+        BFLog(2, message: "PQMoveInput取消")
         isPlay = false
 
         // 将定时器移除主循环
@@ -398,7 +398,7 @@ public class PQMovieInput: ImageSource {
     }
 
     public func resume() {
-        FilterLog(2, message: "PQMoveInput恢复播放")
+        BFLog(2, message: "PQMoveInput恢复播放")
         mFreeBuffer = false
         isPlay = true
         if !mIsExport{
@@ -413,15 +413,15 @@ public class PQMovieInput: ImageSource {
 
     public func pause() {
         if !isPlay {
-            FilterLog(2, message: "还不是播放状态")
+            BFLog(2, message: "还不是播放状态")
             return
         }
 
         if displayLink == nil {
-            FilterLog(2, message: "displayLink is erorr displaye bug !!!!")
+            BFLog(2, message: "displayLink is erorr displaye bug !!!!")
             return
         }
-        FilterLog(2, message: "PQMoveInput暂停 displayLink.timestamp:  \(displayLink!.timestamp)")
+        BFLog(2, message: "PQMoveInput暂停 displayLink.timestamp:  \(displayLink!.timestamp)")
         isPlay = false
 
         // 暂停帧的刷新 true:停 ; false:开始
@@ -437,7 +437,7 @@ public class PQMovieInput: ImageSource {
             startTime = currentTime
         }
 
-        FilterLog(2, message: "暂停时间:\(currTime)")
+        BFLog(2, message: "暂停时间:\(currTime)")
 
         beginTime = 0
         
@@ -453,10 +453,10 @@ public class PQMovieInput: ImageSource {
     func createReader() -> AVAssetReader? {
         do {
             let assetReader = try AVAssetReader(asset: asset)
-            FilterLog(2, message: "assetReader init \(assetReader)  asset url is \(asset)")
+            BFLog(2, message: "assetReader init \(assetReader)  asset url is \(asset)")
             if audioMix == nil {
                 if let audioTrack = asset.tracks(withMediaType: .audio).first, let _ = audioEncodingTarget {
-                    FilterLog(1, message: "audioTrack start \(audioTrack.timeRange.start) \(audioTrack.timeRange.duration.value)")
+                    BFLog(1, message: "audioTrack start \(audioTrack.timeRange.start) \(audioTrack.timeRange.duration.value)")
 
                     let readerAudioTrackOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: audioSettings)
                     readerAudioTrackOutput.alwaysCopiesSampleData = false
@@ -467,7 +467,7 @@ public class PQMovieInput: ImageSource {
                 }
 
             } else {
-                FilterLog(2, message: "self.asset.tracks is \(asset.tracks.count)")
+                BFLog(2, message: "self.asset.tracks is \(asset.tracks.count)")
                 let readerAudioTrackOutput = AVAssetReaderAudioMixOutput(audioTracks: asset.tracks(withMediaType: .audio), audioSettings: audioSettings)
                 readerAudioTrackOutput.audioMix = audioMix
                 readerAudioTrackOutput.alwaysCopiesSampleData = false
@@ -475,21 +475,21 @@ public class PQMovieInput: ImageSource {
             }
 
             assetReader.timeRange = playeTimeRange
-            FilterLog(2, message: "初始化播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end)) 音乐的总时长\(asset.duration.seconds)")
+            BFLog(2, message: "初始化播放器开始时间\(CMTimeGetSeconds(assetReader.timeRange.start)) 结束时间\(CMTimeGetSeconds(assetReader.timeRange.end)) 音乐的总时长\(asset.duration.seconds)")
        
 
             actualStartTime = nil
 
             return assetReader
         } catch {
-            FilterLog(2, message: "ERROR: Unable to create asset reader: \(error)")
+            BFLog(2, message: "ERROR: Unable to create asset reader: \(error)")
         }
         return nil
     }
 
     func readNextVideoFrame(with _: AVAssetReader) {
         
-        FilterLog(2, message: "视频解码状态\(assetReader.status.rawValue)")
+        BFLog(2, message: "视频解码状态\(assetReader.status.rawValue)")
      
         autoreleasepool {
             synchronizedEncodingDebugPrint("Process frame input")
@@ -501,7 +501,7 @@ public class PQMovieInput: ImageSource {
             
     
             let prgressValue = (currTime - start) / (duration - start)
-//            FilterLog(1, message: "\(mIsExport) movinput 当前时间 is \(currTime) curr当前进度:\(prgressValue)")
+//            BFLog(1, message: "\(mIsExport) movinput 当前时间 is \(currTime) curr当前进度:\(prgressValue)")
             progress?(currTime, duration, prgressValue)
 
             sharedImageProcessingContext.runOperationSynchronously { [weak self] in
@@ -513,12 +513,12 @@ public class PQMovieInput: ImageSource {
     func readNextAudioSample(with assetReader: AVAssetReader, from audioTrackOutput: AVAssetReaderOutput) {
         
         if(isUsedAVPlayer){
-            FilterLog(2, message: "使用的 avplayer 播放模式")
+            BFLog(2, message: "使用的 avplayer 播放模式")
             return
         }
        
         if !isPlay {
-            FilterLog(2, message: "自动停到首帧的不处理音频")
+            BFLog(2, message: "自动停到首帧的不处理音频")
             return
         }
         /*
@@ -527,35 +527,35 @@ public class PQMovieInput: ImageSource {
          case failed = 3
          case cancelled = 4
          */
-        FilterLog(2, message: "音频解码状态\(assetReader.status.rawValue)")
+        BFLog(2, message: "音频解码状态\(assetReader.status.rawValue)")
         
         autoreleasepool {
             guard let sampleBuffer = audioTrackOutput.copyNextSampleBuffer(),CMSampleBufferIsValid(sampleBuffer) else {
                 if(assetReader.status == .completed){
-                    FilterLog(2, message: "提前结束的了!!!!\(String(describing: assetReader.error))")
+                    BFLog(2, message: "提前结束的了!!!!\(String(describing: assetReader.error))")
                 }
                 if let movieOutput = synchronizedMovieOutput {
-                    FilterLog(2, message: "this is runing assetWriterAudioInput  markAsFinished \(String(describing: assetReader.error)) \(assetReader)")
+                    BFLog(2, message: "this is runing assetWriterAudioInput  markAsFinished \(String(describing: assetReader.error)) \(assetReader)")
 
                     movieOutput.movieProcessingContext.runOperationAsynchronously {
                         movieOutput.audioEncodingIsFinished = true
                         movieOutput.assetWriterAudioInput?.markAsFinished()
                     }
                 }
-                FilterLog(2, message: "sampleBuffer is null 速度太快copy is error")
+                BFLog(2, message: "sampleBuffer is null 速度太快copy is error")
                 return
             }
 
             synchronizedEncodingDebugPrint("Process audio sample input")
 
             let currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer)
-            FilterLog(2, message: "处理音频的时间戳 \(CMTimeGetSeconds(currentSampleTime)) 播放时间\(CMTimeGetSeconds(currentTime))")
+            BFLog(2, message: "处理音频的时间戳 \(CMTimeGetSeconds(currentSampleTime)) 播放时间\(CMTimeGetSeconds(currentTime))")
 
             // https://www.itdaan.com/blog/2013/11/28/deb10f90970a5ea33f185c9faf2a0ab3.html
             if !mFreeBuffer {
                 audioEncodingTarget?.processAudioBuffer(sampleBuffer)
             }else{
-                FilterLog(message: "不播放音频!!!!")
+                BFLog(message: "不播放音频!!!!")
             }
         }
     }
@@ -565,7 +565,7 @@ public class PQMovieInput: ImageSource {
             let startPTime = CFAbsoluteTimeGetCurrent()
 
             imageFramebuffer.lock()
-            FilterLog(message: "mIsExport:\(mIsExport) 实际设置的每一帧时间戳:\(CMTimeGetSeconds(currentTime))")
+            BFLog(message: "mIsExport:\(mIsExport) 实际设置的每一帧时间戳:\(CMTimeGetSeconds(currentTime))")
             // 设置当前帧的时间戳
             imageFramebuffer.timingStyle = .videoFrame(timestamp: Timestamp(currentTime))
  
@@ -582,7 +582,7 @@ public class PQMovieInput: ImageSource {
                 totalFrameTimeDuringCapture += currentFrameTime
                 
                 
-                FilterLog(2, message: "currentTime is \(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 当前帧渲染时间 : \(String(format: "%.6f",1000.0 * currentFrameTime)) ms Average frame time : \(String(format: "%.6f", 1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent))) ms  totalFrameTimeDuringCapture is \(String(format: "%.6f",totalFrameTimeDuringCapture))")
+                BFLog(2, message: "currentTime is \(String(format: "%.6f", CMTimeGetSeconds(currentTime))) 当前帧渲染时间 : \(String(format: "%.6f",1000.0 * currentFrameTime)) ms Average frame time : \(String(format: "%.6f", 1000.0 * totalFrameTimeDuringCapture / Double(totalFramesSent))) ms  totalFrameTimeDuringCapture is \(String(format: "%.6f",totalFrameTimeDuringCapture))")
              
             
 
@@ -638,7 +638,7 @@ public class PQMovieInput: ImageSource {
             conditionLock.signal()
             
         } else {
-            FilterLog(1, message: "MovieOutput total frames appended 要加锁了")
+            BFLog(1, message: "MovieOutput total frames appended 要加锁了")
             readingShouldWait = true
         }
         conditionLock.unlock()
@@ -730,7 +730,7 @@ extension PQMovieInput {
                 let range = strongSelf.musicPlayRanges?[strongSelf.indexRage]
                 playerItem.forwardPlaybackEndTime = range!.end
                 playerItem.reversePlaybackEndTime = range!.start
-//                FilterLog(1, message: "curr: start ********************\(CMTimeGetSeconds(range.start)) - \(playerItem.reversePlaybackEndTime) - \(playerItem.forwardPlaybackEndTime)")
+//                BFLog(1, message: "curr: start ********************\(CMTimeGetSeconds(range.start)) - \(playerItem.reversePlaybackEndTime) - \(playerItem.forwardPlaybackEndTime)")
                 strongSelf.avPlayer!.seek(to: playerItem.reversePlaybackEndTime) { isSuccess in
                     playerItem.seek(to: playerItem.reversePlaybackEndTime) { isSuccess in
                         strongSelf.avPlayer!.play()
@@ -743,7 +743,7 @@ extension PQMovieInput {
         
 //        avPlayerTimeObserver = avPlayer!.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 4), queue: DispatchQueue.global()) {[weak self] time in
 //         //    进度监控
-//            FilterLog(1, message: "cont:\(CMTimeGetSeconds(time) - CMTimeGetSeconds((self?.musicPlayRanges?.first!.start)!)), curr:\(CMTimeGetSeconds(time))")
+//            BFLog(1, message: "cont:\(CMTimeGetSeconds(time) - CMTimeGetSeconds((self?.musicPlayRanges?.first!.start)!)), curr:\(CMTimeGetSeconds(time))")
 //        } as? NSKeyValueObservation
     }
     

+ 6 - 6
BFFramework/Classes/PQGPUImage/akfilters/PQTextFilter.swift

@@ -20,7 +20,7 @@ open class PQTextFilter: PQBaseFilter {
 
     var subtitleImage: UIImage?
     deinit {
-        FilterLog(message: "字幕析构 ")
+        BFLog(message: "字幕析构 ")
         clearData()
     }
 
@@ -59,7 +59,7 @@ open class PQTextFilter: PQBaseFilter {
                 subtitleLab.numberOfLines = 2
                 subtitleLab.lineBreakMode = .byWordWrapping
 
-                FilterLog(message: "字幕初始化时大小 \(subtitleLab.frame)")
+                BFLog(message: "字幕初始化时大小 \(subtitleLab.frame)")
                 subtitleLab.backgroundColor = UIColor(red: 1, green: 1, blue: 1, alpha: 0.5)
                 subtitleLab.alpha = 1
           
@@ -81,7 +81,7 @@ open class PQTextFilter: PQBaseFilter {
                 self?.subtitleImage = UIGraphicsGetImageFromCurrentImageContext() ?? UIImage()
                 UIGraphicsEndImageContext()
                 
-                FilterLog(message: "合成图片大小: \(String(describing: self?.subtitleImage?.size))")
+                BFLog(message: "合成图片大小: \(String(describing: self?.subtitleImage?.size))")
 
             }
  
@@ -100,7 +100,7 @@ open class PQTextFilter: PQBaseFilter {
         let inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
 
         let currTime = CMTimeGetSeconds(CMTime(value: inputFramebuffer.timingStyle.timestamp!.value, timescale: inputFramebuffer.timingStyle.timestamp!.timescale))
-        FilterLog(message: "subtitle 当前时间: \(currTime)")
+        BFLog(message: "subtitle 当前时间: \(currTime)")
 
         // 原有画布
         renderFramebuffer = sharedImageProcessingContext.framebufferCache.requestFramebufferWithProperties(orientation: .portrait, size: inputSize, stencil: false)
@@ -114,7 +114,7 @@ open class PQTextFilter: PQBaseFilter {
         releaseIncomingFramebuffers()
 
         if subTitleTexture != 0 {
-            FilterLog(message: "subTitleTexture 有值可以正常显示")
+            BFLog(message: "subTitleTexture 有值可以正常显示")
             let texturePropertiesimagetwo = InputTextureProperties(textureCoordinates: inputFramebuffer.orientation.rotationNeededForOrientation(.portrait).textureCoordinates(), texture: subTitleTexture)
  
             let verticesPoint = PQGPUImageTools.computeVertices(viewSize: CGSize.init(width: CGFloat(inputSize.width), height: CGFloat(inputSize.height)), _bounds: CGRect.init(x: stickerInfo?.materialPosition?.x ?? 0, y:  stickerInfo?.materialPosition?.y ?? 0, width: stickerInfo?.materialPosition?.width ?? 0, height: stickerInfo?.materialPosition?.height ?? 0))
@@ -126,7 +126,7 @@ open class PQTextFilter: PQBaseFilter {
 
             releaseIncomingFramebuffers()
         }else{
-            FilterLog(message: "subTitleTexture is nil!!!!!")
+            BFLog(message: "subTitleTexture is nil!!!!!")
         }
     }
 

+ 4 - 1
BFFramework/Classes/selectImage/PQImageCropVC.swift → BFFramework/Classes/SelectImage/PQImageCropVC.swift

@@ -44,7 +44,10 @@ class PQImageCropVC: BFBaseViewController, UIScrollViewDelegate {
     func setupView() {
         let holeWidth = view.frame.width - gap
         BFLog(message: "aspectH :\(String(describing: aspectH))")
-        let holeHeight = holeWidth * aspectH / aspectW
+        var holeHeight = holeWidth * aspectH / aspectW
+        if holeHeight.isNaN {
+            holeHeight = 0
+        }
         if img.imageOrientation != .up {
             UIGraphicsBeginImageContextWithOptions(img.size, false, img.scale)
             var rect = CGRect.zero

+ 0 - 0
BFFramework/Classes/selectImage/PQImageSelectedController.swift → BFFramework/Classes/SelectImage/PQImageSelectedController.swift


+ 0 - 0
BFFramework/Classes/selectImage/PQSelecteVideoItemCell.swift → BFFramework/Classes/SelectImage/PQSelecteVideoItemCell.swift


+ 52 - 60
BFFramework/Classes/selectImage/PQUploadController.swift → BFFramework/Classes/SelectImage/PQUploadController.swift

@@ -6,10 +6,10 @@
 //  Copyright © 2020 BytesFlow. All rights reserved.
 //
 
-import BFUIKit
 import MobileCoreServices
 import Photos
 import UIKit
+import BFCommonKit
 
 let playerHeaderH: CGFloat = cScreenWidth * (250 / 375)
 
@@ -18,6 +18,7 @@ open class PQUploadController: BFBaseViewController {
     public var maxWidth: CGFloat = cScreenWidth
     // 最大的高度
     public var maxHeight: CGFloat = adapterWidth(width: 300)
+    public var jumptoPublicHandle:((_ selectData:PQUploadModel?) -> Void)?
     // 画面比例
     public var aspectRatio: aspectRatio?
     public var preViewSize: CGSize {
@@ -135,10 +136,8 @@ open class PQUploadController: BFBaseViewController {
         let backBtn = UIButton(type: .custom)
         backBtn.frame = CGRect(x: 0, y: cDevice_iPhoneStatusBarHei, width: cDefaultMargin * 4, height: cDefaultMargin * 4)
         backBtn.imageEdgeInsets = UIEdgeInsets(top: 0, left: 0, bottom: -5, right: 0)
-
-        backBtn.setImage(UIImage.moduleImage(named: "icon_blanc_back", moduleName: "BFFramework", isAssets: false)?.withRenderingMode(.alwaysTemplate), for: .normal)
+        backBtn.setImage(UIImage(named: "ic_close_black"), for: .normal)
         backBtn.addTarget(self, action: #selector(backBtnClick), for: .touchUpInside)
-        backBtn.imageView?.tintColor = BFConfig.shared.styleTitleColor
         return backBtn
     }()
 
@@ -154,26 +153,21 @@ open class PQUploadController: BFBaseViewController {
         let emptyData = BFEmptyModel()
         emptyData.title = "哦呜~ 你没有可上传的视频~"
         emptyData.emptyImageName = "video_empty"
-        emptyData.netDisRefreshBgColor = UIColor.hexColor(hexadecimal: "#FA6400")
-        emptyData.netDisTitle = "内容加载失败"
-        emptyData.netDisTitleColor = UIColor.hexColor(hexadecimal: "#333333")
-        emptyData.netemptyDisImage = UIImage(named: "empty_netDis_icon")
-        emptyData.netDisRefreshTitle = NSMutableAttributedString(string: "重新加载", attributes: [.font: UIFont.systemFont(ofSize: 16, weight: .medium), .foregroundColor: UIColor.white])
         return emptyData
     }()
 
     public lazy var emptyRemindView: BFEmptyRemindView = {
-        let remindView = BFEmptyRemindView(frame: CGRect(x: 0, y: cDevice_iPhoneNavBarAndStatusBarHei, width: cScreenWidth, height: cScreenHeigth - cDevice_iPhoneNavBarAndStatusBarHei))
-        remindView.isHidden = true
-        remindView.emptyData = anthorEmptyData
-        view.addSubview(remindView)
-        remindView.fullRefreshBloc = { [weak self] _, _ in
+        let emptyRemindView = BFEmptyRemindView(frame: CGRect(x: 0, y: cDevice_iPhoneNavBarAndStatusBarHei, width: cScreenWidth, height: cScreenHeigth - cDevice_iPhoneNavBarAndStatusBarHei))
+        emptyRemindView.isHidden = true
+        emptyRemindView.emptyData = anthorEmptyData
+        view.addSubview(emptyRemindView)
+        emptyRemindView.fullRefreshBloc = { [weak self, weak emptyRemindView] _, _ in
             self?.isJumpToAuthorization = true
-            if self?.emptyRemindView.refreshBtn.currentTitle == "授予权限" {
+            if emptyRemindView?.refreshBtn.currentTitle == "授予权限" {
                 openAppSetting()
             }
         }
-        return remindView
+        return emptyRemindView
     }()
 
     public lazy var collectionView: UICollectionView = {
@@ -189,7 +183,7 @@ open class PQUploadController: BFBaseViewController {
         } else {
             automaticallyAdjustsScrollViewInsets = false
         }
-        collectionView.backgroundColor = .clear
+        collectionView.backgroundColor = UIColor.hexColor(hexadecimal: "#191919")
         return collectionView
     }()
 
@@ -293,7 +287,7 @@ open class PQUploadController: BFBaseViewController {
         playerHeaderView.isUserInteractionEnabled = true
         playerHeaderView.contentMode = .scaleAspectFit
         playerHeaderView.clipsToBounds = true
-        playerHeaderView.backgroundColor = UIColor.yellow
+        playerHeaderView.backgroundColor = UIColor.black
         return playerHeaderView
     }()
 
@@ -301,23 +295,20 @@ open class PQUploadController: BFBaseViewController {
         let selecteBtn = UIButton(frame: CGRect(x: deleteBtn.frame.maxX + cDefaultMargin, y: 0, width: cScreenWidth - nextBtn.frame.width - deleteBtn.frame.maxX - cDefaultMargin * 5, height: cDevice_iPhoneTabBarHei))
         selecteBtn.titleLabel?.lineBreakMode = .byTruncatingTail
         selecteBtn.setTitle("全部", for: .normal)
-
-        selecteBtn.setImage(UIImage.moduleImage(named: "icon_uploadVideo_more", moduleName: "BFFramework", isAssets: false)?.withRenderingMode(.alwaysTemplate), for: .normal)
-        selecteBtn.setTitleColor(BFConfig.shared.styleTitleColor, for: .normal)
+        selecteBtn.setImage(UIImage(named: "icon_uploadVideo_more"), for: .normal)
+        selecteBtn.setTitleColor(UIColor.white, for: .normal)
         selecteBtn.titleLabel?.font = UIFont.systemFont(ofSize: 16, weight: .medium)
         selecteBtn.tag = 2
-        selecteBtn.imagePosition(at: PQButtonImageEdgeInsetsStyle.right, space: cDefaultMargin / 2)
-        selecteBtn.imageView?.tintColor = BFConfig.shared.styleTitleColor
+        selecteBtn.imagePosition(at: .right, space: cDefaultMargin / 2)
         selecteBtn.addTarget(self, action: #selector(btnClick(sender:)), for: .touchUpInside)
         return selecteBtn
     }()
 
     public lazy var deleteBtn: UIButton = {
         let deleteBtn = UIButton(frame: CGRect(x: cDefaultMargin, y: 0, width: cDefaultMargin * 4, height: cDevice_iPhoneTabBarHei))
-        deleteBtn.setImage(UIImage.moduleImage(named: "icon_blanc_back", moduleName: "BFFramework", isAssets: false)?.withRenderingMode(.alwaysTemplate), for: .normal)
+        deleteBtn.setImage(UIImage(named: "upload_delete"), for: .normal)
         deleteBtn.tag = 1
         deleteBtn.addTarget(self, action: #selector(btnClick(sender:)), for: .touchUpInside)
-        deleteBtn.imageView?.tintColor = BFConfig.shared.styleTitleColor
         return deleteBtn
     }()
 
@@ -328,15 +319,15 @@ open class PQUploadController: BFBaseViewController {
         nextBtn.titleLabel?.font = UIFont.systemFont(ofSize: 13, weight: .medium)
         nextBtn.tag = 3
         nextBtn.addTarget(self, action: #selector(btnClick(sender:)), for: .touchUpInside)
-        nextBtn.backgroundColor = UIColor.hexColor(hexadecimal: BFConfig.shared.styleColor.rawValue)
-        nextBtn.setTitleColor(.white, for: .normal)
+        nextBtn.backgroundColor = UIColor.hexColor(hexadecimal: "#333333")
+        nextBtn.setTitleColor(UIColor.hexColor(hexadecimal: "#999999"), for: .normal)
         return nextBtn
     }()
 
     public lazy var bottomView: UIView = {
         let bottomView = UIView(frame: CGRect(x: 0, y: cDefaultMargin * 2, width: cScreenWidth, height: cDevice_iPhoneNavBarHei))
         bottomView.addSubview(selecteBtn)
-        bottomView.backgroundColor = BFConfig.shared.styleBackGroundColor
+        bottomView.backgroundColor = UIColor.hexColor(hexadecimal: "#191919")
         selecteBtn.center.y = nextBtn.center.y
         bottomView.addSubview(deleteBtn)
         bottomView.addSubview(nextBtn)
@@ -344,7 +335,7 @@ open class PQUploadController: BFBaseViewController {
         return bottomView
     }()
 
-    override open func viewDidLoad() {
+    open override func viewDidLoad() {
         super.viewDidLoad()
 
         view.backgroundColor = BFConfig.shared.editCoverimageSelectedbackgroundColor
@@ -359,7 +350,7 @@ open class PQUploadController: BFBaseViewController {
         //        PHPhotoLibrary.shared().unregisterChangeObserver(self)
     }
 
-    override open func viewDidDisappear(_ animated: Bool) {
+    open override func viewDidDisappear(_ animated: Bool) {
         super.viewDidDisappear(animated)
         if !isAssetImage {
             avPlayer.pause()
@@ -367,7 +358,7 @@ open class PQUploadController: BFBaseViewController {
         }
     }
 
-    override open func viewWillAppear(_ animated: Bool) {
+    open override func viewWillAppear(_ animated: Bool) {
         super.viewDidAppear(animated)
         if !isAssetImage {
             if selectedData != nil {
@@ -381,7 +372,7 @@ open class PQUploadController: BFBaseViewController {
         addPlayerItemObserver()
     }
 
-    override open func viewWillDisappear(_ animated: Bool) {
+    open override func viewWillDisappear(_ animated: Bool) {
         super.viewWillDisappear(animated)
         removePlayerItemObserver()
     }
@@ -491,10 +482,10 @@ open class PQUploadController: BFBaseViewController {
                 }
             }
         }
-//        if !isAssetImage {
-//            // 视频上传相关上报
-//            PQEventTrackViewModel.baseReportUpload(businessType: .bt_pageView, objectType: .ot_pageView, pageSource: .sp_upload_videoSelect, extParams: ["source": getSourceType().rawValue, "projectId": getMakeVideoProjectId() ?? "", "draftboxId": getMakeVideoDraftboxId() ?? ""], remindmsg: "上传相关")
-//        }
+        if !isAssetImage {
+            // 视频上传相关上报
+            PQEventTrackViewModel.baseReportUpload(businessType: .bt_pageView, objectType: .ot_pageView, pageSource: .sp_upload_videoSelect, extParams: ["source": videoUploadSourceType.videoUpload.rawValue, "projectId": "", "draftboxId": ""], remindmsg: "上传相关")
+        }
     }
 
     // 转化处理获取到的相簿
@@ -520,10 +511,11 @@ open class PQUploadController: BFBaseViewController {
         switch sender.tag {
         case 1: // 返回
             navigationController?.popViewController(animated: true)
-//            if !isAssetImage {
-//                // 视频上传相关上报
-//                PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_up_backBtn, pageSource: .sp_upload_videoSelect, extParams: ["source": getSourceType().rawValue, "projectId": getMakeVideoProjectId() ?? "", "draftboxId": getMakeVideoDraftboxId() ?? ""], remindmsg: "上传相关")
-//            }
+            if !isAssetImage {
+                // 视频上传相关上报
+                // MARK: SanW--待修改-2021.11.08
+                PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_up_backBtn, pageSource: .sp_upload_videoSelect, extParams: ["source": videoUploadSourceType.videoUpload.rawValue, "projectId": "", "draftboxId":""], remindmsg: "上传相关")
+            }
         case 2: // 筛选
             showCollects()
         case 3: // 下一步
@@ -531,17 +523,17 @@ open class PQUploadController: BFBaseViewController {
                 cShowHUB(superView: nil, msg: isAssetImage ? "请选择图片" : "请选择视频")
                 return
             }
-//            if !isAssetImage {
-//                avPlayer.pause()
-//                playBtn.isHidden = false
-//                let coverVc = PQUploadHandingController()
-//                selectedData?.videoFromScene = .UploadNormal
-//                coverVc.uploadData = selectedData
-//                navigationController?.pushViewController(coverVc, animated: true)
-//                // 视频上传相关上报
-//                PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_up_nextBtn, pageSource: .sp_upload_videoSelect, extParams: ["source": videoUploadSourceType.videoUpload.rawValue, "projectId": getMakeVideoProjectId() ?? "", "draftboxId": getMakeVideoDraftboxId() ?? ""], remindmsg: "上传相关")
-//                return
-//            }
+            if !isAssetImage {
+                avPlayer.pause()
+                playBtn.isHidden = false
+                if jumptoPublicHandle != nil {
+                    uploadData?.videoFromScene = .UploadNormal
+                    jumptoPublicHandle!(selectedData)
+                }
+                // 视频上传相关上报
+                PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_up_nextBtn, pageSource: .sp_upload_videoSelect, extParams: ["source": videoUploadSourceType.videoUpload.rawValue, "projectId": "", "draftboxId": ""], remindmsg: "上传相关")
+                return
+            }
             imageManager.requestImage(for: (selectedData?.asset)!, targetSize: itemSize, contentMode: .aspectFill, options: nil) { [weak self] image, _ in
                 self?.selectedData?.image = image
                 let vc = PQImageCropVC(image: (self?.selectedData?.image)!, aspectWidth: self?.videoWidth ?? 0.0, aspectHeight: self?.videoHeight ?? 0.0)
@@ -559,7 +551,7 @@ open class PQUploadController: BFBaseViewController {
         }
     }
 
-    @objc func showCollects() {
+    @objc public func showCollects() {
         if categoryData.count <= 0 {
             return
         }
@@ -577,7 +569,7 @@ open class PQUploadController: BFBaseViewController {
         }
     }
 
-    @objc func showCategoryView() {
+    @objc public func showCategoryView() {
         categoryView.isHidden = false
         categoryView.alpha = 0
         view.bringSubviewToFront(categoryView)
@@ -656,7 +648,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
                 cell.uploadData = itemData
                 if itemData.image == nil, itemData.asset != nil {
                     cell.representedAssetIdentifier = itemData.asset?.localIdentifier
-                    imageManager.requestImage(for: itemData.asset!, targetSize: itemSize, contentMode: .aspectFill, options: nil) { [weak self, weak cell] image, info in
+                    imageManager.requestImage(for: itemData.asset!, targetSize: itemSize, contentMode: .aspectFill, options: nil) {[weak self, weak cell] image, info in
                         if info?.keys.contains("PHImageResultIsDegradedKey") ?? false, "\(info?["PHImageResultIsDegradedKey"] ?? "0")" == "0", cell?.representedAssetIdentifier == itemData.asset?.localIdentifier {
                             if image != nil {
                                 itemData.image = image
@@ -665,7 +657,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
                                 let option = PHImageRequestOptions()
                                 option.isNetworkAccessAllowed = true
                                 option.resizeMode = .fast
-                                self?.imageManager.requestImageData(for: itemData.asset!, options: option) { [weak cell] data, _, _, _ in
+                                self?.imageManager.requestImageData(for: itemData.asset!, options: option) {[weak cell] data, _, _, _ in
                                     if data != nil {
                                         let image = UIImage(data: data!)
                                         itemData.image = image
@@ -686,7 +678,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
             let asset = itemData.categoryList.object(at: 0)
             if itemData.image == nil {
                 cell.representedAssetIdentifier = asset.localIdentifier
-                imageManager.requestImage(for: asset, targetSize: itemSize, contentMode: .aspectFill, options: nil) { [weak cell] image, info in
+                imageManager.requestImage(for: asset, targetSize: itemSize, contentMode: .aspectFill, options: nil) {[weak cell] image, info in
                     if info?.keys.contains("PHImageResultIsDegradedKey") ?? false, "\(info?["PHImageResultIsDegradedKey"] ?? "0")" == "0", cell?.representedAssetIdentifier == asset.localIdentifier {
                         itemData.image = image
                         cell?.uploadData = itemData
@@ -791,7 +783,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
                                 self?.playerHeaderView.layer.insertSublayer(self!.playerLayer, at: 0)
                             }
                             self?.avPlayer.play()
-                            //                            self?.playerHeaderView.image = itemData.image
+//                            self?.playerHeaderView.image = itemData.image
 
                             let tracks = (playerItem?.asset as? AVURLAsset)?.tracks(withMediaType: .video)
                             if tracks != nil, (tracks?.count ?? 0) > 0 {
@@ -832,7 +824,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
             allPhotos = categoryData[indexPath.item].categoryList
             catagerySelectedIndex = indexPath
             selecteBtn.setTitle(categoryData[indexPath.item].title, for: .normal)
-            selecteBtn.imagePosition(at: PQButtonImageEdgeInsetsStyle.right, space: cDefaultMargin / 2)
+            selecteBtn.imagePosition(at: .right, space: cDefaultMargin / 2)
             if !isAssetImage {
                 playBtn.isHidden = false
                 sliderView.isHidden = true
@@ -885,7 +877,7 @@ extension PQUploadController: UICollectionViewDelegate, UICollectionViewDataSour
 
     open func scrollViewDidScroll(_: UIScrollView) {
         // 这里是不是有用的?
-//        if currentController() is PQUploadController || currentController() is PQImageSelectedController {
+//        if bf_getCurrentViewController() is PQUploadController || bf_getCurrentViewController() is PQImageSelectedController {
 //            if scrollView == collectionView {
 //                updateCachedAssets()
 //            } else {
@@ -912,7 +904,7 @@ extension PQUploadController {
         playerItem?.removeObserver(self, forKeyPath: "status")
     }
 
-    override public func observeValue(forKeyPath keyPath: String?, of object: Any?, change _: [NSKeyValueChangeKey: Any]?, context _: UnsafeMutableRawPointer?) {
+    open override func observeValue(forKeyPath keyPath: String?, of object: Any?, change _: [NSKeyValueChangeKey: Any]?, context _: UnsafeMutableRawPointer?) {
         if object is AVPlayerItem, keyPath == "status" {
             BFLog(message: "(object as! AVPlayerItem).status = \((object as! AVPlayerItem).status.rawValue)")
             BFLoadingHUB.shared.dismissHUB(superView: playerHeaderView)

+ 2 - 0
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointMaterialController.swift

@@ -56,6 +56,8 @@ public class PQStuckPointMaterialController: BFBaseViewController {
         albumController.selectedHandle = { [weak self] seletedData in
             if seletedData != nil {
                 self?.albumSelectedHandle(seletedData: seletedData)
+            } else {
+                self?.changeCollecBtn.isSelected = false
             }
         }
         return albumController

+ 8 - 15
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointMusicContentController.swift

@@ -6,9 +6,9 @@
 //  Copyright © 2021 BytesFlow. All rights reserved.
 //
 
-import UIKit
 import BFCommonKit
 @_exported import BFUIKit
+import UIKit
 
 class PQStuckPointMusicContentController: BFBaseViewController {
     var itemList: [Any] = Array<Any>.init() // 所有分类数据
@@ -33,14 +33,7 @@ class PQStuckPointMusicContentController: BFBaseViewController {
     // 卡点音乐页面类型
     var contentType: stuckPointMusicContentType = .catagery {
         didSet {
-            if contentType == .page || contentType == .serach {
-                // SanW - 待修改 - 
-//                collectionView.addRefreshView(type: .REFRESH_TYPE_FOOTER) { [weak self] isRefresh in
-//                    if self?.refreshHandle != nil {
-//                        self?.refreshHandle!(isRefresh, self?.contentType ?? .catagery)
-//                    }
-//                }
-            } else {
+            if contentType == .catagery {
                 lastIndexPath = IndexPath(item: 0, section: 0)
             }
         }
@@ -68,13 +61,13 @@ class PQStuckPointMusicContentController: BFBaseViewController {
         }
         // 延迟scrollView上子视图的响应,所以当直接拖动UISlider时,如果此时touch时间在150ms以内,UIScrollView会认为是拖动自己,从而拦截了event,导致UISlider接收不到滑动的event
         collectionView.delaysContentTouches = false
-        collectionView.addRefreshView (type:.REFRESH_TYPE_FOOTER) {[weak self, weak collectionView] isRefresh in
-            if !isRefresh && self?.contentType != .catagery{
+        collectionView.addRefreshView(type: .REFRESH_TYPE_FOOTER) { [weak self, weak collectionView] isRefresh in
+            if !isRefresh, self?.contentType != .catagery {
                 // 请求一下加载更多
                 if self?.refreshHandle != nil {
                     self?.refreshHandle!(isRefresh, self?.contentType ?? .catagery)
                 }
-            }else{
+            } else {
                 collectionView?.mj_footer?.endRefreshing()
             }
         }
@@ -102,7 +95,7 @@ class PQStuckPointMusicContentController: BFBaseViewController {
         emptyData.netDisRefreshBgColor = UIColor.hexColor(hexadecimal: "#FA6400")
         emptyData.netDisTitle = "内容加载失败"
         emptyData.netDisTitleColor = UIColor.hexColor(hexadecimal: "#333333")
-        emptyData.netemptyDisImage = UIImage.init(named: "empty_netDis_icon")
+        emptyData.netemptyDisImage = UIImage(named: "empty_netDis_icon")
         emptyData.netDisRefreshTitle = NSMutableAttributedString(string: "重新加载", attributes: [.font: UIFont.systemFont(ofSize: 16, weight: .medium), .foregroundColor: UIColor.white])
         return emptyData
     }()
@@ -247,9 +240,9 @@ extension PQStuckPointMusicContentController: UICollectionViewDelegate, UICollec
     func collectionView(_ collectionView: UICollectionView, layout _: UICollectionViewLayout, sizeForItemAt indexPath: IndexPath) -> CGSize {
         let itemData: Any = itemList[indexPath.item]
         if let sul = itemData as? ([PQStuckPointMusicTagsModel], ([UICollectionViewLayoutAttributes], CGFloat)) {
-            let height: CGFloat = (sul.0.count > 0) ? (sul.1.1  + 35) : 0
+            let height: CGFloat = (sul.0.count > 0) ? (sul.1.1 + 35) : 0
             return CGSize(width: collectionView.frame.width, height: height)
-        }else if itemData is BFEmptyModel {
+        } else if itemData is BFEmptyModel {
             return CGSize(width: collectionView.frame.width, height: 290)
         } else {
             return CGSize(width: collectionView.frame.width, height: cellHight)

+ 161 - 173
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointPublicController.swift

@@ -6,17 +6,17 @@
 //  Copyright © 2021 BytesFlow. All rights reserved.
 //
 
+import Alamofire
+import BFCommonKit
+import BFUIKit
+import Kingfisher
+import ObjectMapper
 import Photos
 import UIKit
-import ObjectMapper
 import WechatOpenSDK
-import Kingfisher
-import Alamofire
-import BFUIKit
-import BFCommonKit
 
-//mdf by ak 按 UI图 下方操作区的高度是固定的, 其它区高度和设备自适应
-public let bottomOprationBgViewHeight:CGFloat = 322.0
+// mdf by ak 按 UI图 下方操作区的高度是固定的, 其它区高度和设备自适应
+public let bottomOprationBgViewHeight: CGFloat = 322.0
 class PQStuckPointPublicController: BFBaseViewController {
     private var isShared: Bool = false // 是否在分享
     private var isExportSuccess: Bool = false // 是否导出完成
@@ -77,8 +77,8 @@ class PQStuckPointPublicController: BFBaseViewController {
     var clipAudioRange: CMTimeRange = CMTimeRange.zero
     // 导出的开始的开始和结束时间
     var playeTimeRange: CMTimeRange = CMTimeRange()
-    
-    //---------------------------add by ak 保存系统相册使用的变量
+
+    // ---------------------------add by ak 保存系统相册使用的变量
     // 导出有水印的正片
     private var watermarkMovieExporter: PQCompositionExporter!
     // 带水印 MP4 导出地址
@@ -89,10 +89,10 @@ class PQStuckPointPublicController: BFBaseViewController {
     private var endMovieLocalURL: URL?
     // 保存相册的合成视频地址 水印+片尾 MP4 地址
     private var saveMovieLocalURL: URL?
-    
+
     private var isSaveingLocalVideo = false
-  
-    //----------------------------
+
+    // ----------------------------
 
     // 预览大小
     private var preViewSize: CGSize {
@@ -194,7 +194,7 @@ class PQStuckPointPublicController: BFBaseViewController {
         playerHeaderCoverImageView.clipsToBounds = true
 
         let playBtn = UIButton(type: .custom)
-        playBtn.setImage(UIImage.moduleImage(named: "icon_video_play", moduleName: "BFFramework",isAssets: false), for: .normal)
+        playBtn.setImage(UIImage.moduleImage(named: "icon_video_play", moduleName: "BFFramework", isAssets: false), for: .normal)
         playBtn.tag = 4
         playBtn.isUserInteractionEnabled = false
         playerHeaderCoverImageView.addSubview(playBtn)
@@ -242,7 +242,7 @@ class PQStuckPointPublicController: BFBaseViewController {
     lazy var playBtn: UIButton = {
         let playBtn = UIButton(type: .custom)
         playBtn.frame = CGRect(x: (preViewSize.width - cDefaultMargin * 5) / 2, y: (preViewSize.height - cDefaultMargin * 5) / 2, width: cDefaultMargin * 5, height: cDefaultMargin * 5)
-        playBtn.setImage(UIImage.moduleImage(named: "icon_video_play", moduleName: "BFFramework",isAssets: false), for: .normal)
+        playBtn.setImage(UIImage.moduleImage(named: "icon_video_play", moduleName: "BFFramework", isAssets: false), for: .normal)
         playBtn.tag = 4
         playBtn.isHidden = true
         playBtn.isUserInteractionEnabled = false
@@ -297,7 +297,7 @@ class PQStuckPointPublicController: BFBaseViewController {
     // 手势提示
     lazy var pinView: UIImageView = {
         let pinView = UIImageView()
-        pinView.kf.setImage(with: URL(fileURLWithPath: (currentBundlePath()!.path(forResource: "editCoverPin", ofType: ".gif")!)))
+        pinView.kf.setImage(with: URL(fileURLWithPath: currentBundlePath()!.path(forResource: "editCoverPin", ofType: ".gif")!))
         return pinView
     }()
 
@@ -343,7 +343,7 @@ class PQStuckPointPublicController: BFBaseViewController {
         publicTitleView.isHidden = true
         publicTitleView.confirmBtnClock = { [weak self] title in
             BFLog(message: "传出的 title  is :\(String(describing: title))")
-            if title?.count != 0 && title != self?.titleLabel.text {
+            if title?.count != 0, title != self?.titleLabel.text {
                 self?.changPlayerIsPause(isPause: false)
 
                 // 判断文字是否有效
@@ -378,7 +378,7 @@ class PQStuckPointPublicController: BFBaseViewController {
     lazy var shareWechatBtn: UIButton = {
         let shareWechatBtn = UIButton(type: .custom)
         shareWechatBtn.frame = CGRect(x: 0, y: 0, width: 70, height: 70)
-        shareWechatBtn.setImage(UIImage.moduleImage(named: "reCreate_opration_wechat", moduleName: "BFFramework",isAssets: false), for: .normal)
+        shareWechatBtn.setImage(UIImage.moduleImage(named: "reCreate_opration_wechat", moduleName: "BFFramework", isAssets: false), for: .normal)
         shareWechatBtn.backgroundColor = BFConfig.shared.styleBackGroundColor
         shareWechatBtn.addCorner(corner: 6)
         shareWechatBtn.tag = 2
@@ -390,7 +390,7 @@ class PQStuckPointPublicController: BFBaseViewController {
     lazy var shareFriendBtn: UIButton = {
         let shareFriendBtn = UIButton(type: .custom)
         shareFriendBtn.frame = CGRect(x: 0, y: 0, width: 70, height: 70)
-        shareFriendBtn.setImage(UIImage.moduleImage(named: BFConfig.shared.shareFriendBtnImage, moduleName: "BFFramework",isAssets: false), for: .normal)
+        shareFriendBtn.setImage(UIImage.moduleImage(named: BFConfig.shared.shareFriendBtnImage, moduleName: "BFFramework", isAssets: false), for: .normal)
         shareFriendBtn.addCorner(corner: 6)
         shareFriendBtn.tag = 1
         shareFriendBtn.addTarget(self, action: #selector(btnClick(sender:)), for: .touchUpInside)
@@ -425,15 +425,16 @@ class PQStuckPointPublicController: BFBaseViewController {
         bottomOprationBgView.isHidden = true
         return bottomOprationBgView
     }()
-    
-    ///保存视频到相册提示
+
+    /// 保存视频到相册提示
     lazy var saveVideoTipsBgView: UIView = {
         let saveVideoTipsBgView = UIView(frame: CGRect(x: 0, y: cDevice_iPhoneNavBarAndStatusBarHei, width: cScreenWidth, height: 40))
-        saveVideoTipsBgView.backgroundColor = UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.4)
+        saveVideoTipsBgView.backgroundColor = UIColor(red: 0, green: 0, blue: 0, alpha: 0.4)
         saveVideoTipsBgView.isHidden = true
         saveVideoTipsBgView.alpha = 1
         return saveVideoTipsBgView
     }()
+
     lazy var saveVideoTipsLabel: UILabel = {
         let saveVideoTipsLabel = UILabel(frame: CGRect(x: 0, y: 0, width: cScreenWidth, height: 40))
         saveVideoTipsLabel.textColor = .white
@@ -443,7 +444,7 @@ class PQStuckPointPublicController: BFBaseViewController {
         saveVideoTipsLabel.sizeToFit()
         return saveVideoTipsLabel
     }()
-    
+
     // 保存重试
     lazy var saveRetryBtn: UIButton = {
         let saveRetryBtn = UIButton(type: .custom)
@@ -529,8 +530,7 @@ class PQStuckPointPublicController: BFBaseViewController {
 
         view.addSubview(publicTitleView)
         view.addSubview(publicEditCoverView)
-        
-        
+
         view.addSubview(saveVideoTipsBgView)
         saveVideoTipsBgView.addSubview(saveVideoTipsLabel)
         saveVideoTipsBgView.addSubview(saveRetryBtn)
@@ -545,7 +545,6 @@ class PQStuckPointPublicController: BFBaseViewController {
             make.width.equalTo(50)
         }
 
- 
         coverImageTitle.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(settingCoverImage)))
         coverImageView.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(settingCoverImage)))
 
@@ -644,20 +643,19 @@ class PQStuckPointPublicController: BFBaseViewController {
 
         // 取推荐标题
         getTitles()
-        
+
         networkStausListen()
-      
     }
 
     override func viewWillAppear(_ animated: Bool) {
         super.viewWillAppear(animated)
         PQNotification.addObserver(self, selector: #selector(enterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
         PQNotification.addObserver(self, selector: #selector(willEnterForeground), name: UIApplication.willEnterForegroundNotification, object: nil)
-        
+
         DispatchQueue.main.async {
             UIApplication.shared.isIdleTimerDisabled = true
         }
-        //从相册选择一个照片后回调
+        // 从相册选择一个照片后回调
         addNotification(self, selector: #selector(imageSelectedImage(notify:)), name: cSelectedImageSuccessKey, object: nil)
 
         #if swift(>=4.2)
@@ -694,21 +692,22 @@ class PQStuckPointPublicController: BFBaseViewController {
         if exporter != nil {
             exporter.cancel()
         }
-        if watermarkMovieExporter != nil{
+        if watermarkMovieExporter != nil {
             watermarkMovieExporter.cancel()
         }
-        if endMovieExporter != nil{
+        if endMovieExporter != nil {
             endMovieExporter.cancel()
         }
- 
-     
+
         avPlayer.pause()
         avPlayer.replaceCurrentItem(with: nil)
         // 点击上报:返回按钮
         PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_click_back, pageSource: .sp_stuck_publishSyncedUp, extParams: nil, remindmsg: "卡点视频数据上报-(点击上报:返回按钮)")
     }
+
     // MARK: - 网络监控
-    func networkStausListen(){
+
+    func networkStausListen() {
         manager?.startListening(onUpdatePerforming: { status in
             if status == .reachable(.cellular) || status == .reachable(.ethernetOrWiFi) {
                 cHiddenHUB(superView: nil)
@@ -719,7 +718,6 @@ class PQStuckPointPublicController: BFBaseViewController {
     }
 }
 
-
 // MARK: - 导出/上传/下载及其他方法
 
 /// 导出/上传/下载及其他方法
@@ -736,8 +734,8 @@ extension PQStuckPointPublicController {
 
         let originaDuration = CMTimeGetSeconds(clipAudioRange.duration)
         BFLog(message: "处理主音频 原始时长startTime = \(originaDuration) 要显示时长totalDuration = \(mTotalDuration)")
-        //originaDuration =  37.616768 mTotalDuration = 37.616776 TODO 都用 INT 微秒级
-        if  Float64(String(format: "%.3f",mTotalDuration)) ?? 0.0 <=  Float64(String(format: "%.3f",originaDuration)) ?? 0.0 {
+        // originaDuration =  37.616768 mTotalDuration = 37.616776 TODO 都用 INT 微秒级
+        if Float64(String(format: "%.3f", mTotalDuration)) ?? 0.0 <= Float64(String(format: "%.3f", originaDuration)) ?? 0.0 {
             BFLog(message: "不用拼接音频文件 \(originAsset.url) 时长is \(CMTimeGetSeconds(originAsset.duration))")
             completeHander(originAsset.url)
             return
@@ -757,7 +755,7 @@ extension PQStuckPointPublicController {
         if count > 0 {
             for index in 0 ..< count {
                 // 第0段从0开始到推荐的结束,播放器的开始时间不是从0开始的
-                duration = CMTime(value: CMTimeValue((CMTimeGetSeconds(clipAudioRange.end)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
+                duration = CMTime(value: CMTimeValue(CMTimeGetSeconds(clipAudioRange.end) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
                 BFLog(message: "每一个文件的 duration \(CMTimeGetSeconds(duration))")
                 var timeRange = CMTimeRangeMake(start: .zero, duration: duration)
 
@@ -849,7 +847,7 @@ extension PQStuckPointPublicController {
     }
 
     func appendAudio() {
-        //更新一下假进度
+        // 更新一下假进度
         updatePublicCurrentProgress(useProgress: 0.01)
         let inputAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + (audioMixModel?.localPath ?? "")), options: nil)
         let startMergeTime = CFAbsoluteTimeGetCurrent()
@@ -858,17 +856,16 @@ extension PQStuckPointPublicController {
             if completURL != nil {
                 let asset = AVURLAsset(url: completURL!, options: nil)
                 BFLog(message: "拼接后音频时长\(asset.duration.seconds)  url is \(String(describing: completURL)) 用时\(CFAbsoluteTimeGetCurrent() - startMergeTime)")
-                //导出不带水印的正片
+                // 导出不带水印的正片
                 self?.beginExport(inputAsset: asset)
-                
-                if(BFConfig.shared.enableWatermarkMovie){
-                    //导出带水印的正片
-                    self?.beginExportWatermarkMovie(inputAsset:asset)
+
+                if BFConfig.shared.enableWatermarkMovie {
+                    // 导出带水印的正片
+                    self?.beginExportWatermarkMovie(inputAsset: asset)
                 }
-            }else{
+            } else {
                 cShowHUB(superView: self?.view, msg: "合成失败请重试。")
             }
-            
         }
     }
 
@@ -904,7 +901,7 @@ extension PQStuckPointPublicController {
             }
         }
         BFLog(message: "导出设置的码率为:\(orgeBitRate)")
-        let tempBeginExport =  Date().timeIntervalSince1970
+        let tempBeginExport = Date().timeIntervalSince1970
         exporter.showGaussianBlur = true
         if exporter.prepare(videoSize: CGSize(width: editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0, height: editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0), videoAverageBitRate: orgeBitRate) {
             BFLog(message: "开始导出 \(String(describing: playeTimeRange.start)) 结束 \(String(describing: playeTimeRange.end))")
@@ -912,7 +909,7 @@ extension PQStuckPointPublicController {
             BFLog(message: "开始导出")
         }
         exporter.progressClosure = { [weak self] _, _, progress in
-            BFLog(message: "正片合成进度 \(progress*100)%")
+            BFLog(message: "正片合成进度 \(progress * 100)%")
             let useProgress = progress > 1 ? 1 : progress
             if progress > 0, Int(useProgress * 100) > (self?.exportProgrss ?? 0) {
                 // 更新进度
@@ -920,12 +917,12 @@ extension PQStuckPointPublicController {
             }
         }
         exporter.completion = { [weak self] url in
-           
-            //输出视频时长
-           let  outSeconds = CMTimeGetSeconds(AVAsset(url: (url ?? URL(string: "https://media.w3.org/2010/05/sintel/trailer.mp4")!)).duration)
-            
+
+            // 输出视频时长
+            let outSeconds = CMTimeGetSeconds(AVAsset(url: url ?? URL(string: "https://media.w3.org/2010/05/sintel/trailer.mp4")!).duration)
+
             BFLog(message: "无水印的视频导出完成: \(String(describing: url)) 生成视频时长为:\(outSeconds)")
-            if(outSeconds == 0){
+            if outSeconds == 0 {
                 cShowHUB(superView: self?.view, msg: "合成失败请重试。")
                 return
             }
@@ -939,15 +936,14 @@ extension PQStuckPointPublicController {
                 self?.isExportSuccess = true
                 self?.exportEndDate = Date().timeIntervalSince1970
                 BFLog(message: "视频导出完成-开始去发布视频 总时长为\((self?.exportEndDate ?? 0) - (self?.startExportDate ?? 0) * 1000) 总用时\((self?.exportEndDate ?? 0) - tempBeginExport)")
-   
+
                 self?.exportLocalURL = url
-                
-                
+
                 // add by ak 不生成水印视频时直接自动保存系统相册,e.g. 乐活圈中会执行
-                if(!BFConfig.shared.enableWatermarkMovie){
+                if !BFConfig.shared.enableWatermarkMovie {
                     self?.authorizationStatus()
                 }
-       
+
                 /// fp2-1-1 - 请求权限
 //                self?.authorizationStatus()
                 /// fp2-2 - 保存草稿
@@ -983,10 +979,9 @@ extension PQStuckPointPublicController {
     /// - Parameter localPath: localPath description
     /// - Returns: <#description#>
     func saveStuckPointVideo() {
-        
-        let tempSaveMoveiLocal:URL? = BFConfig.shared.enableWatermarkMovie ? saveMovieLocalURL : exportLocalURL
-        
-        if(tempSaveMoveiLocal == nil){
+        let tempSaveMoveiLocal: URL? = BFConfig.shared.enableWatermarkMovie ? saveMovieLocalURL : exportLocalURL
+
+        if tempSaveMoveiLocal == nil {
             BFLog(message: "保存相册的视频导出地址无效!!!")
             cShowHUB(superView: nil, msg: "保存相册的视频导出地址无效")
             saveVideoTipsLabel.text = "视频保存失败"
@@ -1013,15 +1008,13 @@ extension PQStuckPointPublicController {
                             DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.8) { [weak self] in
                                 self?.saveVideoTipsBgView.isHidden = true
                             }
-                         
+
                         } else {
                             self?.saveVideoTipsLabel.text = "视频保存失败"
                             self?.saveRetryBtn.isHidden = false
 //                            DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.8) { [weak self] in
 //                                self?.saveVideoTipsBgView.isHidden = true
 //                            }
-
-                            
                         }
                     }
                 }
@@ -1130,7 +1123,7 @@ extension PQStuckPointPublicController {
             if isExportSuccess, exportLocalURL != nil {
                 let size = try! exportLocalURL?.resourceValues(forKeys: [.fileSizeKey])
                 BFLog(message: "size = \(String(describing: size))")
-                if (size?.fileSize ?? 0) > 0 && Float64(size?.fileSize ?? 0) <= maxUploadSize {
+                if (size?.fileSize ?? 0) > 0, Float64(size?.fileSize ?? 0) <= maxUploadSize {
                     /// fp5 - 上传视频
                     reUploadVideo()
                 }
@@ -1194,10 +1187,10 @@ extension PQStuckPointPublicController {
                 if code == 6 { // 无网
                     let uploadRequest: OSSMultipartUploadRequest? = PQAliOssUtil.shared.allTasks[self?.uploadData?.videoBucketKey ?? ""]
                     if !(uploadRequest != nil && "\(uploadRequest?.callbackParam["code"] ?? "0")" == "1") {
-                        self?.showUploadRemindView(msg:"aliOss")
+                        self?.showUploadRemindView(msg: "aliOss")
                     }
                 } else if code == 260 {
-                    self?.showUploadRemindView(isNetCollected: false, msg:"aliOss")
+                    self?.showUploadRemindView(isNetCollected: false, msg: "aliOss")
                 } else if code != 1 {
                     // 上传失败-播放视频
                     self?.publicEnd(isError: true)
@@ -1235,7 +1228,7 @@ extension PQStuckPointPublicController {
             let size = try! URL(string: uploadData?.localPath ?? "")?.resourceValues(forKeys: [.fileSizeKey])
             BFLog(message: "size = \(String(describing: size))")
             if Float64(size?.fileSize ?? 0) > maxUploadSize {
-                cShowHUB(superView: nil, msg:"无法发布大于10G的视频,请重新选择/合成发布")
+                cShowHUB(superView: nil, msg: "无法发布大于10G的视频,请重新选择/合成发布")
                 // 上传失败-播放视频
                 publicEnd(isError: true)
                 return
@@ -1311,6 +1304,9 @@ extension PQStuckPointPublicController {
                     reCreateVideo.reProduceVideoFlag = 1
                     self?.videoData?.reCreateVideoData = reCreateVideo
                 }
+                if self?.videoData != nil {
+                    postNotification(name: cUpdateVideoSuccessKey, userInfo: ["videoData": (self?.videoData)!])
+                }
                 postNotification(name: cPublishStuckPointSuccessKey, userInfo: ["newVideoData": self?.videoData ?? PQVideoListModel()])
                 BFLog(message: "发布成功==\(videoData.title ?? ""),uplpadBucketKey = \(videoData.uplpadBucketKey ?? "")")
 //                                cShowHUB(superView: nil, msg: "视频发布成功")
@@ -1344,21 +1340,20 @@ extension PQStuckPointPublicController {
             cShowHUB(superView: nil, msg: "视频发布失败,请重新合成")
         } else {
             bottomOprationBgView.isHidden = false
-            //add by ak 发布成功后如果带片尾的视频还没有生成成功时,出提示
-            self.saveRetryBtn.isHidden = true
-            self.saveVideoTipsBgView.isHidden = false
-            if(self.isSaveingLocalVideo){
+            // add by ak 发布成功后如果带片尾的视频还没有生成成功时,出提示
+            saveRetryBtn.isHidden = true
+            saveVideoTipsBgView.isHidden = false
+            if isSaveingLocalVideo {
                 saveVideoTipsLabel.text = "视频保存中..."
-            }else{
-                self.saveVideoTipsLabel.text = "视频已保存到相册"
+            } else {
+                saveVideoTipsLabel.text = "视频已保存到相册"
                 DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.8) { [weak self] in
                     self?.saveVideoTipsBgView.isHidden = true
                 }
             }
-            if self.isSaveingLocalVideo{
+            if isSaveingLocalVideo {
                 saveVideoTipsBgView.isHidden = false
             }
-            
         }
     }
 
@@ -1467,7 +1462,13 @@ extension PQStuckPointPublicController {
 
             // 点击上报:完成
             PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_click_finished, pageSource: .sp_stuck_publishSyncedUp, extParams: ["videoId": videoData?.uniqueId ?? ""], remindmsg: "卡点视频数据上报-(点击上报:完成)")
-            navigationController?.viewControllers = [(navigationController?.viewControllers.first)!]
+            bf_getCurrentViewController()?.dismiss(animated: false) {
+                bf_getCurrentViewController()?.navigationController?.viewControllers = [bf_getCurrentViewController()?.navigationController?.viewControllers.first ?? BFBaseViewController()]
+                (bf_getRootViewController() as? UITabBarController)?.selectedIndex = 4
+                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 0.5) {
+                    postNotification(name: cPublishSuccessKey)
+                }
+            }
             // 发送通知
             postNotification(name: cFinishedPublishedNotiKey)
         case 97:
@@ -1486,7 +1487,7 @@ extension PQStuckPointPublicController {
     ///   - msg: <#msg description#>
     func showUploadRemindView(isNetCollected _: Bool = true, msg: String? = nil) {
         view.endEditing(true)
-        
+
         let emptyData = BFEmptyModel()
         emptyData.isRefreshHidden = false
         emptyData.title = "上传失败"
@@ -1494,26 +1495,26 @@ extension PQStuckPointPublicController {
         emptyData.summary = "建议切换 WIFI/移动网络后再重试"
         emptyData.summaryColor = UIColor.hexColor(hexadecimal: "#353535")
         emptyData.refreshBgColor = UIColor.hexColor(hexadecimal: BFConfig.shared.styleColor.rawValue)
-        emptyData.refreshTitle = NSMutableAttributedString(string: "立即重试", attributes: [.foregroundColor:UIColor.white])
-        emptyData.emptySoureImage = UIImage.moduleImage(named: "stuckPoint_video_empty", moduleName: "BFMaterialKit",isAssets: false)
+        emptyData.refreshTitle = NSMutableAttributedString(string: "立即重试", attributes: [.foregroundColor: UIColor.white])
+        emptyData.emptySoureImage = UIImage.moduleImage(named: "stuckPoint_video_empty", moduleName: "BFMaterialKit", isAssets: false)
         emptyData.netDisRefreshBgColor = UIColor.hexColor(hexadecimal: "#FA6400")
         emptyData.netDisTitle = "内容加载失败"
         emptyData.netDisTitleColor = UIColor.hexColor(hexadecimal: "#333333")
-        emptyData.netemptyDisImage = UIImage.moduleImage(named: "empty_netDis_icon", moduleName: "BFMaterialKit",isAssets: false)
+        emptyData.netemptyDisImage = UIImage.moduleImage(named: "empty_netDis_icon", moduleName: "BFMaterialKit", isAssets: false)
         emptyData.netDisRefreshTitle = NSMutableAttributedString(string: "重新加载", attributes: [.font: UIFont.systemFont(ofSize: 16, weight: .medium), .foregroundColor: UIColor.white])
-        
+
         let emptyRemindView = BFEmptyRemindView(frame: CGRect(x: 0, y: cDevice_iPhoneNavBarAndStatusBarHei, width: view.frame.width, height: view.frame.height - cDevice_iPhoneNavBarAndStatusBarHei))
 //        emptyRemindView.isHidden = true
         emptyRemindView.emptyData = emptyData
         emptyRemindView.backgroundColor = BFConfig.shared.styleBackGroundColor
-        emptyRemindView.fullRefreshBloc = {[weak self, weak emptyRemindView] _, _ in
+        emptyRemindView.fullRefreshBloc = { [weak self, weak emptyRemindView] _, _ in
             if emptyRemindView?.refreshBtn.currentAttributedTitle?.string == "立即重试" {
                 emptyRemindView?.isHidden = true
                 // 重试逻辑
-                if let message = msg{
+                if let message = msg {
                     if message.contains("token") {
                         self?.uploadVideo()
-                    }else if message.contains("aliOss"){
+                    } else if message.contains("aliOss") {
                         self?.uploadVideo()
                     }
                 }
@@ -1521,7 +1522,7 @@ extension PQStuckPointPublicController {
         }
         emptyRemindView.refreshBtn.addCorner(corner: 4)
         view.addSubview(emptyRemindView)
-    
+
 //        BFRemindView.showUploadRemindView(title: "上传失败", summary: (msg != nil ? msg! : "视频文件已丢失"), confirmTitle:  "立即重试") { [weak self] _, _ in
 //            self?.navigationController?.popToViewController((self?.navigationController?.viewControllers[1])!, animated: true)
 //        }
@@ -1594,7 +1595,6 @@ extension PQStuckPointPublicController {
         }
 
         PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_shanyinApp_clickButton_changeTitle, pageSource: .sp_stuck_publishSyncedUp, eventData: ["videoId": videoData?.uniqueId ?? "", "rootPageSource": isReCreate ? "shanyinApp-main-syncedUpMusicRecreate" : "shanyinApp-main-syncedUpMusic"], remindmsg: "")
-
     }
 
     @objc func settingCoverImage() {
@@ -1632,15 +1632,11 @@ extension PQStuckPointPublicController {
         }
 
         PQEventTrackViewModel.baseReportUpload(businessType: .bt_buttonClick, objectType: .ot_shanyinApp_clickButton_changeCover, pageSource: .sp_stuck_publishSyncedUp, eventData: ["videoId": videoData?.uniqueId ?? "", "rootPageSource": isReCreate ? "shanyinApp-main-syncedUpMusicRecreate" : "shanyinApp-main-syncedUpMusic"], remindmsg: "")
-
     }
 
     // 更新标题或封面
     func updateCoverImagegOrTitle() {
         BFLoadingHUB.shared.showHUB(isMode: true)
-        // SanW - 待修改 -
-//        BFLoadingHUB.shared.showHUB(isMode: true)
-        
         PQBaseViewModel.ossTempToken { [weak self] response, msg in
             let image: UIImage = (self?.uploadData?.image)!
             let data = image.jpegData(compressionQuality: 1)
@@ -1659,10 +1655,9 @@ extension PQStuckPointPublicController {
                 )
                 .uploadObjectAsync(bucketName: bucketName, objectKey: objectKey, data: data!, fileExtensions: "png", imageUploadBlock: { _, code, ossObjectKey, _ in
                     if code == 1 && ossObjectKey == objectKey && ossObjectKey.count > 0 {
+                        // add by ak 这里会在服务器生成分享使用的图片到1-2S 时间
+                        PQUploadViewModel.updateVideo(title: self?.videoData?.title ?? "", videoId: self?.videoData?.uniqueId ?? "", coverImgPath: objectKey, descr: "") { _, newVideoData, msg in
 
-                        //add by ak 这里会在服务器生成分享使用的图片到1-2S 时间
-                        PQUploadViewModel.updateVideo(title: self?.videoData?.title ?? "", videoId: self?.videoData?.uniqueId ?? "", coverImgPath: objectKey, descr: "") {_, newVideoData, msg in
-                            
                             if newVideoData == nil {
                                 cShowHUB(superView: self?.view, msg: msg)
                                 // 可能有敏感词 要刷一组新标题并自动更新
@@ -1682,8 +1677,8 @@ extension PQStuckPointPublicController {
                         BFLoadingHUB.shared.dismissHUB()
                     }
                 })
-
-    } }
+        }
+    }
 
     func setTitleText(text: String, textColor: UIColor = UIColor.hexColor(hexadecimal: "#ABABAB")) {
         selectTitle = text
@@ -1700,13 +1695,13 @@ extension PQStuckPointPublicController {
         PQBaseViewModel.getBaseConfig(completeHander: { [weak self] titles in
 
             if (titles?.count ?? 0) > 0 {
-                var temp:Array<String> = titles!
-                if((titles?.count ?? 0) <= 13){
-                    for _ in 0 ... (13 - (titles?.count ?? 0)){
+                var temp: [String] = titles!
+                if (titles?.count ?? 0) <= 13 {
+                    for _ in 0 ... (13 - (titles?.count ?? 0)) {
                         temp.append("")
                     }
                 }
-           
+
                 self?.publicTitleView.titles = temp
 
                 let numberRandom: UInt32 = UInt32(arc4random_uniform(UInt32(titles!.count)))
@@ -1732,8 +1727,9 @@ extension PQStuckPointPublicController {
 }
 
 // MARK: - 导出带水印+片尾的视频相关方法
+
 extension PQStuckPointPublicController {
-    //导出有水印的正片子
+    // 导出有水印的正片子
     func beginExportWatermarkMovie(inputAsset: AVURLAsset!) {
         if !(editProjectModel?.sData?.sections != nil && (editProjectModel?.sData?.sections.count ?? 0) > 0) {
             BFLog(message: "项目段落错误❌")
@@ -1774,7 +1770,6 @@ extension PQStuckPointPublicController {
         }
         watermarkMovieExporter.progressClosure = { _, _, progress in
             BFLog(message: "带水印的合成进度 \(progress) ")
-          
         }
         watermarkMovieExporter.completion = { [weak self] url in
             BFLog(message: "有水印的视频导出完成: \(String(describing: url)) 生成视频时长为:\(CMTimeGetSeconds(AVAsset(url: url ?? URL(string: "https://media.w3.org/2010/05/sintel/trailer.mp4")!).duration))")
@@ -1783,16 +1778,15 @@ extension PQStuckPointPublicController {
             if self?.watermarkMovieExporter != nil {
                 self?.watermarkMovieExporter.cancel()
             }
- 
+
             self?.watermarkMovieLocalURL = url
-            
-            //开始导出片尾 成功后自动保存到相册
+
+            // 开始导出片尾 成功后自动保存到相册
             self?.beginExportEndMovie()
-           
- 
         }
     }
-    //导出片尾视频
+
+    // 导出片尾视频
     func beginExportEndMovie() {
         if !(editProjectModel?.sData?.sections != nil && (editProjectModel?.sData?.sections.count ?? 0) > 0) {
             BFLog(message: "项目段落错误❌")
@@ -1807,16 +1801,16 @@ extension PQStuckPointPublicController {
         outPutMP4Path.append("endMovie_\(String.qe.timestamp()).mp4")
         let outPutMP4URL = URL(fileURLWithPath: outPutMP4Path)
         BFLog(message: "导出视频地址 \(outPutMP4URL)")
-        
+
         var orgeBitRate = (editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) * (editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) * 3
-        
-        //片尾的视频素材地址
-        let moveResPath = currentBundlePath()!.path(forResource:  (editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) <  (editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) ? "endMovieB" : "endMovieA", ofType: "mp4")
-        if(moveResPath?.count ?? 0 == 0){
+
+        // 片尾的视频素材地址
+        let moveResPath = currentBundlePath()!.path(forResource: (editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) < (editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) ? "endMovieB" : "endMovieA", ofType: "mp4")
+        if moveResPath?.count ?? 0 == 0 {
             BFLog(message: "片尾的视频素材地址无效!!!")
             return
         }
-   
+
         let movieAsset = AVURLAsset(url: URL(fileURLWithPath: moveResPath!), options: avAssertOptions)
         let cbr = movieAsset.tracks(withMediaType: .video).first?.estimatedDataRate
         BFLog(message: "cbr  is\(cbr ?? 0)")
@@ -1824,30 +1818,30 @@ extension PQStuckPointPublicController {
             orgeBitRate = Int(cbr ?? 0)
         }
         BFLog(message: "导出设置的码率为:\(orgeBitRate)")
-  
-        //头像保存沙盒地址
+
+        // 头像保存沙盒地址
         BFLog(message: "头像的网络地址\(BFLoginUserInfo.shared.avatarUrl)")
         let avatarFilePath = NSHomeDirectory().appending("/Documents/").appending("user_avatar.jpg")
-        
+
         // warning:给默认头像吧
-        ImageDownloader.default.downloadImage(with: URL(string: BFLoginUserInfo.shared.avatarUrl)!, options: nil) {[weak self] result in
-            var image : UIImage?
+        ImageDownloader.default.downloadImage(with: URL(string: BFLoginUserInfo.shared.avatarUrl)!, options: nil) { [weak self] result in
+            var image: UIImage?
             switch result {
-            case let .success(imageResult):  
+            case let .success(imageResult):
                 image = UIImage.nx_circleImage(imageResult.image)
-                   
+
             case let .failure(error):
-                image = UIImage.moduleImage(named: "user_avatar_normal", moduleName: "BFFramework", isAssets:false)
+                image = UIImage.moduleImage(named: "user_avatar_normal", moduleName: "BFFramework", isAssets: false)
                 BFLog(message: "下载头像图片失败:\(error.localizedDescription)")
             }
-            if(image == nil){
+            if image == nil {
                 BFLog(message: "image date is error!!")
                 return
             }
             UIImage.saveImage(currentImage: image!, outFilePath: avatarFilePath)
-            
-            //1,背景视频素材
-            let bgMovieInfo:PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel.init()
+
+            // 1,背景视频素材
+            let bgMovieInfo: PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel()
             bgMovieInfo.type = StickerType.VIDEO.rawValue
             bgMovieInfo.locationPath = moveResPath ?? ""
             bgMovieInfo.timelineIn = 0
@@ -1855,105 +1849,99 @@ extension PQStuckPointPublicController {
             bgMovieInfo.model_in = bgMovieInfo.timelineIn
             bgMovieInfo.out = bgMovieInfo.timelineOut
             bgMovieInfo.canvasFillType = stickerContentMode.aspectFitStr.rawValue
-            //2,用户头像素材
+            // 2,用户头像素材
             BFLog(message: "头像的沙盒地址:\(avatarFilePath)")
-            let avatarSticker:PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel.init()
+            let avatarSticker: PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel()
             avatarSticker.locationPath = avatarFilePath.replacingOccurrences(of: documensDirectory, with: "")
             avatarSticker.timelineIn = bgMovieInfo.timelineIn
             avatarSticker.timelineOut = bgMovieInfo.timelineOut
             avatarSticker.canvasFillType = stickerContentMode.aspectFitStr.rawValue
 
-             //头像绘制大小\位置
-            var avatarSize:Float = 0.0
-            var avatarTop:Float = 0.0
-            if((self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) > (self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0)){
-                 //竖屏
+            // 头像绘制大小\位置
+            var avatarSize: Float = 0.0
+            var avatarTop: Float = 0.0
+            if (self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) > (self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) {
+                // 竖屏
                 avatarSize = Float(self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) * 360.0 / 1080.0
                 avatarTop = 430
-            }else{
-                //横屏屏
+            } else {
+                // 横屏屏
                 avatarSize = Float(self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) * 300.0 / 1080.0
                 avatarTop = Float(self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) * 130.0 / 1080.0
             }
 
-            let avatarPostion:PQEditMaterialPositionModel = PQEditMaterialPositionModel.init()
+            let avatarPostion: PQEditMaterialPositionModel = PQEditMaterialPositionModel()
             avatarPostion.width = Int(avatarSize)
             avatarPostion.height = Int(avatarSize)
             avatarPostion.x = ((self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) - Int(avatarSize)) / 2
             avatarPostion.y = Int(avatarTop)
             avatarSticker.materialPosition = avatarPostion
-            
-            //3,用户名素材
-            let userNameSticker:PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel.init()
+
+            // 3,用户名素材
+            let userNameSticker: PQEditVisionTrackMaterialsModel = PQEditVisionTrackMaterialsModel()
             userNameSticker.timelineIn = bgMovieInfo.timelineIn
             userNameSticker.timelineOut = bgMovieInfo.timelineOut
             userNameSticker.type = StickerType.SUBTITLE.rawValue
-            
-         
-            //用户名绘制用到的参数
-            var userNameTop:Float = 0.0
-            var userNameFontSize:Float = 0.0
-            if((self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) > (self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0)){
-             //竖屏
+
+            // 用户名绘制用到的参数
+            var userNameTop: Float = 0.0
+            var userNameFontSize: Float = 0.0
+            if (self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) > (self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) {
+                // 竖屏
                 userNameTop = 870
                 userNameFontSize = Float(self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) * 100.0 / 1080.0
-            }else{
-                 //横屏
+            } else {
+                // 横屏
                 userNameTop = Float(self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) * 480 / 1080.0
                 userNameFontSize = Float(self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0) * 70.0 / 1080.0
             }
-          
-            let subtitleInfo:PQEditSubtitleInfoModel = PQEditSubtitleInfoModel.init()
+
+            let subtitleInfo: PQEditSubtitleInfoModel = PQEditSubtitleInfoModel()
             subtitleInfo.fontSize = Int(userNameFontSize)
             subtitleInfo.text = BFLoginUserInfo.shared.nickName
             userNameSticker.subtitleInfo = subtitleInfo
 
-            let userNamePostion:PQEditMaterialPositionModel = PQEditMaterialPositionModel.init()
-            userNamePostion.width = Int(userNameFontSize ) * 10
-            userNamePostion.height = Int(userNameFontSize ) * 3
-            userNamePostion.x = ((self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) -  userNamePostion.width) / 2
+            let userNamePostion: PQEditMaterialPositionModel = PQEditMaterialPositionModel()
+            userNamePostion.width = Int(userNameFontSize) * 10
+            userNamePostion.height = Int(userNameFontSize) * 3
+            userNamePostion.x = ((self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0) - userNamePostion.width) / 2
             userNamePostion.y = Int(userNameTop)
             userNameSticker.materialPosition = userNamePostion
 
-            //4,音频
+            // 4,音频
             let soundResPath = currentBundlePath()!.path(forResource: "endMovieSound", ofType: "mp3")
-            let soundAsset = AVURLAsset(url:  URL(fileURLWithPath: soundResPath ?? ""), options: nil)
-            self?.endMovieExporter = PQCompositionExporter(asset: soundAsset, videoComposition: nil, audioMix: nil, filters: nil, stickers: [bgMovieInfo,avatarSticker,userNameSticker], animationTool: nil, exportURL: outPutMP4URL)
+            let soundAsset = AVURLAsset(url: URL(fileURLWithPath: soundResPath ?? ""), options: nil)
+            self?.endMovieExporter = PQCompositionExporter(asset: soundAsset, videoComposition: nil, audioMix: nil, filters: nil, stickers: [bgMovieInfo, avatarSticker, userNameSticker], animationTool: nil, exportURL: outPutMP4URL)
             self?.endMovieExporter.isEndMovie = true
-            if ((self?.endMovieExporter.prepare(videoSize: CGSize(width: self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0, height: self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0), videoAverageBitRate: orgeBitRate)) != nil) {
-
-                self?.endMovieExporter.start(playeTimeRange: CMTimeRange.init(start: CMTime.zero, duration: CMTimeMakeWithSeconds(Float64(bgMovieInfo.out), preferredTimescale: BASE_FILTER_TIMESCALE)))
+            if (self?.endMovieExporter.prepare(videoSize: CGSize(width: self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0, height: self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0), videoAverageBitRate: orgeBitRate)) != nil {
+                self?.endMovieExporter.start(playeTimeRange: CMTimeRange(start: CMTime.zero, duration: CMTimeMakeWithSeconds(Float64(bgMovieInfo.out), preferredTimescale: BASE_FILTER_TIMESCALE)))
                 BFLog(message: "开始导出")
             }
             self?.endMovieExporter.progressClosure = { _, _, progress in
                 BFLog(message: "片尾合成进度 \(progress) ")
             }
-            
+
             self?.endMovieExporter.completion = { [weak self] url in
                 BFLog(message: "片尾的视频导出完成: \(String(describing: url)) 生成视频时长为:\(CMTimeGetSeconds(AVAsset(url: url ?? URL(string: "https://media.w3.org/2010/05/sintel/trailer.mp4")!).duration))")
-                
+
                 // 导出完成后取消导出
                 if self?.endMovieExporter != nil {
                     self?.endMovieExporter.cancel()
                 }
                 self?.endMovieLocalURL = url
-                //拼接水印正片和片尾
-                if(self?.watermarkMovieLocalURL != nil && self?.endMovieLocalURL != nil){
-                    let videoMerge:NXVideoMerge = NXVideoMerge.init()
-                    videoMerge.mergeAndExportVideos(withFileURLs: [self!.watermarkMovieLocalURL!,self!.endMovieLocalURL!], renderSize:CGSize(width: self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0, height: self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0)) { isSuccess, outFileURL in
-                        if(isSuccess){
+                // 拼接水印正片和片尾
+                if self?.watermarkMovieLocalURL != nil, self?.endMovieLocalURL != nil {
+                    let videoMerge: NXVideoMerge = NXVideoMerge()
+                    videoMerge.mergeAndExportVideos(withFileURLs: [self!.watermarkMovieLocalURL!, self!.endMovieLocalURL!], renderSize: CGSize(width: self?.editProjectModel?.sData?.videoMetaData?.videoWidth ?? 0, height: self?.editProjectModel?.sData?.videoMetaData?.videoHeight ?? 0)) { isSuccess, outFileURL in
+                        if isSuccess {
                             BFLog(message: "合并视频成功 outFilePath is \(outFileURL ?? "")")
                             self?.saveMovieLocalURL = outFileURL as? URL
-                            //保存到相册 fp2-1-1 - 请求权限
+                            // 保存到相册 fp2-1-1 - 请求权限
                             self?.authorizationStatus()
                         }
                     }
                 }
             }
         }
-
     }
-    
-    
-
 }

+ 2 - 2
BFFramework/Classes/Stuckpoint/View/PQStuckPointMusicContentCell.swift

@@ -134,7 +134,7 @@ class PQStuckPointMusicContentCell: UICollectionViewCell {
                         playImageView.image = UIImage.moduleImage(named: "loading", moduleName: "BFFramework", isAssets: false)
                         startLoadingAnimation()
                     }else {
-                        playImageView.kf.setImage(with: URL(fileURLWithPath: (currentBundle()!.path(forResource: "stuckPoint_music_playing", ofType: ".gif")!)))
+                        playImageView.kf.setImage(with: URL(fileURLWithPath: (currentBundlePath()!.path(forResource: "stuckPoint_music_playing", ofType: ".gif")!)))
 
                     }
                     musicNameLab.move()
@@ -254,7 +254,7 @@ class PQStuckPointMusicContentCell: UICollectionViewCell {
     func stopLoadingAnimation(){
         playImageView.layer.removeAllAnimations()
         if (bgmData as? PQVoiceModel)?.isPlaying ?? false {
-            playImageView.kf.setImage(with: URL(fileURLWithPath: currentBundle()!.path(forResource: "stuckPoint_music_playing", ofType: ".gif")!))
+            playImageView.kf.setImage(with: URL(fileURLWithPath: currentBundlePath()!.path(forResource: "stuckPoint_music_playing", ofType: ".gif")!))
         }
     }
 }

+ 29 - 29
BFFramework/Classes/Stuckpoint/ViewModel/PQGPUImagePlayerView.swift

@@ -130,7 +130,7 @@ public class PQGPUImagePlayerView: UIView {
     var mStickers: [PQEditVisionTrackMaterialsModel]? {
         didSet {
             
-            FilterLog(2, message: "设置线程为: \(Thread.current) \(OperationQueue.current?.underlyingQueue?.label as Any)")
+            BFLog(2, message: "设置线程为: \(Thread.current) \(OperationQueue.current?.underlyingQueue?.label as Any)")
         
             configCache(beginTime: mStickers?.first?.timelineIn ?? 0)
         }
@@ -301,7 +301,7 @@ public class PQGPUImagePlayerView: UIView {
     // 设置画布比例
     public func resetCanvasFrame(frame: CGRect) {
         if self.frame.equalTo(frame) {
-            FilterLog(2, message: "新老值一样,不重置")
+            BFLog(2, message: "新老值一样,不重置")
             return
         }
 
@@ -313,7 +313,7 @@ public class PQGPUImagePlayerView: UIView {
             showBorderLayer()
         }
 
-        FilterLog(2, message: "new frame is \(frame)")
+        BFLog(2, message: "new frame is \(frame)")
         renderView.isHidden = true
         renderView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
         renderView.resatSize()
@@ -360,7 +360,7 @@ public class PQGPUImagePlayerView: UIView {
         stop()
         movie = nil
         speaker = nil
-        FilterLog(1, message: "play view release")
+        BFLog(1, message: "play view release")
     }
 
     /// XXXX 这里的 URL 使用的是全路径 ,如果不是全的会 crash ,方便复用 (不用处理业务的文件放在哪里)
@@ -371,11 +371,11 @@ public class PQGPUImagePlayerView: UIView {
         var composition: AVMutableComposition?
 
         let asset = AVURLAsset(url: url, options: nil)
-        FilterLog(1, message:  "播放器初始化的音频时长\(asset.duration.seconds)  url is \(url),最终使用时长\(originMusicDuration),裁剪范围\(CMTimeGetSeconds(clipAudioRange.start)) 到 \(CMTimeGetSeconds(clipAudioRange.end))")
+        BFLog(1, message:  "播放器初始化的音频时长\(asset.duration.seconds)  url is \(url),最终使用时长\(originMusicDuration),裁剪范围\(CMTimeGetSeconds(clipAudioRange.start)) 到 \(CMTimeGetSeconds(clipAudioRange.end))")
 
         self.asset = asset
         if (audioMixModel != nil && audioMixModel?.localPath != nil) || (videoStickers != nil && (videoStickers?.count ?? 0) > 0 || originMusicDuration != 0) {
-            FilterLog(2, message: "有参加混音的数据。")
+            BFLog(2, message: "有参加混音的数据。")
             (audioMix, composition) = PQPlayerViewModel.setupAudioMix(originAsset: asset, bgmData: audioMixModel, videoStickers: videoStickers,originMusicDuration:originMusicDuration,clipAudioRange: clipAudioRange)
         } else {
             audioMix = nil
@@ -397,7 +397,7 @@ public class PQGPUImagePlayerView: UIView {
 //        }
         do {
             if composition != nil {
-                FilterLog(2, message: "composition 方式初始化")
+                BFLog(2, message: "composition 方式初始化")
                 movie = try PQMovieInput(asset: composition!, videoComposition: videoComposition, audioMix: audioMix, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
 //                movie?.exportAudioUrl = url // clipAudioRange
                 var ranges = Array<CMTimeRange>()
@@ -434,7 +434,7 @@ public class PQGPUImagePlayerView: UIView {
         movie.progress = { [weak self] currTime, duration, prgressValue in
             guard let strongSelf = self else { return }
 
-//            FilterLog(1, message: " movie 进度\(currTime)")
+//            BFLog(1, message: " movie 进度\(currTime)")
             strongSelf.changeFilter(currTime: currTime)
             strongSelf.progress?(currTime, duration, prgressValue)
 
@@ -489,11 +489,11 @@ public class PQGPUImagePlayerView: UIView {
     /// - Parameter beginTime: 开始缓存的开始时间,用在 seek操作时 老的缓存已经无效不能在使用了
     func configCache(beginTime: Float64 ) {
         cacheFilters.removeAll()
-        FilterLog(2, message: "原素材 总数:\(mStickers?.count ?? 0) ")
+        BFLog(2, message: "原素材 总数:\(mStickers?.count ?? 0) ")
        
         if mStickers?.count ?? 0 > 0 {
             for (index, currentSticker) in mStickers!.enumerated() {
-                FilterLog(message: "mStickers timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) index : \(index)")
+                BFLog(message: "mStickers timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) index : \(index)")
                //到达最大缓存数退出
                 if cacheFilters.count == cacheFiltersMaxCount {
                     break
@@ -511,7 +511,7 @@ public class PQGPUImagePlayerView: UIView {
                     (showFitler as? PQImageFilter)?.isPointModel = ((mStickers?.count ?? 0) > 0)
                 }
                 if showFitler != nil {
-                    FilterLog(message: " 加入到缓存 的 filter timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) in :\(currentSticker.model_in) out: \(currentSticker.out) index : \(index)")
+                    BFLog(message: " 加入到缓存 的 filter timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) in :\(currentSticker.model_in) out: \(currentSticker.out) index : \(index)")
                     cacheFilters.append(showFitler!)
                 }
 
@@ -520,7 +520,7 @@ public class PQGPUImagePlayerView: UIView {
             DispatchQueue.global().async {[weak self] in
                 if let strongSelf = self {
                     for (index, filter) in strongSelf.cacheFilters.enumerated() {
-                        FilterLog(2, message: " 初始化 config create currentSticker timelinein \(String(describing: filter.stickerInfo?.timelineIn)) timelineout \(String(describing: filter.stickerInfo?.timelineOut))  in :\(String(describing: filter.stickerInfo?.model_in)) out \(String(describing: filter.stickerInfo?.out))  index\(index)")
+                        BFLog(2, message: " 初始化 config create currentSticker timelinein \(String(describing: filter.stickerInfo?.timelineIn)) timelineout \(String(describing: filter.stickerInfo?.timelineOut))  in :\(String(describing: filter.stickerInfo?.model_in)) out \(String(describing: filter.stickerInfo?.out))  index\(index)")
                     }
                 }
             }
@@ -538,12 +538,12 @@ public class PQGPUImagePlayerView: UIView {
 
     //创建下一个filter 数据
     func createNextFilter() {
-        FilterLog(2, message: "加入前 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
+        BFLog(2, message: "加入前 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
           if cacheFilters.count <=  cacheFiltersMaxCount {
               let showIndex = mStickers?.firstIndex(where: { (sticker) -> Bool in
                 (cacheFilters.last?.stickerInfo == sticker)
               })
-                FilterLog(2, message: "当前显示的showIndex: \(String(describing: showIndex))")
+                BFLog(2, message: "当前显示的showIndex: \(String(describing: showIndex))")
               if ((showIndex ?? 0) + 1) < (mStickers?.count ?? 0) {
                   let currentSticker = mStickers?[(showIndex ?? 0) + 1]
                   if currentSticker != nil {
@@ -560,11 +560,11 @@ public class PQGPUImagePlayerView: UIView {
                           cacheFilters.append(showFitler!)
                       }
                   }else{
-                    FilterLog(2, message: "缓存数据加入不成功!!!!!")
+                    BFLog(2, message: "缓存数据加入不成功!!!!!")
                   }
               }
             
-            FilterLog(2, message: "加入后 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
+            BFLog(2, message: "加入后 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
              
           }
         
@@ -576,7 +576,7 @@ public class PQGPUImagePlayerView: UIView {
     /// - Parameter currTime: 当前播放时间
     func changeFilter(currTime: Float64) {
 //        let  starts:CFTimeInterval = CFAbsoluteTimeGetCurrent()
-        FilterLog(message: " 要查找的 currTime is \(currTime)")
+        BFLog(message: " 要查找的 currTime is \(currTime)")
         //1,删除已经显示过的 filter
         self.cacheFilters.removeAll(where: {(filter) -> Bool in
 
@@ -590,16 +590,16 @@ public class PQGPUImagePlayerView: UIView {
 
         })
         if(showIndex == nil){
-            FilterLog(2, message: "缓存没有查找到?出现数据错误!!!!")
+            BFLog(2, message: "缓存没有查找到?出现数据错误!!!!")
             return
         }
   
         let showFilter: PQBaseFilter = cacheFilters[showIndex ?? 0]
         
-        FilterLog(2, message: "缓存操作   查找到命中的显示是为:\(currTime) 缓存数据timeline in :\(showFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(showFilter.stickerInfo?.timelineOut ?? 0.0) in:\(showFilter.stickerInfo?.model_in ?? 0.0) out:\(showFilter.stickerInfo?.out ?? 0.0) 缓存数 \(cacheFilters.count) index: \(String(describing: showIndex))")
+        BFLog(2, message: "缓存操作   查找到命中的显示是为:\(currTime) 缓存数据timeline in :\(showFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(showFilter.stickerInfo?.timelineOut ?? 0.0) in:\(showFilter.stickerInfo?.model_in ?? 0.0) out:\(showFilter.stickerInfo?.out ?? 0.0) 缓存数 \(cacheFilters.count) index: \(String(describing: showIndex))")
         
         if(!(showFilter.isShow)){
-            FilterLog(2, message: "showIndex当前时间为  \(currTime) showIndex is \(String(describing: showIndex)) 显示 filter timelineIn is: \(String(describing: showFilter.stickerInfo?.timelineIn)) timelineOut is: \(String(describing: showFilter.stickerInfo?.timelineOut))")
+            BFLog(2, message: "showIndex当前时间为  \(currTime) showIndex is \(String(describing: showIndex)) 显示 filter timelineIn is: \(String(describing: showFilter.stickerInfo?.timelineIn)) timelineOut is: \(String(describing: showFilter.stickerInfo?.timelineOut))")
  
             showFilter.isShow = true
             
@@ -611,13 +611,13 @@ public class PQGPUImagePlayerView: UIView {
             //画面的比例
             let canverAspectRatio = String(format: "%.6f",(movie?.mShowVidoSize.width ?? 0.0) /  (movie?.mShowVidoSize.height ?? 0.0))
             if(showFilter.stickerInfo?.type == StickerType.IMAGE.rawValue && showGaussianBlur && Float(stickerAspectRatio) != Float(canverAspectRatio)){
-                      FilterLog(2, message: "显示图片filter")
+                      BFLog(2, message: "显示图片filter")
 //                    //高斯层
                         let  blurStickerModel:PQEditVisionTrackMaterialsModel? = showFilter.stickerInfo?.copy() as? PQEditVisionTrackMaterialsModel
                         blurStickerModel?.canvasFillType = stickerContentMode.aspectFillStr.rawValue
 
                         if blurStickerModel == nil {
-                            FilterLog(2, message: "显示图片filter blurStickerModel is nil")
+                            BFLog(2, message: "显示图片filter blurStickerModel is nil")
                             return
                         }
                         let showGaussianFitler:PQBaseFilter = PQImageFilter(sticker: blurStickerModel!, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
@@ -631,7 +631,7 @@ public class PQGPUImagePlayerView: UIView {
                         iosb.addTarget(showFilter,atTargetIndex: 0)
                         showFilter.addTarget(self.renderView as ImageConsumer, atTargetIndex: 0)
                 
-                        FilterLog(2, message: "filter 添加成功 注意是否添加成功。")
+                        BFLog(2, message: "filter 添加成功 注意是否添加成功。")
                         
 //                    }
  
@@ -645,7 +645,7 @@ public class PQGPUImagePlayerView: UIView {
             }
 
         }else{
-            FilterLog(2, message: " 添加过了 currTime is \(currTime) timelineIn:\(showFilter.stickerInfo?.timelineIn ?? 0.0)")
+            BFLog(2, message: " 添加过了 currTime is \(currTime) timelineIn:\(showFilter.stickerInfo?.timelineIn ?? 0.0)")
         }
     }
 
@@ -683,7 +683,7 @@ public extension PQGPUImagePlayerView {
             self.progressLab.isHidden = false
         }
 //        guard status != .playing else {
-//            FilterLog(2, message: "已经是播放状态")
+//            BFLog(2, message: "已经是播放状态")
 //            return
 //        }
 
@@ -745,7 +745,7 @@ public extension PQGPUImagePlayerView {
 
     // 显示提示文字
     func showTip(show: Bool) {
-        FilterLog(2, message: "showTip \(show)")
+        BFLog(2, message: "showTip \(show)")
         tipLab.isHidden = !show
         if show {
             playerEmptyView.isHidden = true
@@ -785,15 +785,15 @@ public extension PQGPUImagePlayerView {
 // MARK: - RenderViewDelegate
 extension PQGPUImagePlayerView: RenderViewDelegate{
     public func willDisplayFramebuffer(renderView _: RenderView, framebuffer _: Framebuffer) {
-        FilterLog(2, message: "willDisplayFramebuffer")
+        BFLog(2, message: "willDisplayFramebuffer")
     }
 
     public func didDisplayFramebuffer(renderView _: RenderView, framebuffer: Framebuffer) {
-        FilterLog(2, message: "didDisplayFramebuffer")
+        BFLog(2, message: "didDisplayFramebuffer")
     }
 
     public func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool {
-        FilterLog(2, message: "didDisplayFramebuffer")
+        BFLog(2, message: "didDisplayFramebuffer")
         
         return false
     }

+ 2 - 4
BFFramework/Classes/Utils/PQRequestURLUtil.swift

@@ -402,12 +402,10 @@ public let stuckPointProjectMusicInfoUrl = "producevideo/getProjectRhythmMusicIn
 
 // 获取广告配置信息
 public let adPositionInfoUrl = "ad/position/info"
-// 意见反馈id
-public let feedbackAppId = "351068"
 // 意见反馈地址
-public let feedbackUrl = "https://support.qq.com/product/\(feedbackAppId)"
+public let feedbackUrl = "https://support.qq.com/product/\(BFConfig.shared.txFeedbackAppId)"
 // 意见反馈主页
-public let feedbackPageUrl = "https://support.qq.com/embed/phone/\(feedbackAppId)"
+public let feedbackPageUrl = "https://support.qq.com/embed/phone/\(BFConfig.shared.txFeedbackAppId)"
 
 
 // ***************** 激励相关地址 **********************//

+ 2 - 2
BFFramework/Classes/Utils/PQSingletoMemoryUtil.swift

@@ -57,7 +57,7 @@ public class PQSingletoMemoryUtil: NSObject {
     public var isColdLaunch: Bool = false // 冷启动
     public var coldLaunchStatus: Int = 0 // 1-请求中 2-请求成功 3-请求失败
     public var deviceToken: String = "" // 推送deviceToken
-//    var activityData: PQActivityModel? // 活动数据信息
+    public var activityData: Dictionary<String,Any>? // 活动数据信息
     public var isShowTodaySuccess: Bool = false // 是否已经展示过今日已完成
     public var sessionId: String = getUniqueId(desc: "sessionId")
     public var subSessionid: String?
@@ -77,7 +77,7 @@ public class PQSingletoMemoryUtil: NSObject {
     public var coverUrl: String?
 
     // 未读数字
-//    var unReadInfo: PQMsgReadInfoModel?
+    public var unReadInfo: Dictionary<String,Any>?
     public var abInfoData: [String: Any] = Dictionary<String, Any>.init() // 实验数据
     public var allExportSession: [PHAsset:AVAssetExportSession] = [PHAsset:AVAssetExportSession].init()
     public func updateTabPoint() {

+ 0 - 0
BFFramework/Classes/BFModules/BFUtility/PQSingletoVideoPlayer.swift → BFFramework/Classes/Utils/PQSingletoVideoPlayer.swift


+ 1 - 1
Example/Podfile.lock

@@ -154,7 +154,7 @@ EXTERNAL SOURCES:
 SPEC CHECKSUMS:
   Alamofire: f3b09a368f1582ab751b3fff5460276e0d2cf5c9
   AliyunOSSiOS: b8f1dfc229cd9abf68c8ee0cb245c2d66e00dd96
-  BFCommonKit: 697a9d162da8ee14ec75cc0f5d8e26a38a6faafe
+  BFCommonKit: fbebd7d46eaa7adaf5311aae2230b68ab5e99788
   BFFramework: 42273b58a4245e1c4891ae97edb53f72bddae1f4
   BFMaterialKit: a10f33e7748689a3eeffff3b18df9c350241ba8d
   BFNetRequestKit: 6b200205bd1a9491c04f5a3e95301d37a547f96b