浏览代码

Merge branch 'master' into module

* master:
  1.update refreshData
  1.update movieFilter
harry 3 年之前
父节点
当前提交
ebf15196df

+ 20 - 0
BFFramework/Classes/Base/ViewModel/PQUploadViewModel.swift

@@ -136,4 +136,24 @@ public class PQUploadViewModel: NSObject {
             completeHander(coverImages, nil)
         }
     }
+    
+    /// - Parameter completeHander: callback invoked with the STS token info dictionary on success, or an error message on failure.
+    public class func getStsToken(completeHander: @escaping (_ userInfo: [String: Any]?, _ msg: String?) -> Void) {
+        BFNetRequestAdaptor.postRequestData(url: PQENVUtil.shared.longvideoapi + getStsTokenUrl, parames: ["fileType": "2", "type": 1], commonParams: commonParams()) { response, _, error, _ in
+            if error != nil {
+                completeHander(nil, error?.msg)
+                return
+            }
+            completeHander(response as? [String: Any], nil)
+        }
+    }
+    
+    /// 获取 OSS 临时访问凭证 (fetch a temporary OSS access token)
+    /// - Parameter completeHander: callback invoked with the token info dictionary, or nil when the response carries no data.
+    /// - Note: the result is delivered asynchronously via completeHander; the method itself returns nothing.
+    public class func ossTempToken(completeHander: @escaping (_ userInfo: [String: Any]?, _ msg: String?) -> Void) {
+        BFNetRequestAdaptor.postRequestData(url: PQENVUtil.shared.longvideoapi + ossTempTokenUrl, parames: ["type": "2", "fileType": "1"], commonParams: commonParams()) { response, _, _, _ in
+            completeHander(response as? [String: Any], nil)
+        }
+    }
 }

+ 1 - 1
BFFramework/Classes/PQGPUImage/akfilters/PQGPUImageTools.swift

@@ -16,7 +16,7 @@ import BFCommonKit
 #endif
 
 /// 旋转角度模式
-enum NXGPUImageRotationMode: Int {
+public enum NXGPUImageRotationMode: Int {
     case noRotationTextureCoordinates
     case rotateLeftTextureCoordinates
     case rotateRightTextureCoordinates

+ 31 - 31
BFFramework/Classes/PQGPUImage/akfilters/PQMovieFilter.swift

@@ -53,19 +53,19 @@ import Foundation
 import UIKit
 import BFUIKit
 
-open class PQMovieFilter: PQBaseFilter {
+public class PQMovieFilter: PQBaseFilter {
     public var runBenchmark = false
 
     public weak var delegate: MovieInputDelegate?
 
-    var yuvConversionShader: ShaderProgram?
-    var asset: AVAsset?
-    var videoComposition: AVVideoComposition?
+    public var yuvConversionShader: ShaderProgram?
+    public var asset: AVAsset?
+    public var videoComposition: AVVideoComposition?
     // 使用原始速度
-    var playAtActualSpeed: Bool = true
+    public var playAtActualSpeed: Bool = true
 
     // Time in the video where it should start.
-    var requestedStartTime: CMTime?
+    public var requestedStartTime: CMTime?
 
     // Last sample time that played.
     public private(set) var currentTime: CMTime = .zero
@@ -74,46 +74,46 @@ open class PQMovieFilter: PQBaseFilter {
     // Can be used to check video encoding progress. Not called from main thread.
     public var progress: ((Double) -> Void)?
 
-    var audioSettings: [String: Any]?
+    public var audioSettings: [String: Any]?
 
-    var movieFramebuffer: Framebuffer?
+    public var movieFramebuffer: Framebuffer?
     public var framebufferUserInfo: [AnyHashable: Any]?
 
     @Atomic var assetReader: AVAssetReader?
 
-    var moveSticker: PQEditVisionTrackMaterialsModel?
+    public var moveSticker: PQEditVisionTrackMaterialsModel?
 
-    var videoSize: CGSize = .zero
+    public var videoSize: CGSize = .zero
 
     // 最后一帧图像数据 CMSampleBuffer 不会 deep copy 所以使用一个CVImageBuffer变量
-    var lastImageBuffer: CVImageBuffer?
+    public var lastImageBuffer: CVImageBuffer?
     //
     @Atomic var currentRenderImageBuffer: CVPixelBuffer?
-    var currentRenderImageBufferTimeStamp: CMTime = .zero
-    var currentRenderSampleBuffer: CMSampleBuffer?
+    public var currentRenderImageBufferTimeStamp: CMTime = .zero
+    public var currentRenderSampleBuffer: CMSampleBuffer?
     // 旋转角度值
-    var mImageOrientation: ImageOrientation = .portrait
+    public var mImageOrientation: ImageOrientation = .portrait
 
-    var inputSize: GLSize = GLSize(width: 0, height: 0)
+    public var inputSize: GLSize = GLSize(width: 0, height: 0)
 
-    var timebaseInfo = mach_timebase_info_data_t()
+    public var timebaseInfo = mach_timebase_info_data_t()
 
-    var currentThread: Thread?
+    public var currentThread: Thread?
     /// Use serial queue to ensure that the picture is smooth
 //    var seekQueue: DispatchQueue!
 
     // 原视频素材的 FPS
-    var stickerFPS: Float = 0
+    public var stickerFPS: Float = 0
 
     // 开始时间,创建 filter 显示的时候有
-    var startTimeStamp: CMTime?
+    public var startTimeStamp: CMTime?
     // 最后一次显示帧时间戳
-    var targetTimeStamp: CMTime = .zero
+    public var targetTimeStamp: CMTime = .zero
     
     // 当前帧 id
-    var framebufferIndex:Int = 0
+    public var framebufferIndex:Int = 0
     
-    var imageVertexBuffer: GLuint = 0
+    public var imageVertexBuffer: GLuint = 0
 
     deinit {
         FilterLog(1, message: "movie filter release")
@@ -189,7 +189,7 @@ open class PQMovieFilter: PQBaseFilter {
 //        let currTime = CMTimeGetSeconds(CMTime(value: framebuffer.timingStyle.timestamp!.value, timescale: framebuffer.timingStyle.timestamp!.timescale))
     }
 
-    open override func renderFrame() {
+    public override func renderFrame() {
         let inputFramebuffer: Framebuffer = inputFramebuffers[0]!
         inputSize = inputFramebuffer.sizeForTargetOrientation(.portrait)
 
@@ -242,7 +242,7 @@ open class PQMovieFilter: PQBaseFilter {
     }
 
     // 原视频角度类型
-    func moveAssetRotation() -> NXGPUImageRotationMode {
+    public func moveAssetRotation() -> NXGPUImageRotationMode {
         let Angle: Int = PQPHAssetVideoParaseUtil.videoRotationAngle(assert: asset!)
 //        FilterLog(2, message: "原视频素材Angle is \(Angle)")
         // see https://my.oschina.net/NycoWang/blog/904105
@@ -283,7 +283,7 @@ open class PQMovieFilter: PQBaseFilter {
 
     // MARK: Internal processing functions
 
-    func createReader() -> AVAssetReader? {
+    public func createReader() -> AVAssetReader? {
         do {
             let outputSettings: [String: AnyObject] =
                 [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: Int32(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange))]
@@ -316,7 +316,7 @@ open class PQMovieFilter: PQBaseFilter {
         return nil
     }
 
-    open func startReading() {
+    public func startReading() {
         FilterLog(2, message: "开始初始化")
         mach_timebase_info(&timebaseInfo)
 
@@ -343,14 +343,14 @@ open class PQMovieFilter: PQBaseFilter {
     }
 
     // 设置解码开始时间
-    func resetRangeTime(startTime: CMTime = .zero) {
+    public func resetRangeTime(startTime: CMTime = .zero) {
         FilterLog(2, message: "\(String(describing: moveSticker?.locationPath)) 取帧的时间 \(CMTimeGetSeconds(requestedStartTime ?? .zero))")
         requestedStartTime = startTime
         startReading()
     }
 
     // 取出第一帧数据
-    func readNextVideoFrame(showTimeStamp: CMTime) {
+    public func readNextVideoFrame(showTimeStamp: CMTime) {
         // XXXX 有时渲染视频取出来的画面时为黑屏,再渲染一次,数据是没有问题已经保存到沙盒进行验证,这个不是最好的方案!
         if lastImageBuffer != nil {
             renderPixelBuffler(movieFrame: lastImageBuffer!, withSampleTime: currentTime)
@@ -450,7 +450,7 @@ open class PQMovieFilter: PQBaseFilter {
     /// - Parameters:
     ///   - movieFrame:帧数据
     ///   - withSampleTime: 渲染时间戳,不是帧的 PTS 是渲染的时间
-    func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
+    public func renderPixelBuffler(movieFrame: CVPixelBuffer, withSampleTime: CMTime) {
         // NV12 会返回 2,Y分量和UV 分量, 如果buffer 是BGRA 则返回0
         FilterLog(2, message: "CVPixelBufferGetPlaneCount is \(CVPixelBufferGetPlaneCount(movieFrame))")
 
@@ -570,7 +570,7 @@ open class PQMovieFilter: PQBaseFilter {
         secondChrominanceFramebuffer?.unlock()
     }
 
-    func nanosToAbs(_ nanos: UInt64) -> UInt64 {
+    public func nanosToAbs(_ nanos: UInt64) -> UInt64 {
         return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
     }
 }
@@ -590,7 +590,7 @@ extension UIImage {
 //        self.init(cgImage: cgImage)
 //    }
 
-    func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
+    public func saveImage(currentImage: UIImage, persent: CGFloat, imageName: String) {
         if let imageData = currentImage.jpegData(compressionQuality: persent) {
             let fullPath = NSHomeDirectory().appending("/Documents/").appending(imageName)
 

+ 1 - 1
BFFramework/Classes/Stuckpoint/Controller/PQStuckPointMaterialController.swift

@@ -361,7 +361,7 @@ public class PQStuckPointMaterialController: BFBaseViewController {
             if isChose {
                 materialListView.addMaterialData(materialData: materialData!)
             } else {
-                photoMaterialVc.deSeletedMaterialData(materialData: materialData)
+                photoMaterialVc.deSeletedMaterialData(indexPath: nil, materialData: materialData)
             }
         }
         if isChose && !isDissmiss && materialListView.frame.minY > (bottomRemindView.frame.minY - 88) {