瀏覽代碼

update conflict

wenweiwei 3 年之前
父節點
當前提交
471ebd2bb4

+ 1 - 1
BFStuckPointKit.podspec

@@ -37,7 +37,7 @@ TODO: Add long description of the pod here.
   s.dependency 'BFAnalyzeKit'
   s.dependency 'AliyunOSSiOS'             ,'2.10.10'  # 阿里云组件
   s.dependency 'WechatOpenSDK-Swift'      ,'1.8.7.1'  # 微信组件
-  s.dependency 'MJRefresh'                ,'3.7.2'    # 刷新组件
+  s.dependency 'MJRefresh'                ,'~> 3.0'    # 刷新组件
   s.dependency 'LMJHorizontalScrollText'  ,'2.0.2'
   s.dependency 'TXLiteAVSDK_Player'       ,'9.3.10765' # 腾讯播放器组件
   s.dependency 'Bugly'                    ,'2.5.90'   #crash log 收集

+ 0 - 144
BFStuckPointKit/Classes/BFUtils/PQSingletoMemoryUtil.swift

@@ -1,144 +0,0 @@
-//
-//  PQSingletoMemoryUtil.swift
-//  PQSpeed
-//
-//  Created by SanW on 2020/6/9.
-//  Copyright © 2020 BytesFlow. All rights reserved.
-//
-
-import UIKit
-import Photos
-import BFCommonKit
-
-// MARK: - 内存缓存数据
-
-/// 内存缓存数据r
-public class PQSingletoMemoryUtil: NSObject {
-    public static let shared = PQSingletoMemoryUtil()
-    public var isShowAttendPoint: Bool = false // 是否有关注更新
-    public var attendIdosUpdateCount: Int = 0 // 关注更新条数
-    public var isLeftSlipRemind: Bool = false
-    public var needLogin: Bool = false // 是否需要显示手机登录
-    public var isFinishedCoging: Bool = false // 是否已完成配置请求
-    public var isFinishedAlias: Bool = false // 是否完成绑定别名
-    public var selectedTabIndex: String?
-    public var commandVideoItem: BFVideoItemProtocol? // 口令内容
-    public var commandLaunchParams: String = "" // 口令启动时的值
-    public var commandReportParams: [String: Any]? // 口令启动时解析的其他值
-    public var h5MsgVid: String? // 剪切板id
-    public var isShowNoWiFiRemind: Bool = false // 是否提示过非WiFi提示
-    public var isNeedRefreshAttend: Bool = false // 是否需要刷新关注
-    public var isNeedRefreshJoinTopic: Bool = false // 是否需要刷新加入的话题
-    
-    // 剪切板content是否为空,如果为空走承接逻辑,冷启动不再加载缓存
-    public var isEmptyPasteContent: Bool = false {
-        didSet {
-            BFLog(message: "isEmptyPasteContent = \(isEmptyPasteContent)")
-        }
-    }
-
-    // 展示无服务提示视图(剪切板 data.reportData.downloadButtonType为 weapp_share_noServiceDownloadApp/weapp_categoryTab_noServiceDownloadApp
-    public var isShowPasteNoServeView: Bool = false {
-        didSet {
-            BFLog(message: "isShowPasteNoServeView = \(isShowPasteNoServeView)")
-        }
-    }
-    /// 是否正在展示广告view
-    public var isLoadingSplashAdView: Bool = false {
-        didSet {
-            BFLog(message: "isLoadingSplashAdView = \(isLoadingSplashAdView)")
-        }
-    }
-    public var cutBoardInfo: String? // 剪切板信息
-    public var cutBoard: String? // 剪切板来源cutBoardInfo
-    public var isPushLoading: Bool = false // 是否正在加载推送数据
-
-    public var coldLaunchType: coldLaunchType?
-    public var isColdLaunch: Bool = false // 冷启动
-    public var coldLaunchStatus: Int = 0 // 1-请求中 2-请求成功 3-请求失败
-    public var deviceToken: String = "" // 推送deviceToken
-    public var activityData: Dictionary<String,Any>? // 活动数据信息
-    public var isShowTodaySuccess: Bool = false // 是否已经展示过今日已完成
-    public var sessionId: String = getUniqueId(desc: "sessionId")
-    public var subSessionid: String?
-    public var isDefaultAtten: Bool = false // 是否默认进入关注
-    public var isDefaultMineSingleVideoDetail: Bool = false // 是否默认进入我的单个视频详情
-    public var playCount: Int = 0 // 播放次数
-    public var uploadDatas: [BFVideoItemProtocol] = Array<BFVideoItemProtocol>.init() // 正在上传的视频集合
-    public var isShowUpSlideGuide: Bool = false // 是否展示了滑动向上的提示
-    public var showUpSlideData: PQVideoListModel? // 是否展示了滑动向上的提示
-    public var makeVideoProjectId: String? // 制作视频项目Id
-    public var draftboxId: String? // 草稿箱ID
-    // add by ak 结构化数据 saveDraft api 要 用到的参数
-    public var sdata: String?
-    // add by ak 结构化数据 saveDraft api 要 用到的参数
-    public var title: String?
-    // add by ak 结构化数据 saveDraft api 要 用到的参数
-    public var coverUrl: String?
-
-    // 未读数字
-    public var unReadInfo: Dictionary<String,Any>?
-    public var abInfoData: [String: Any] = Dictionary<String, Any>.init() // 实验数据
-    public var allExportSession: [PHAsset:AVAssetExportSession] = [PHAsset:AVAssetExportSession].init()
-    public func updateTabPoint() {
-//        let tabBar =
-//            rootViewController()?.tabBar
-//        if isShowAttendPoint {
-//            tabBar?.showPoint(index: 1)
-//        } else {
-////          tabBar.removePoint(index: 1)
-//        }
-    }
-
-    /// 创建sessionId
-    /// - Returns: <#description#>
-   public func createSesstionId() {
-        sessionId = getUniqueId(desc: "sessionId")
-        subSessionid = sessionId
-        BFLog(message: "生成的sessionId = \(sessionId)")
-    }
-
-    /// 创建subSessionid
-    /// - Returns: <#description#>
-    public func createSubSesstionId() {
-        subSessionid = getUniqueId(desc: "subSessionid")
-        BFLog(message: "生成的subSessionid = \(String(describing: subSessionid))")
-    }
-
-    /// 制作视频项目Id
-    /// - Returns: <#description#>
-    public func createMakeVideoProjectId() {
-        makeVideoProjectId = cProjectIdPrefix + getUniqueId(desc: "makeVideoProjectId")
-        BFLog(message: "生成的projectId = \(String(describing: makeVideoProjectId))")
-    }
-
-    /// 解析abInfoData
-    /// - Parameter abInfo: <#abInfo description#>
-    /// - Returns: description
-    public func parasABinfoData(abInfo: String?) {
-        if abInfo != nil, (abInfo?.count ?? 0) > 0, abInfo != "{}" {
-            guard let infoDic = jsonStringToDictionary(abInfo!) else { return }
-            abInfoData.merge(infoDic, uniquingKeysWith: { (key, _) -> Any in
-                key
-            })
-            BFLog(message: "=====abInfoData = \(abInfoData)")
-        }
-    }
-
-    override private init() {
-        super.init()
-        selectedTabIndex = getUserDefaults(key: cSelectedTabIndex) as? String
-        if selectedTabIndex == nil {
-            saveUserDefaults(key: cSelectedTabIndex, value: "categoryTab")
-            selectedTabIndex = "categoryTab"
-        }
-    }
-
-    public override func copy() -> Any {
-        return self
-    }
-
-    public override func mutableCopy() -> Any {
-        return self
-    }
-}

+ 0 - 803
BFStuckPointKit/Classes/ViewModel/PQGPUImagePlayerView.swift

@@ -1,803 +0,0 @@
-//
-//  PQGPUImagePlayer.swift
-//  GPUImage_iOS
-//
-//  Created by ak on 2020/8/27.
-//  Copyright © 2020 Sunset Lake Software LLC. All rights reserved.
-//  功能:滤镜播放器 支持音频 https://juejin.im/post/6844904024760664078 这个有用
-
-import AVFoundation
-import AVKit
-import UIKit
-import ObjectMapper
-import BFCommonKit
-import BFMediaKit
-
-// import GPUImage
-struct AVAssetKey {
-    static let tracks = "tracks"
-    static let duration = "duration"
-    static let metadata = "commonMetadata"
-}
-
-// 播放器状态
-public enum PQGPUImagePlayerViewStatus: Int {
-    case playing = 10
-    case pause = 20
-    case stop = 30
-    case error = 0
-    case unknow = -1000
-}
-
-public class PQGPUImagePlayerView: UIView {
-     
-    public private(set) var playbackTime: TimeInterval = 0 {
-        willSet {
-            playbackTimeChangeClosure?(newValue)
-        }
-    }
-
-    public var mCanverSize: CGSize = .zero
-
-    // 自动隐藏边框
-    public var isAutoHiden: Bool = false
-
-    // 是否显示边框
-    public  var isShowLine: Bool = true
-
-    // 播放进度
-    public var playbackTimeChangeClosure: ((_ time: TimeInterval) -> Void)?
-    // 参数说明:1,当前时间 2,总时长 3,进度
-    public var progress: ((Double, Double, Double) -> Void)?
-
-    /// 预览区域点击回调
-    public  var renderViewOnClickHandle: (() -> Void)?
-
-    public private(set) var asset: AVAsset?
-
-    public var duration: TimeInterval {
-        return asset?.duration.seconds ?? 0
-    }
-
-    public private(set) var status: PQGPUImagePlayerViewStatus = .unknow {
-        willSet {
-            statusChangeClosure?(newValue)
-        }
-    }
-
-    public var statusChangeClosure: ((_ status: PQGPUImagePlayerViewStatus) -> Void)?
-
-    public private(set) var isReadyToPlay = false {
-        willSet {
-            assetLoadClosure?(newValue)
-        }
-    }
-
-    public var assetLoadClosure: ((_ isReadyToPlay: Bool) -> Void)?
-
-    /// Called when video finished
-    /// This closure will not called if isLoop is true
-    public var finishedClosure: (() -> Void)?
-
-    /// Set this attribute to true will print debug info
-    public var enableDebug = false {
-        willSet {
-            movie?.runBenchmark = newValue
-        }
-    }
-
-    /// Setting this attribute before the end of the video works
-    public var isLoop = false {
-        willSet {
-            movie?.loop = newValue
-        }
-    }
-
-    /// The player will control the animationLayer of animation with the property `timeOffset`
-    /// You can set up some animations in this layer like caption
-    public var animationLayer: CALayer? {
-        willSet {
-            // Set speed to 0, use timeOffset to control the animation
-            newValue?.speed = 0
-
-            newValue?.timeOffset = playbackTime
-        }
-        didSet {
-            oldValue?.removeFromSuperlayer()
-        }
-    }
-
-    /// Add filters to this array and call updateAsset(_:) method
-    public var filters: [ImageProcessingOperation] = []
-
-    public var movie: PQMovieInput?
-
-    public var speaker: SpeakerOutput?
-
-    /// Volumn of original sounds in AVAsset
-    public var originVolumn: Float = 1.0 {
-        didSet {}
-    }
-
-    public var playerLayer: AVPlayerLayer?
-    public var player: AVPlayer?
-
-    public var playerEmptyView: UIImageView!
-
-    public var borderLayer: CAShapeLayer?
-
-    public var mPlayeTimeRange: CMTimeRange?
-
-    var mStickers: [PQEditVisionTrackMaterialsModel]? {
-        didSet {
-            
-            BFLog(2, message: "设置线程为: \(Thread.current) \(OperationQueue.current?.underlyingQueue?.label as Any)")
-        
-            configCache(beginTime: mStickers?.first?.timelineIn ?? 0)
-        }
-    }
- 
-    // 是否显示时间条
-    var showProgressLab: Bool = true
-
-    // 缓存创建filter 防止 seek 100ms 慢
-    @Atomic var cacheFilters: Array<PQBaseFilter> = Array()
-    // 缓存个数 XXXX 经过测试如果是4K 视频解码器不能创建太多,4是可以工作
-    var cacheFiltersMaxCount: Int = 8
-  
-    /// Use serial queue to ensure that the picture is smooth
-    var createFiltersQueue: DispatchQueue!
-    
-    //是否显示高斯
-    public  var showGaussianBlur:Bool = false
-    
-    //是否使用AVPlayer播放音乐
-    public var isUsedAVPlayer:Bool = false
-
-    // 渲染区view
-    private lazy var renderView: RenderView = {
-        let view = RenderView()
-        view.backgroundColor = BFConfig.shared.styleBackGroundColor
-        view.frame = self.bounds
-        view.delegate = self
-        let tap = UITapGestureRecognizer(target: self, action: #selector(RenderViewOnclick))
-        view.addGestureRecognizer(tap)
- 
-        view.backgroundRenderColor =  Color.init(red: Float(BFConfig.shared.styleBackGroundColor.rgbaf[0]), green: Float(BFConfig.shared.styleBackGroundColor.rgbaf[1]), blue: Float(BFConfig.shared.styleBackGroundColor.rgbaf[2]))
-
-        return view
-    }()
-
-    // 暂停播放view
-    lazy var playView: UIImageView = {
-        let view = UIImageView(frame: CGRect(x: (self.frame.size.width - self.frame.size.height / 3.6) / 2, y: (self.frame.size.height - self.frame.size.height / 3.6) / 2, width: self.frame.size.height / 3.6, height: self.frame.size.height / 3.6))
-//        view.tintColor = UIColor.white
-        view.image = UIImage.moduleImage(named: "gpuplayBtn", moduleName: "BFStuckPointKit",isAssets: false)?.withRenderingMode(.alwaysTemplate)
-        view.tintColor = UIColor.hexColor(hexadecimal: BFConfig.shared.styleColor.rawValue)
-        view.isHidden = true
-        return view
-
-    }()
-    
-    // 暂停播放view
-    lazy var playMaskView: UIView = {
-        let playMaskView = UIView.init()
-        playMaskView.backgroundColor = UIColor.init(red: 0, green: 0, blue: 0, alpha: 0.5)
-        playMaskView.isUserInteractionEnabled = false
-        playMaskView.isHidden = true
-        return playMaskView
-
-    }()
-
-    // 播放进度/总时长
-    lazy var progressLab: UILabel = {
-        let titleLab = UILabel(frame: CGRect(x: (self.frame.size.width - 140) / 2, y: 0, width: 140, height: 12))
-        titleLab.font = UIFont.systemFont(ofSize: 12, weight: .medium)
-        titleLab.textColor = UIColor.white
-        titleLab.textAlignment = .center
-        titleLab.text = ""
-        titleLab.layer.shadowColor = UIColor.black.cgColor
-        titleLab.layer.shadowOpacity = 0.3
-        titleLab.layer.shadowOffset = .zero
-        titleLab.layer.shadowRadius = 1
-//        titleLab.backgroundColor = UIColor.hexColor(hexadecimal: "#FFFFFF",alpha: 0.3)
-//        titleLab.addCorner(corner:7)
-
-        return titleLab
-
-    }()
-
-    lazy var tipLab: UILabel = {
-        let tipLab = UILabel(frame: CGRect(x: (self.frame.size.width - 100) / 2, y: (self.frame.size.height - 14) / 2, width: 100, height: 14))
-        tipLab.font = UIFont.systemFont(ofSize: 14, weight: .medium)
-        tipLab.textColor = UIColor.white
-        tipLab.textAlignment = .center
-        tipLab.text = "资源加载中..."
-        tipLab.layer.shadowColor = UIColor.white.cgColor
-        tipLab.layer.shadowOpacity = 0.5
-        tipLab.layer.shadowOffset = .zero
-        tipLab.layer.shadowRadius = 1
-        tipLab.isHidden = true
-        return tipLab
-
-    }()
-
-    //进度的开始时间
-    var showProgressStartTime:Float = 0.0
-
-    required public init?(coder _: NSCoder) {
-        fatalError("init(coder:) has not been implemented")
-    }
-    
-    override public init(frame: CGRect) {
-        super.init(frame: frame)
-
-        
-        addSubview(renderView)
-        addSubview(progressLab)
-        addSubview(playMaskView)
-        addSubview(playView)
-     
-        backgroundColor = BFConfig.shared.styleBackGroundColor
-        playerEmptyView = UIImageView(frame: bounds)
-        playerEmptyView.backgroundColor = .black
-        playerEmptyView.image = UIImage.moduleImage(named: "playEmpty", moduleName: "BFStuckPointKit",isAssets: false)
-        playerEmptyView.contentMode = .center
-        addSubview(playerEmptyView)
-
-        addSubview(tipLab)
-        
-        if #available(iOS 10.0, *) {
-            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .default, attributes: .initiallyInactive, autoreleaseFrequency: .never, target: nil)
-        } else {
-            createFiltersQueue = DispatchQueue(label: "PQ.moveFiler.seeking111", qos: .userInteractive, attributes: [], autoreleaseFrequency: .inherit, target: nil)
-        }
-        if #available(iOS 10.0, *) {
-            createFiltersQueue.activate()
-        }
-    }
-
-    func showBorderLayer() {
-        if borderLayer != nil {
-            borderLayer?.removeFromSuperlayer()
-        }
-        // 线条颜色
-        borderLayer = CAShapeLayer()
-        borderLayer?.strokeColor = UIColor.hexColor(hexadecimal: "#FFFFFF").cgColor
-        borderLayer?.fillColor = nil
-        borderLayer?.path = UIBezierPath(rect: CGRect(x: 1, y: 1, width: bounds.width - 2, height: bounds.height - 2)).cgPath
-        borderLayer?.frame = bounds
-        borderLayer?.lineWidth = 2.0
-        borderLayer?.lineCap = .round
-        // 第一位是 线条长度   第二位是间距 nil时为实线
-        if borderLayer != nil {
-            renderView.layer.addSublayer(borderLayer!)
-        }
-        
-
-        if isAutoHiden {
-            borderLayer?.opacity = 0
-            let groupAnimation = CAAnimationGroup()
-            groupAnimation.beginTime = CACurrentMediaTime()
-            groupAnimation.duration = 1
-            groupAnimation.fillMode = .forwards
-            groupAnimation.isRemovedOnCompletion = true
-            groupAnimation.repeatCount = 3
-
-            let opacity = CABasicAnimation(keyPath: "opacity")
-            opacity.fromValue = 0
-            opacity.toValue = 1
-            opacity.isRemovedOnCompletion = true
-
-            let opacity2 = CABasicAnimation(keyPath: "opacity")
-            opacity2.fromValue = 1
-            opacity2.toValue = 0
-            opacity2.isRemovedOnCompletion = false
-            groupAnimation.animations = [opacity, opacity2]
-
-            borderLayer?.add(groupAnimation, forKey: nil)
-        }
-    }
-
-    // 设置画布比例
-    public func resetCanvasFrame(frame: CGRect) {
-        if self.frame.equalTo(frame) {
-            BFLog(2, message: "新老值一样,不重置")
-            return
-        }
-
-        self.frame = frame
-        
-        mCanverSize = frame.size
-
-        if isShowLine {
-            showBorderLayer()
-        }
-
-        BFLog(2, message: "new frame is \(frame)")
-        renderView.isHidden = true
-        renderView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
-        renderView.resatSize()
-
-        playerEmptyView.frame = CGRect(x: 0, y: 0, width: self.frame.size.width, height: self.frame.size.height)
-        playMaskView.frame = CGRect.init(x: 0, y: 0, width: self.frame.width, height: self.frame.height)
-        tipLab.frame = CGRect(x: (self.frame.size.width - 100) / 2, y: (self.frame.size.height - 14) / 2, width: 100, height: 14)
-        progressLab.frame = CGRect(x: (self.frame.size.width - 140) / 2, y: 8, width: 140, height: 14)
-
-        let bord = frame.size.width > frame.size.height ? CGFloat(60) : CGFloat(60)
-        playView.frame = CGRect(x: (CGFloat(frame.size.width) - bord) / 2 , y: (CGFloat(frame.size.height) - bord) / 2, width: bord, height: bord)
-    
-//        playView.frame = CGRect(x: (self.frame.size.width - self.frame.size.height / 3.6) / 2, y: (self.frame.size.height - self.frame.size.height / 3.6) / 2, width: self.frame.size.height / 3.6, height: self.frame.size.height / 3.6)
-    }
-
-    override public func layoutSubviews() {
-        super.layoutSubviews()
-    }
-
-    @objc func RenderViewOnclick() {
-        if status == .playing {
-            playView.isHidden = false
-            playMaskView.isHidden = false
-            pause()
-
-        } else if status == .stop || status == .pause {
-            playView.isHidden = true
-            playMaskView.isHidden = true
-            movie?.resume()
-            speaker?.start()
-            status = .playing
-        }
-        if renderViewOnClickHandle != nil {
-            renderViewOnClickHandle!()
-        }
-    }
-
-    func showPlayBtn(isHidden: Bool) {
-        playView.isHidden = isHidden
-        playMaskView.isHidden = isHidden
-    }
-
-    deinit {
-        stop()
-        movie = nil
-        speaker = nil
-        BFLog(1, message: "play view release")
-    }
-
-    /// XXXX 这里的 URL 使用的是全路径 ,如果不是全的会 crash ,方便复用 (不用处理业务的文件放在哪里)
-    public func updateAsset(_ url: URL, videoComposition: AVVideoComposition? = nil, audioMixModel: PQVoiceModel? = nil, videoStickers: [PQEditVisionTrackMaterialsModel]? = nil,originMusicDuration:Float = 0,lastPoint:Float = 0,clipAudioRange: CMTimeRange = CMTimeRange.zero ,isUsedAVPlayer:Bool = false) {
-        self.isUsedAVPlayer = isUsedAVPlayer
-        // 每次初始化的时候设置初始值 为 nIl
-        var audioMix: AVMutableAudioMix?
-        var composition: AVMutableComposition?
-
-        let asset = AVURLAsset(url: url, options: nil)
-        BFLog(1, message:  "播放器初始化的音频时长\(asset.duration.seconds)  url is \(url),最终使用时长\(originMusicDuration),裁剪范围\(CMTimeGetSeconds(clipAudioRange.start)) 到 \(CMTimeGetSeconds(clipAudioRange.end))")
-
-        self.asset = asset
-        if (audioMixModel != nil && audioMixModel?.localPath != nil) || (videoStickers != nil && (videoStickers?.count ?? 0) > 0 || originMusicDuration != 0) {
-            BFLog(2, message: "有参加混音的数据。")
-            (audioMix, composition) = PQPlayerViewModel.setupAudioMix(originAsset: asset, bgmData: audioMixModel, videoStickers: videoStickers,originMusicDuration:originMusicDuration,clipAudioRange: clipAudioRange)
-        } else {
-            audioMix = nil
-        }
-
-        isReadyToPlay = false
-        asset.loadValuesAsynchronously(forKeys: ["tracks", "duration", "commonMetadata"]) { [weak self] in
-            guard let strongSelf = self else { return }
-            let tracksStatus = strongSelf.asset?.statusOfValue(forKey: AVAssetKey.tracks, error: nil) ?? .unknown
-            let durationStatus = strongSelf.asset?.statusOfValue(forKey: AVAssetKey.duration, error: nil) ?? .unknown
-            strongSelf.isReadyToPlay = tracksStatus == .loaded && durationStatus == .loaded
-        }
-        var audioSettings: [String: Any] = [
-            AVFormatIDKey: kAudioFormatLinearPCM,
-        ]
-//        if #available(iOS 14.0, *) {
-            audioSettings[AVLinearPCMIsFloatKey] = false
-            audioSettings[AVLinearPCMBitDepthKey] = 16
-//        }
-        do {
-            if composition != nil {
-                BFLog(2, message: "composition 方式初始化")
-                movie = try PQMovieInput(asset: composition!, videoComposition: videoComposition, audioMix: audioMix, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
-//                movie?.exportAudioUrl = url // clipAudioRange
-                var ranges = Array<CMTimeRange>()
-                if CMTimeGetSeconds(clipAudioRange.duration) ==  0 {
-                    let range = CMTimeRange(start: CMTime.zero, duration: asset.duration)
-                    ranges.append(range)
-                }else{
-                    ranges.append(clipAudioRange)
-                }
-                movie?.configAVPlayer(assetUrl: url, ranges: ranges)
-            } else {
-                movie = try PQMovieInput(url: url, playAtActualSpeed: true, loop: isLoop, audioSettings: audioSettings)
-
-                /* 测试代码
-                 let audioDecodeSettings = [AVFormatIDKey:kAudioFormatLinearPCM]
-                 let bundleURL = Bundle.main.resourceURL!
-                 let movieURL = URL(string:"11111.mp4", relativeTo:bundleURL)!
-                 movie = try MovieInput(url:movieURL, playAtActualSpeed:true, loop:true, audioSettings:audioDecodeSettings)
-                 */
-            }
-
-            movie!.runBenchmark = false
-            movie!.synchronizedEncodingDebug = false
-            
-            movie!.isUsedAVPlayer = isUsedAVPlayer
-
-        } catch {
-            status = .error
-            if enableDebug {
-                debugPrint(error)
-            }
-        }
-        guard let movie = movie else { return }
-        movie.progress = { [weak self] currTime, duration, prgressValue in
-            guard let strongSelf = self else { return }
-
-//            BFLog(1, message: " movie 进度\(currTime)")
-            strongSelf.changeFilter(currTime: currTime)
-            strongSelf.progress?(currTime, duration, prgressValue)
-
-            DispatchQueue.main.async {
-                strongSelf.playbackTime = currTime
-
-                // Non-main thread change this property is not valid
-                strongSelf.animationLayer?.timeOffset = strongSelf.playbackTime
-                if strongSelf.showProgressLab {
-
-                    if(strongSelf.showProgressStartTime == 0 ){
-                        strongSelf.showProgressStartTime = Float(CMTimeGetSeconds(strongSelf.movie?.startTime ?? .zero))
-                    }
-                    if duration < 1 {
-
-                        strongSelf.progressLab.text =  "\((currTime - Double(CMTimeGetSeconds(strongSelf.movie?.startTime ?? .zero))).formatDurationToHMS()) / 00:01"
-                    } else {
-
-                        var showTime = currTime -  Double(strongSelf.showProgressStartTime)
-                        if (showTime < 0){
-                            showTime = 0
-                        }
-                        strongSelf.progressLab.text = "\(showTime.formatDurationToHMS()) / \( (duration - Double(strongSelf.showProgressStartTime)).formatDurationToHMS())"
-                    }
-                }
-            }
-        }
-        movie.completion = { [weak self] in
-            guard let strongSelf = self else { return }
-            //缓存已经用完,重新初始化缓存
-            if(strongSelf.filters.count == 0){
-                strongSelf.configCache(beginTime: strongSelf.mStickers?.first?.timelineIn ?? 0)
-            }
-            
-            DispatchQueue.main.async {
-                strongSelf.status = .stop
-                strongSelf.finishedClosure?()
-                strongSelf.showPlayBtn(isHidden: false)
-                if(strongSelf.progress != nil){
-                    strongSelf.progress!(0,0,1)
-                }
-                
-            }
-        }
-        speaker = SpeakerOutput()
-        movie.audioEncodingTarget = speaker
-
-        applyFilters()
-    }
-
-    /// 初始化缓存,默认选创建 cacheFiltersMaxCount 个缓存 filterrs
-    /// - Parameter beginTime: 开始缓存的开始时间,用在 seek操作时 老的缓存已经无效不能在使用了
-    func configCache(beginTime: Float64 ) {
-        cacheFilters.removeAll()
-        BFLog(2, message: "原素材 总数:\(mStickers?.count ?? 0) ")
-       
-        if mStickers?.count ?? 0 > 0 {
-            for (index, currentSticker) in mStickers!.enumerated() {
-                BFLog(message: "mStickers timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) index : \(index)")
-               //到达最大缓存数退出
-                if cacheFilters.count == cacheFiltersMaxCount {
-                    break
-                }
-                //小于缓存的开始时间继续查找
-                if(currentSticker.timelineOut < beginTime){
-                    continue
-                }
-                var showFitler: PQBaseFilter?
-                if currentSticker.type == StickerType.VIDEO.rawValue {
-                    showFitler = PQMovieFilter(movieSticker: currentSticker)
-
-                } else if currentSticker.type == StickerType.IMAGE.rawValue {
-                    showFitler = PQImageFilter(sticker: currentSticker, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
-                    (showFitler as? PQImageFilter)?.isPointModel = ((mStickers?.count ?? 0) > 0)
-                }
-                if showFitler != nil {
-                    BFLog(message: " 加入到缓存 的 filter timelinein:\(currentSticker.timelineIn) timelineout: \(currentSticker.timelineOut) in :\(currentSticker.model_in) out: \(currentSticker.out) index : \(index)")
-                    cacheFilters.append(showFitler!)
-                }
-
-            }
-            
-            DispatchQueue.global().async {[weak self] in
-                if let strongSelf = self {
-                    for (index, filter) in strongSelf.cacheFilters.enumerated() {
-                        BFLog(2, message: " 初始化 config create currentSticker timelinein \(String(describing: filter.stickerInfo?.timelineIn)) timelineout \(String(describing: filter.stickerInfo?.timelineOut))  in :\(String(describing: filter.stickerInfo?.model_in)) out \(String(describing: filter.stickerInfo?.out))  index\(index)")
-                    }
-                }
-            }
-            
-            if(cacheFilters.first != nil){
-                movie?.removeAllTargets()
-                let showFilter: PQBaseFilter = cacheFilters.first!
-                movie?.addTarget(showFilter, atTargetIndex: 0)
-                showFilter.addTarget(renderView, atTargetIndex: 0)
-            }
-      
-        }
- 
-    }
-
-    //创建下一个filter 数据
-    func createNextFilter() {
-        BFLog(2, message: "加入前 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
-          if cacheFilters.count <=  cacheFiltersMaxCount {
-              let showIndex = mStickers?.firstIndex(where: { (sticker) -> Bool in
-                (cacheFilters.last?.stickerInfo == sticker)
-              })
-                BFLog(2, message: "当前显示的showIndex: \(String(describing: showIndex))")
-              if ((showIndex ?? 0) + 1) < (mStickers?.count ?? 0) {
-                  let currentSticker = mStickers?[(showIndex ?? 0) + 1]
-                  if currentSticker != nil {
-                      var showFitler: PQBaseFilter?
-                      if currentSticker!.type == StickerType.VIDEO.rawValue {
-                          showFitler = PQMovieFilter(movieSticker: currentSticker!)
-
-                      } else if currentSticker!.type == StickerType.IMAGE.rawValue {
-                        showFitler = PQImageFilter(sticker: currentSticker!, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
-                        (showFitler as? PQImageFilter)?.isPointModel = ((mStickers?.count ?? 0) > 0)
-                      }
-                      if showFitler != nil {
-
-                          cacheFilters.append(showFitler!)
-                      }
-                  }else{
-                    BFLog(2, message: "缓存数据加入不成功!!!!!")
-                  }
-              }
-            
-            BFLog(2, message: "加入后 当前的缓存个数为: \(cacheFilters.count)  maxCount \(cacheFiltersMaxCount) 最后一个显示时间 \(String(describing: cacheFilters.last?.stickerInfo?.timelineIn))")
-             
-          }
-        
-        
-      }
- 
-    
-    /// 按时间从缓存中取出要显示的filter
-    /// - Parameter currTime: 当前播放时间
-    func changeFilter(currTime: Float64) {
-//        let  starts:CFTimeInterval = CFAbsoluteTimeGetCurrent()
-        BFLog(message: " 要查找的 currTime is \(currTime)")
-        //1,删除已经显示过的 filter
-        self.cacheFilters.removeAll(where: {(filter) -> Bool in
-
-            (currTime > (filter.stickerInfo?.timelineOut ?? 0.0))
-
-        })
- 
-        // 2,找出一个要显示的 fitler
-        let showIndex = cacheFilters.firstIndex(where: { (filter) -> Bool in
-            (currTime >= (filter.stickerInfo?.timelineIn ?? 0.0) && currTime <= (filter.stickerInfo?.timelineOut ?? 0.0))
-
-        })
-        if(showIndex == nil){
-            BFLog(2, message: "缓存没有查找到?出现数据错误!!!!")
-            return
-        }
-  
-        let showFilter: PQBaseFilter = cacheFilters[showIndex ?? 0]
-        
-        BFLog(2, message: "缓存操作   查找到命中的显示是为:\(currTime) 缓存数据timeline in :\(showFilter.stickerInfo?.timelineIn ?? 0.0)) timelineOut:\(showFilter.stickerInfo?.timelineOut ?? 0.0) in:\(showFilter.stickerInfo?.model_in ?? 0.0) out:\(showFilter.stickerInfo?.out ?? 0.0) 缓存数 \(cacheFilters.count) index: \(String(describing: showIndex))")
-        
-        if(!(showFilter.isShow)){
-            BFLog(2, message: "showIndex当前时间为  \(currTime) showIndex is \(String(describing: showIndex)) 显示 filter timelineIn is: \(String(describing: showFilter.stickerInfo?.timelineIn)) timelineOut is: \(String(describing: showFilter.stickerInfo?.timelineOut))")
- 
-            showFilter.isShow = true
-            
-            movie!.removeAllTargets()
-        
-            //为了优化性能只有素材宽高比和画面宽高比不一样时才做高斯
-            //原图的比例
-            let stickerAspectRatio = String(format: "%.6f", (showFilter.stickerInfo?.width ?? 0.0 ) / (showFilter.stickerInfo?.height ?? 0.0))
-            //画面的比例
-            let canverAspectRatio = String(format: "%.6f",(movie?.mShowVidoSize.width ?? 0.0) /  (movie?.mShowVidoSize.height ?? 0.0))
-            if(showFilter.stickerInfo?.type == StickerType.IMAGE.rawValue && showGaussianBlur && Float(stickerAspectRatio) != Float(canverAspectRatio)){
-                      BFLog(2, message: "显示图片filter")
-//                    //高斯层
-                        let  blurStickerModel:PQEditVisionTrackMaterialsModel? = showFilter.stickerInfo?.copy() as? PQEditVisionTrackMaterialsModel
-                        blurStickerModel?.canvasFillType = stickerContentMode.aspectFillStr.rawValue
-
-                        if blurStickerModel == nil {
-                            BFLog(2, message: "显示图片filter blurStickerModel is nil")
-                            return
-                        }
-                        let showGaussianFitler:PQBaseFilter = PQImageFilter(sticker: blurStickerModel!, isExport: (movie?.mIsExport) ?? false, showUISize: mCanverSize)
-                        (showGaussianFitler as? PQImageFilter)?.isPointModel = ((mStickers?.count ?? 0) > 0)
-                        
-                        let iosb:GaussianBlur = GaussianBlur.init()
-                        iosb.blurRadiusInPixels = 20
-                        showGaussianFitler.addTarget(iosb)
-                        
-                        self.movie?.addTarget(showGaussianFitler, atTargetIndex: 0)
-                        iosb.addTarget(showFilter,atTargetIndex: 0)
-                        showFilter.addTarget(self.renderView as ImageConsumer, atTargetIndex: 0)
-                
-                        BFLog(2, message: "filter 添加成功 注意是否添加成功。")
-                        
-//                    }
- 
-            }else{
-                movie?.addTarget(showFilter, atTargetIndex: 0)
-                showFilter.addTarget(renderView, atTargetIndex: 0)
-
-            }
-            self.createFiltersQueue.async {
-                self.createNextFilter()
-            }
-
-        }else{
-            BFLog(2, message: " 添加过了 currTime is \(currTime) timelineIn:\(showFilter.stickerInfo?.timelineIn ?? 0.0)")
-        }
-    }
-
-    /// 设置 filter 是否为 seek 状态
-    func setEnableSeek(isSeek: Bool) {
-        for filter in filters {
-            (filter as? PQBaseFilter)?.enableSeek = isSeek
-        }
-    }
-
-    private func applyFilters() {
-        guard let movie = movie else { return }
-        movie.removeAllTargets()
-        var currentTarget: ImageSource = movie
-        filters.forEach {
-            let f = $0
-            currentTarget.addTarget(f, atTargetIndex: 0)
-            currentTarget = f
-        }
-        currentTarget.addTarget(renderView, atTargetIndex: 0)
-    }
-}
-
-// MARK: Player control
-
-public extension PQGPUImagePlayerView {
-    ///  开始播放
-    /// - Parameter pauseFirstFrame: 是否暂停到第一帧
-    func play(pauseFirstFrame: Bool = false, playeTimeRange: CMTimeRange = CMTimeRange()) {
-        DispatchQueue.main.async {
-            self.playerEmptyView.isHidden = true
-            self.playView.isHidden = !pauseFirstFrame
-            self.playMaskView.isHidden = !pauseFirstFrame
-            self.renderView.isHidden = false
-            self.progressLab.isHidden = false
-        }
-//        guard status != .playing else {
-//            BFLog(2, message: "已经是播放状态")
-//            return
-//        }
-
-        // 如果没有设置开始结束时长 使用默认音频总时长(创作工具就不会传值)
-        if CMTIMERANGE_IS_INVALID(playeTimeRange) {
-            let endTime = CMTime(value: CMTimeValue(CMTimeGetSeconds(asset?.duration ?? .zero) * 600), timescale: 600)
-            mPlayeTimeRange = CMTimeRange(start: .zero, end: endTime)
-
-        } else {
-            mPlayeTimeRange = playeTimeRange
-        }
-        // 清空音频缓存
-        speaker?.clearBuffer()
-
-        movie?.start(timeRange: mPlayeTimeRange ?? CMTimeRange())
-
-        speaker?.start()
-
-        status = pauseFirstFrame ? .pause : .playing
-        
-        showProgressStartTime = 0
-    }
-
-    // 快进
-    func seek(to time: CMTime) {
-        mPlayeTimeRange?.start = time
-        play(pauseFirstFrame: false, playeTimeRange: mPlayeTimeRange ?? .zero)
-    }
-
-    // 暂停
-    func pause() {
-        guard status != .pause else {
-            return
-        }
-        movie?.pause()  // 可能会引起crash: configureThread()里timebaseInfo为0,除法出错
-        speaker?.pause()
-        status = .pause
-        showPlayBtn(isHidden: false)
-    }
-
-    // 停止f解码状态
-    func stop() {
-        //        guard status != .stop else {
-        //            return
-        //        }
-
-        movie?.removeAllTargets()
-        movie?.cancel()
-        speaker?.cancel()
-        status = .stop
-    }
-
-    // 清空播放器状态,到空状态
-    func clearPlayerView() {
-        playerEmptyView.isHidden = false
-        renderView.isHidden = true
-        progressLab.isHidden = true
-    }
-
-    // 显示提示文字
-    func showTip(show: Bool) {
-        BFLog(2, message: "showTip \(show)")
-        tipLab.isHidden = !show
-        if show {
-            playerEmptyView.isHidden = true
-
-            renderView.isHidden = true
-            progressLab.isHidden = true
-        }
-    }
-}
-
-// MARK: Filter 操作
-
-public extension PQGPUImagePlayerView {
-    // 添加 filter
-    func appendFilter(_ filter: ImageProcessingOperation) {
-        filters.append(filter)
-    }
-
-    // 添加一组filters
-    func appendFilters(_ newFilters: [ImageProcessingOperation]) {
-        filters = filters + newFilters
-    }
-
-    // 移除所有filter
-    func removeAllFilters() {
-        filters.removeAll()
-    }
-
-    // 重置所有 filer
-    func appendFiltersClearOldFilter(_ newFilters: [ImageProcessingOperation]) {
-        filters.removeAll()
-        filters = newFilters
-    }
-
-}
-
-// MARK: - RenderViewDelegate
-extension PQGPUImagePlayerView: RenderViewDelegate{
-    public func willDisplayFramebuffer(renderView _: RenderView, framebuffer _: Framebuffer) {
-        BFLog(2, message: "willDisplayFramebuffer")
-    }
-
-    public func didDisplayFramebuffer(renderView _: RenderView, framebuffer: Framebuffer) {
-        BFLog(2, message: "didDisplayFramebuffer")
-    }
-
-    public func shouldDisplayNextFramebufferAfterMainThreadLoop() -> Bool {
-        BFLog(2, message: "didDisplayFramebuffer")
-        
-        return false
-    }
-}
-
-

+ 0 - 855
BFStuckPointKit/Classes/ViewModel/PQPlayerViewModel.swift

@@ -1,855 +0,0 @@
-//
-//  PQPlayerViewModel.swift
-//  PQSpeed
-//
-//  Created by ak on 2021/1/27.
-//  Copyright © 2021 BytesFlow. All rights reserved.
-//  视频渲染相关逻辑方法
-
-import RealmSwift
-import UIKit
-import BFCommonKit
-import BFUIKit
-import BFMediaKit
-
-open class PQPlayerViewModel: NSObject {
-    /// 根据贴纸信息转成种 fitler ,编辑 ,总览,导出共用
-    /// - Parameter parts: filter 组
-    public class func partModelToFilters(sections: [PQEditSectionModel], inputSize: CGSize = .zero) -> ([PQBaseFilter], [URL]) {
-        // 所有段的声音位置
-        var audioFiles: Array = Array<URL>.init()
-        // 所有滤镜数组
-        var filters: Array = Array<PQBaseFilter>.init()
-
-        /*
-         一, 默认素材时长
-         图片:2S
-         视频: X1倍速 播一边
-         GIF: X1倍速 播一边
-
-         二,资源适配规则
-         1,有配音声音 也就是有文字
-         适配系数 = 配音时长/视觉总时长
-         视觉元素最终时长 = 视觉元素原时长 * 适配系数
-         2,无配音无文字
-         使用素材的默认时长
-         3,无配音有文字
-         适配系数 = 视频总时长/文字总时长
-         文字每一句的实际时长 = 文字分段落的原始时长 * 适配系统
-
-         */
-
-        // 返回时自动预览开始播放 添加有贴纸开始自动播放
-
-        var partTotaDuration: Float64 = 0
-        for section in sections {
-            autoreleasepool {
-                // 优先使用 mix audio
-                if section.mixEmptyAuidoFilePath.count > 0 {
-                    audioFiles.append(URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath.replacingOccurrences(of: documensDirectory, with: "")))
-                    BFLog(message: "add mixEmptyAuidoFilePath mixEmptyAuidoFilePath")
-                } else {
-                    if section.audioFilePath.count > 0 {
-                        audioFiles.append(URL(fileURLWithPath: documensDirectory + section.audioFilePath.replacingOccurrences(of: documensDirectory, with: "")))
-                        BFLog(message: "add audioFilePath audioFilePath")
-                    }
-                }
-
-                var totalDuration: Float64 = 0
-                // 根据已经选择的贴纸类型创建各自filters
-                for sticker in section.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                    autoreleasepool {
-                        
-                        sticker.timelineIn = totalDuration + partTotaDuration
-                        totalDuration = totalDuration + sticker.aptDuration
-                        sticker.timelineOut = totalDuration + partTotaDuration
-                        BFLog(message: "创建 filter start :\(sticker.timelineIn) end :\(sticker.timelineOut) type is \(sticker.type)")
-                        if(sticker.aptDuration > 0){
-                            if sticker.type == StickerType.IMAGE.rawValue {
-                                let imageFilter = PQImageFilter(sticker: sticker)
-                                filters.append(imageFilter)
-
-                            } else if sticker.type == StickerType.VIDEO.rawValue {
-                                let videoFilter = PQMovieFilter(movieSticker: sticker)
-
-                                filters.append(videoFilter)
-
-                            } else if sticker.type == StickerType.GIF.rawValue {
-                                let gifFilter = PQGifFilter(sticker: sticker)
-                                filters.append(gifFilter)
-                            }
-                        }else{
-                            BFLog(message: "sticker.aptDuration is error create filter error!!! \(sticker.aptDuration )")
-                        }
-                 
-                    }
-                }
-
-                // 字幕如果是多段的 ,字幕的开始时间是 前几段 part duration 总时长 所以要重新计算
-                var newSubtitleData: [PQEditSubTitleModel] = Array()
-
-                // 如果有录制声音转的字幕优先使用,在使用人工输入文字字幕s
-                let recorderSubtitle = List<PQEditSubTitleModel>()
-                if section.sectionTimeline?.visionTrack?.getSubtitleMatraislInfo() != nil {
-                    for subtitleMatraislInfo in section.sectionTimeline!.visionTrack!.getSubtitleMatraislInfo() {
-                        BFLog(message: "有录音字幕")
-                        let editSubTitleModel = PQEditSubTitleModel()
-                        editSubTitleModel.text = subtitleMatraislInfo.subtitleInfo?.text ?? ""
-                        editSubTitleModel.timelineIn = subtitleMatraislInfo.timelineIn
-                        editSubTitleModel.timelineOut = subtitleMatraislInfo.timelineOut
-                        recorderSubtitle.append(editSubTitleModel)
-                    }
-                }
-
-                for (index, subTitle) in recorderSubtitle.count > 0 ? recorderSubtitle.enumerated() : section.subTitles.enumerated() {
-                    BFLog(message: "有配音字幕")
-                    let newSubtitle = PQEditSubTitleModel()
-                    newSubtitle.timelineIn = subTitle.timelineIn
-                    newSubtitle.timelineOut = subTitle.timelineOut
-                    newSubtitle.text = subTitle.text.replacingOccurrences(of: "\n", with: "")
-                    BFLog(message: "第\(index)个字幕 subTitle old start : \(newSubtitle.timelineIn)  end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
-
-                    // subtitle duration
-                    let duration: Float64 = (newSubtitle.timelineOut - newSubtitle.timelineIn)
-
-                    newSubtitle.timelineIn = partTotaDuration + newSubtitle.timelineIn
-                    newSubtitle.timelineOut = newSubtitle.timelineIn + duration
-
-                    BFLog(message: "第\(index)个字幕 subTitle new start : \(newSubtitle.timelineIn)  end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
-
-                    newSubtitleData.append(newSubtitle)
-
-//                    let subTitle = PQSubTitleFilter(st: [newSubtitle], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
-//                    filters.append(subTitle)
-                }
-                // 无视觉素材是大字幕方式 有数据在初始字幕filter
-
-//                for subtitle in newSubtitleData{
-//                    let subTitleFilter = PQSubTitleFilter(st: [newSubtitleData[0]], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0,inputSize: inputSize)
-//                    filters.append(subTitleFilter)
-//                }
-
-                if newSubtitleData.count > 0 {
-                    let subTitleFilter = PQSubTitleFilter(st: newSubtitleData, isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
-                    filters.append(subTitleFilter)
-
-//                    DispatchQueue.main.async {
-
-//                    }
-                }
-
-                var tempDuration = section.allStickerAptDurationNoRound() == 0 ? section.sectionDuration : section.allStickerAptDurationNoRound()
-                BFLog(message: "tempDuration 1 is \(tempDuration)")
-                // 如果音频时长是经过加空音频 加长后的 要使用长音频
-                if section.mixEmptyAuidoFilePath.count > 0 {
-                    BFLog(message: "有拼接的数据")
-                    let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath), options: avAssertOptions)
-                    if tempDuration <= audioAsset.duration.seconds {
-                        tempDuration = audioAsset.duration.seconds
-                    } else {
-                        BFLog(message: "音频文件时长为0?")
-                    }
-                }
-                BFLog(message: "tempDuration 2 is \(tempDuration)")
-
-                partTotaDuration = partTotaDuration + tempDuration
-            }
-            BFLog(message: "audioFiles 声音文件总数\(audioFiles.count)")
-        }
-        //"/Resource/DownloadImages/images_1631358852.933532"
-        //""/Resource/DownloadImages/images_1631358852.933532""
-        return (filters, audioFiles)
-    }
-
-    public class func calculationStickAptDurationReal(currentPart: PQEditSectionModel, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
-        // XXXXXX如果 没有选择发音人 就算有自动的转的声音文件也不按声音时长计算,都是素材原有时长
-//        let audioTotalDuration: Float64 = Float64(currentPart.sectionDuration)
-        // 1,计算贴纸所有原始时长
-        var stickerTotalDuration: Float64 = 0
-
-        for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-            var stikcerDuration: Float64 = sticker.duration
-            if sticker.videoIsCrop() {
-                BFLog(message: "这个视频有裁剪 \(sticker.locationPath)")
-                stikcerDuration = sticker.out - sticker.model_in
-            }
-
-            stickerTotalDuration = stickerTotalDuration + stikcerDuration
-        }
-
-        // 真人声音时长
-        var realAudioDuration = 0.0
-        BFLog(message: "currentPart.audioFilePath is \(currentPart.audioFilePath)")
-        if currentPart.audioFilePath.count > 0 {
-            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath), options: avAssertOptions)
-            realAudioDuration = audioAsset.duration.seconds
-        }
-
-        BFLog(message: "所有素材的总时 \(stickerTotalDuration)  文字转语音的时长:\(realAudioDuration)")
-
-        if stickerTotalDuration == 0 && realAudioDuration == 0 {
-            DispatchQueue.main.async {
-                completeHander(currentPart)
-            }
-            return
-        }
-
-        // 所有视频素材原有时长 > 音频文件(字幕时长 有可能有声音,有可能没有声音自动转的)
-        if stickerTotalDuration - realAudioDuration > 0.01 {
-            // 要创建空文件加长原有声音
-            let tool = PQCreateEmptyWAV(sampleRate: 8000,
-                                        channel: 1,
-                                        duration: stickerTotalDuration - realAudioDuration,
-                                        bit: 16)
-            let timeInterval: TimeInterval = Date().timeIntervalSince1970
-
-            var audioFileTempPath = exportAudiosDirectory
-            if !directoryIsExists(dicPath: audioFileTempPath) {
-                BFLog(message: "文件夹不存在 \(audioFileTempPath)")
-                createDirectory(path: audioFileTempPath)
-            }
-
-            audioFileTempPath.append("empty_\(timeInterval).wav")
-
-            tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
-
-                var tempUrls: Array = NSArray() as! [URL]
-
-                if currentPart.audioFilePath.count > 0 {
-                    BFLog(message: "currentPart.audioFilePath is \(String(describing: currentPart.audioFilePath))")
-                    tempUrls.append(URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath))
-                }
-                tempUrls.append(URL(fileURLWithPath: audioFileTempPath))
-
-                PQPlayerViewModel.mergeAudios(urls: tempUrls) { completURL in
-
-                    if completURL == nil {
-                        BFLog(message: "合并文件有问题!")
-                        return
-                    }
-                    //                file:///var/mobile/Containers/Data/Application/2A008644-31A6-4D7E-930B-F1099F36D577/Documents/Resource/ExportAudios/merge_1618817019.789495.m4a
-                    let audioAsset = AVURLAsset(url: completURL!, options: avAssertOptions)
-
-                    BFLog(message: "completURL mix : \(String(describing: completURL)) audioFilePath durtion  \(audioAsset.duration.seconds)")
-
-                    currentPart.mixEmptyAuidoFilePath = completURL!.absoluteString.replacingOccurrences(of: documensDirectory, with: "").replacingOccurrences(of: "file://", with: "")
-                    currentPart.sectionDuration = audioAsset.duration.seconds
-
-                    BFLog(message: "stickerTotalDuration is \(stickerTotalDuration)  mixEmptyAuidoFilePath 设置后 是\(currentPart.mixEmptyAuidoFilePath) 时长是:\(currentPart.sectionDuration)")
-
-                    // 1.2)计算贴纸的逻辑显示时长
-                    for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                        var tempDuration = sticker.duration
-                        if sticker.videoIsCrop() {
-                            tempDuration = sticker.out - sticker.model_in
-                            BFLog(message: "这个视频有裁剪后:\(tempDuration) \(String(describing: sticker.locationPath))")
-                        }
-                        sticker.aptDuration = tempDuration
-                    }
-
-                    DispatchQueue.main.async {
-                        completeHander(currentPart)
-                    }
-                }
-            }
-
-        } else {
-            // 这种情况下 mixEmptyAuidoFilePath  应该为空
-            currentPart.mixEmptyAuidoFilePath = ""
-//            currentPart.audioFilePath = ""
-            currentPart.sectionDuration = realAudioDuration
-            // 1.1)计算系数
-            let coefficient: Float64 = realAudioDuration / stickerTotalDuration
-
-            BFLog(message: "系数 is: \(coefficient) stickerTotalDuration is \(stickerTotalDuration) audioTotalDuration is :\(realAudioDuration)")
-
-            // 1.2)计算贴纸的逻辑显示时长
-            for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                // 如果是视频素材有过裁剪 就使用裁剪时长
-                var tempDuration = sticker.duration
-
-                if sticker.videoIsCrop() {
-                    tempDuration = sticker.out - sticker.model_in
-                    BFLog(message: "这个视频有裁剪后:\(tempDuration) \(String(describing: sticker.locationPath))")
-                }
-                // 如果没有音频 系数为0时 使用素材的原始时长
-                sticker.aptDuration = (coefficient == 0) ? tempDuration : tempDuration * coefficient
-            }
-
-            DispatchQueue.main.async {
-                completeHander(currentPart)
-            }
-        }
-    }
-
-    // 计算所有贴纸的逻辑时长
-    public class func calculationStickAptDuration(currentPart: PQEditSectionModel, createFirst: Bool = true, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
-        if currentPart.sectionType == "global" {
-            BFLog(message: "音频段落不处理计算")
-            return
-        }
-        // 从素材详细界面返回 有可能是删除素材操作 这时如果没有选择发音人同时没有录音和导入数据要重新计算空文件时长
-        let speeckAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.SPEECH.rawValue)
-
-        let localAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.LOCAL.rawValue)
-
-        if !currentPart.haveSelectVoice(), speeckAudioTrackModel == nil, localAudioTrackModel == nil, createFirst {
-            // 只有视觉素材 没有文字
-            if currentPart.sectionText.count == 0 {
-                // 根据视觉的总时长生成空音频数据
-                var timeCount: Double = 0
-
-                for sticker in (currentPart.sectionTimeline!.visionTrack?.getEnableVisionTrackMaterials())! {
-                    if sticker.out != 0 || sticker.model_in == 0 {
-                        timeCount = timeCount + (sticker.out - sticker.model_in)
-
-                    } else {
-                        timeCount = timeCount + sticker.aptDuration
-                    }
-                }
-                BFLog(message: "计算视觉的总时长 \(timeCount)")
-                if timeCount > 0 {
-                    let tool = PQCreateEmptyWAV(sampleRate: 8000,
-                                                channel: 1,
-                                                duration: timeCount,
-                                                bit: 16)
-                    let timeInterval: TimeInterval = Date().timeIntervalSince1970
-
-                    var audioFileTempPath = exportAudiosDirectory
-                    if !directoryIsExists(dicPath: audioFileTempPath) {
-                        BFLog(message: "文件夹不存在 \(audioFileTempPath)")
-                        createDirectory(path: audioFileTempPath)
-                    }
-
-                    audioFileTempPath.append("empty_\(timeInterval).wav")
-
-                    tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
-                        currentPart.audioFilePath = audioFileTempPath.replacingOccurrences(of: documensDirectory, with: "")
-
-                        calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-                    }
-                } else {
-                    calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-                }
-            } else {
-                calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-            }
-        } else {
-            calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-        }
-    }
-
-    // 首尾拼接音频文件
-    /*
-     因为在对音频做合并或者裁切的时候生成的音频格式是m4a的,但是m4a转成mp3会损坏音频格式,所以我当时采用先把m4a转为wav,再用wav转成mp3。
-     */
-
-    /// 合并声音
-    /// - Parameter urls: 所有音频的URL  是全路径方便复用
-    /// - Parameter completeHander: 返回的 URL 全路径的 URL 如果要保存替换掉前缀
-    public class func mergeAudios(urls: [URL], completeHander: @escaping (_ fileURL: URL?) -> Void) {
-        let timeInterval: TimeInterval = Date().timeIntervalSince1970
-        let composition = AVMutableComposition()
-        var totalDuration: CMTime = .zero
-        BFLog(message: "合并文件总数 \(urls.count)")
-        for urlStr in urls {
-            BFLog(message: "合并的文件地址: \(urlStr)")
-            let audioAsset = AVURLAsset(url: urlStr, options: avAssertOptions)
-            let tracks1 = audioAsset.tracks(withMediaType: .audio)
-            if tracks1.count == 0 {
-                BFLog(message: "音频数据无效不进行合并,所有任务结束要确保输入的数据都正常! \(urlStr)")
-                break
-            }
-            let assetTrack1: AVAssetTrack = tracks1[0]
-
-            let duration1: CMTime = assetTrack1.timeRange.duration
-
-            BFLog(message: "每一个文件的 duration \(CMTimeGetSeconds(duration1))")
-
-            let timeRange1 = CMTimeRangeMake(start: .zero, duration: duration1)
-
-            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
-
-            do {
-                //
-                try compositionAudioTrack.insertTimeRange(timeRange1, of: assetTrack1, at: totalDuration)
-
-            } catch {
-                BFLog(message: "error is \(error)")
-            }
-
-            totalDuration = CMTimeAdd(totalDuration, audioAsset.duration)
-        }
-
-        if CMTimeGetSeconds(totalDuration) == 0 {
-            BFLog(message: "所有数据无效")
-            completeHander(nil)
-            return
-        } else {
-//            拼接声音文件 完成
-            BFLog(message: "totalDuration is \(CMTimeGetSeconds(totalDuration))")
-        }
-
-        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
-        BFLog(message: "assetExport.supportedFileTypes is \(String(describing: assetExport?.supportedFileTypes))")
-
-        assetExport?.outputFileType = .m4a
-        // XXXX 注意文件名的后缀要和outputFileType 一致 否则会导出失败
-        var audioFilePath = exportAudiosDirectory
-
-        if !directoryIsExists(dicPath: audioFilePath) {
-            BFLog(message: "文件夹不存在")
-            createDirectory(path: audioFilePath)
-        }
-        audioFilePath.append("merge_\(timeInterval).m4a")
-
-        let fileUrl = URL(fileURLWithPath: audioFilePath)
-
-        assetExport?.outputURL = fileUrl
-        assetExport?.exportAsynchronously {
-            if assetExport!.status == .completed {
-                // 85.819125
-                let audioAsset = AVURLAsset(url: fileUrl, options: avAssertOptions)
-
-                BFLog(message: "拼接声音文件 完成 \(fileUrl) 时长is \(CMTimeGetSeconds(audioAsset.duration))")
-                completeHander(fileUrl)
-
-            } else {
-                print("拼接出错 \(String(describing: assetExport?.error))")
-                completeHander(URL(string: ""))
-            }
-        }
-    }
-
-    /// 根据选择的画布类型计算播放器显示的位置和大小
-    /// - Parameters:
-    ///   - editProjectModel: 项目数据
-    ///   - showType: 显示类型 1, 编辑界面  2,总览界面
-    /// - Returns: 显示的坐标和位置
-    public class func getShowCanvasRect(editProjectModel: PQEditProjectModel?, showType: Int, playerViewHeight: CGFloat = 216 / 667 * cScreenHeigth) -> CGRect {
-        if editProjectModel == nil {
-            BFLog(message: "editProjectModel is error")
-            return CGRect()
-        }
-        // UI播放器的最大高度,同时最大宽度为设备宽度
-        var showRect: CGRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-
-        let canvasType: Int = editProjectModel!.sData!.videoMetaData!.canvasType
-
-        if showType == 1 { // 编辑界面
-            switch canvasType {
-            case videoCanvasType.origin.rawValue:
-
-                // 使用有效素材第一位
-                var firstModel: PQEditVisionTrackMaterialsModel?
-                for part in editProjectModel!.sData!.sections {
-                    if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
-                        firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
-                        break
-                    }
-                }
-                if firstModel != nil {
-                    if firstModel?.width == 0 || firstModel?.height == 0 {
-                        BFLog(message: "!!!!!!!!!!!素材宽高有问题!!!!!!!!!!!")
-                    }
-                    BFLog(1, message: "第一个有效素材的大小 \(String(describing: firstModel?.width)) \(String(describing: firstModel?.height))")
-                    let ratioMaterial: Float = (firstModel?.width ?? 0) / (firstModel?.height ?? 0)
-                    if ratioMaterial > 1 {
-                        // 横屏
-                        var tempPlayerHeight = cScreenWidth * CGFloat(firstModel!.height / firstModel!.width)
-                        var scale: CGFloat = 1.0
-                        if tempPlayerHeight > playerViewHeight {
-                            scale = CGFloat(playerViewHeight) / CGFloat(tempPlayerHeight)
-                            tempPlayerHeight = tempPlayerHeight * scale
-                        }
-                        showRect = CGRect(x: (cScreenWidth - cScreenWidth * scale) / 2, y: (playerViewHeight - tempPlayerHeight) / 2, width: cScreenWidth * scale, height: tempPlayerHeight)
-                    } else {
-                        // 竖屏
-                        let playerViewWidth = (CGFloat(firstModel!.width) / CGFloat(firstModel!.height)) * playerViewHeight
-                        showRect = CGRect(x: (cScreenWidth - playerViewWidth) / 2, y: 0, width: playerViewWidth, height: playerViewHeight)
-                    }
-                } else {
-                    // 没有视觉素材时,只有文字,语音时,默认为原始但显示的 VIEW 为 1:1
-                    showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-                }
-
-            case videoCanvasType.oneToOne.rawValue:
-                showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-            case videoCanvasType.nineToSixteen.rawValue:
-                showRect = CGRect(x: (cScreenWidth - playerViewHeight * (9.0 / 16.0)) / 2, y: 0, width: playerViewHeight * (9.0 / 16.0), height: playerViewHeight)
-            case videoCanvasType.sixteenToNine.rawValue:
-                showRect = CGRect(x: 0, y: 0 + (playerViewHeight - cScreenWidth * (9.0 / 16.0)) / 2, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
-            default:
-                break
-            }
-        } else if showType == 2 { // 总览界面
-            switch canvasType {
-            case videoCanvasType.origin.rawValue:
-
-                BFLog(message: "总览时画布的大小 \(String(describing: editProjectModel!.sData!.videoMetaData?.videoWidth)) \(String(describing: editProjectModel!.sData!.videoMetaData?.videoHeight))")
-                // 画布的宽高 和宽高比值
-                let materialWidth = editProjectModel!.sData!.videoMetaData?.videoWidth ?? 0
-                let materialHeight = editProjectModel!.sData!.videoMetaData?.videoHeight ?? 1
-                let ratioMaterial: Float = Float(materialWidth) / Float(materialHeight)
-
-                if ratioMaterial > 1 {
-                    // 横屏
-                    showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * CGFloat(materialHeight) / CGFloat(materialWidth))
-                } else if ratioMaterial < 1 {
-                    // 竖屏
-                    showRect = CGRect(x: (cScreenWidth - cScreenWidth * CGFloat(materialWidth) / CGFloat(materialHeight)) / 2, y: 0, width: cScreenWidth * (CGFloat(materialWidth) / CGFloat(materialHeight)), height: cScreenWidth)
-                    BFLog(message: "showRect is \(showRect)")
-                } else {
-                    showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
-                }
-
-            case videoCanvasType.oneToOne.rawValue:
-                showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
-            case videoCanvasType.nineToSixteen.rawValue:
-                showRect = CGRect(x: (cScreenWidth - cScreenWidth * (9.0 / 16.0)) / 2, y: 0, width: cScreenWidth * (9.0 / 16.0), height: cScreenWidth)
-            case videoCanvasType.sixteenToNine.rawValue:
-                showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
-
-            default:
-                break
-            }
-        }
-
-        return showRect
-    }
-
-    /*
-     1, 加工入口进入编辑界面 默认画布?默认为 原始
-     2,进入编辑界面如果选了一个素材 画布就是实际大小,
-     3,没视觉素材时 点击原始显示1:1
-     4, 上传入口进入编辑界面 默认画布为原始
-     5, 从草稿箱进来时,使用恢复的画布大小
-     6, 如果选择了原始,移动素材后都按最新的第一个素材修改画布
-     */
-
-    /// sdata json canvastype 转到 UI 所使用类型
-    /// - Parameter projectModel: project sdata
-    /// - Returns: UI 使用类型
-    public class func videoCanvasTypeToAspectRatio(projectModel: PQEditProjectModel?) -> aspectRatio? {
-        // add by ak 给素材详情界面传比例参数如果是原始大小的要传 size
-        var aspectRatioTemp: aspectRatio?
-        if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.origin.rawValue {
-            var firstModel: PQEditVisionTrackMaterialsModel?
-            for part in projectModel!.sData!.sections {
-                if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
-                    firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
-                    break
-                }
-            }
-
-            if firstModel != nil {
-                aspectRatioTemp = .origin(width: CGFloat(firstModel!.width), height: CGFloat(firstModel!.height))
-            } else {
-                aspectRatioTemp = .origin(width: CGFloat(projectModel?.sData?.videoMetaData?.videoWidth ?? 0), height: CGFloat(projectModel?.sData?.videoMetaData?.videoHeight ?? 0))
-            }
-
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.oneToOne.rawValue {
-            aspectRatioTemp = .oneToOne
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.nineToSixteen.rawValue {
-            aspectRatioTemp = .nineToSixteen
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.sixteenToNine.rawValue {
-            aspectRatioTemp = .sixteenToNine
-        }
-        return aspectRatioTemp
-    }
-
-    public class func getCanvasBtnName(canvasType: videoCanvasType) -> (String, String) {
-        var btnText: String = "自适应"
-        var btnImageName: String = "settingZoom_origin_h"
-
-        if canvasType == .origin {
-            btnText = "自适应"
-            btnImageName = "settingZoom_origin_h"
-
-        } else if canvasType == .oneToOne {
-            btnText = "1:1"
-            btnImageName = "settingZoom_oneToOne_h"
-        } else if canvasType == .sixteenToNine {
-            btnText = "16:9"
-            btnImageName = "settingZoom_sixteenToNine_h"
-        } else if canvasType == .nineToSixteen {
-            btnText = "9:16"
-            btnImageName = "settingZoom_nineToSixteen_h"
-        }
-
-        return (btnText, btnImageName)
-    }
-}
-
-// MARK: - 混音相关
-
-/// 混音相关
-extension PQPlayerViewModel {
-    /// 混音合成
-    /// - Parameters:
-    ///   - originAsset: 空音乐文件素材
-    ///   - bgmData: 背景音乐
-    ///   - videoStickers: 视频素材
-    ///   - originMusicDuration : 要播放的时长
-    ///   - lastSecondPoint : 音频长度不够时,拼接音频文件时的结束时间,推荐卡点的倒数第二位
-    ///   - startTime: 裁剪的开始位置。
-    /// - Returns:
-    public class func setupAudioMix(originAsset: AVURLAsset, bgmData: PQVoiceModel?, videoStickers: [PQEditVisionTrackMaterialsModel]?,originMusicDuration:Float = 0,clipAudioRange: CMTimeRange = CMTimeRange.zero,startTime:CMTime = .zero ) -> (AVMutableAudioMix, AVMutableComposition) {
-        let composition = AVMutableComposition()
-        let audioMix = AVMutableAudioMix()
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-     
-        // 处理选择的主音乐
-        if(originMusicDuration > Float(CMTimeGetSeconds(clipAudioRange.duration))){
-            BFLog(message: "要播放的时间长,比原音频要长进行拼接originMusicDuration:\(originMusicDuration)   originAsset.duration \(CMTimeGetSeconds(clipAudioRange.duration))")
-            let originaParameters =  dealWithOriginAssetTrack(originAsset: originAsset, totalDuration: Float64(originMusicDuration), composition: composition,clipAudioRange: clipAudioRange,mStartTime: startTime)
-            BFLog(message: "originaParameters count \(originaParameters.count)")
-            if originaParameters.count > 0 {
-                tempParameters = tempParameters + originaParameters
-            }
-            
-        }else{
-            BFLog(message: "音频不用拼接:\(CMTimeGetSeconds(originAsset.duration))")
-            let parameters = mixAudioTrack(audioAsset: originAsset, trackTimeRange: CMTimeRange(start: .zero, end: originAsset.duration), composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            }else{
-                
-                BFLog(message: "parameters is error \(CMTimeGetSeconds(originAsset.duration))")
-            }
-        }
-     
-        // 处理背景音乐
-        if bgmData != nil, bgmData?.localPath != nil {
-            let bgmParameters = dealWithBGMTrack(bgmData: bgmData!, totalDuration: originAsset.duration.seconds, composition: composition)
-            if bgmParameters.count > 0 {
-                tempParameters = tempParameters + bgmParameters
-            }
-        }
-        // 处理素材音乐
-        if videoStickers != nil, (videoStickers?.count ?? 0) > 0 {
-            for sticker in videoStickers! {
-                if sticker.volumeGain == 0 {
-                    // 如果添加了会有刺啦音
-                    BFLog(message: "音频音量 为0 不添加")
-                    continue
-                }
-                let stickerParameters = dealWithMaterialTrack(stickerModel: sticker, composition: composition)
-                if stickerParameters.count > 0 {
-                    tempParameters = tempParameters + stickerParameters
-                }
-            }
-        }
-        audioMix.inputParameters = tempParameters
-        // 导出音乐
-        // exportAudio(comosition: composition)
-        return (audioMix, composition)
-    }
-    
-    /// 处理原主音乐音轨  e.g. 原音频时长只有30s  要播放 250s 的音频 拼接原音频音轨
-    /// - Parameters:
-    ///   - originAsset: 原音频文件地址
-    ///   - composition:
-    /// - Returns:
-    public class func dealWithOriginAssetTrack(originAsset: AVURLAsset, totalDuration: Float64, composition: AVMutableComposition,clipAudioRange: CMTimeRange = CMTimeRange.zero,mStartTime:CMTime = .zero ) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let volume:Float = 1.0
-        let originaDuration = CMTimeGetSeconds(clipAudioRange.duration)
-        BFLog(message: "处理主音频 原始时长startTime = \(originaDuration) 要显示时长totalDuration = \(totalDuration)")
-        //整倍数
-        let  count = Int(totalDuration) / Int(originaDuration)
-//        count = count + 1
-        //有余数多 clip 一整段
-        let row = totalDuration - Double(count) * originaDuration
-        //已经拼接的总时长
-        var clipTotalDuration:Float = 0.0
-        if count > 0 {
-            for index in 0 ..< count {
-                BFLog(message: "this is running running")
-                //第一段是用户选择的开始时间 到倒数第二个卡点, 其它段都是从推荐卡点到倒数第二个卡点
-                var startTime = CMTime.zero
-                var trackTimeRange = clipAudioRange
-       
-                if(index == 0){
-                    startTime = mStartTime
-                    trackTimeRange =  CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue(CMTimeGetSeconds(clipAudioRange.end)), timescale: playerTimescaleInt))
-                    clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
-                }else{
-                    // (CMTimeGetSeconds(clipAudioRange.end) - CMTimeGetSeconds(mStartTime))为用户选择的第一段时长
-                    startTime = CMTime(value: CMTimeValue((CMTimeGetSeconds( clipAudioRange.duration) * Double(index) + (CMTimeGetSeconds(clipAudioRange.end) - CMTimeGetSeconds(mStartTime))) * Float64(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    trackTimeRange = clipAudioRange
-                    
-                    clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
-                }
-//                BFLog(1, message: "原音频时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
-                let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                }else{
-                    BFLog(message: "接拼出现错误!!!!")
-                }
-            }
-        }
-        if(row > 0){
-            
-            let startTime = CMTime(value: CMTimeValue(clipTotalDuration * Float(playerTimescaleInt)), timescale: playerTimescaleInt)
-            
-            let trackTimeRange = CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue((CMTimeGetSeconds(startTime) + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            BFLog(1, message: "最后一小段音乐时长短:count = \(count),startTime = \(CMTimeShow(startTime)),trackTimeRange = \(CMTimeRangeShow(trackTimeRange))")
-            let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            }
-            clipTotalDuration = clipTotalDuration + Float(row)
-            
-        }
-        BFLog(message: "拼接的音频总时长: \(clipTotalDuration)")
-
-        return tempParameters
-    }
-
-    /// 处理背景音乐音轨
-    /// - Parameters:
-    ///   - stickerModel: <#stickerModel description#>
-    ///   - composition: <#composition description#>
-    /// - Returns: <#description#>
-    public class func dealWithBGMTrack(bgmData: PQVoiceModel, totalDuration: Float64, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let bgmAsset = AVURLAsset(url: URL(fileURLWithPath: bgmData.localPath ?? ""), options: avAssertOptions)
-        let volume = Float(bgmData.volume) / 100.0
-        let bgmDuration = (Float64(bgmData.duration ?? "0") ?? 0) - bgmData.startTime
-        BFLog(message: "处理背景音乐:startTime = \(bgmData.startTime),bgmDuration = \(bgmDuration),totalDuration = \(totalDuration)")
-
-        if bgmDuration < totalDuration {
-            let count = Int(totalDuration) / Int(bgmDuration)
-            let row = totalDuration - Double(count) * bgmDuration
-            if count > 0 {
-                for index in 0 ..< count {
-                    let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(index) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + bgmDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                    BFLog(message: "背景音乐时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
-                    let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                    if parameters != nil {
-                        tempParameters.append(parameters!)
-                    }
-                }
-            }
-            if row > 0 {
-                let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(count) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                BFLog(message: "背景音乐时长短:count = \(count),startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
-                let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                }
-            }
-        } else {
-            let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + totalDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            BFLog(message: "背景音乐时长长:trackTimeRange = \(trackTimeRange)")
-            let bgmParameters = mixAudioTrack(audioAsset: bgmAsset, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if bgmParameters != nil {
-                tempParameters.append(bgmParameters!)
-            }
-        }
-        return tempParameters
-    }
-
-    /// 处理素材音轨
-    /// - Parameters:
-    ///   - stickerModel: <#stickerModel description#>
-    ///   - composition: <#composition description#>
-    /// - Returns: <#description#>
-    public class func dealWithMaterialTrack(stickerModel: PQEditVisionTrackMaterialsModel, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + stickerModel.locationPath), options: avAssertOptions)
-        let volume = Float(stickerModel.volumeGain) / 100
-        let rangeStart = stickerModel.model_in
-        var rangeEnd = stickerModel.out
-        if rangeEnd == 0 {
-            rangeEnd = audioAsset.duration.seconds
-        }
-        var originDuration = (rangeEnd - rangeStart)
-        if stickerModel.aptDuration < originDuration {
-            originDuration = stickerModel.aptDuration
-        }
-
-        if stickerModel.aptDuration > originDuration, stickerModel.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
-            let count = originDuration == 0 ? 0 : Int(stickerModel.aptDuration) / Int(originDuration)
-            let row = stickerModel.aptDuration - Double(count) * originDuration
-            if count > 0 {
-                for index in 0 ..< count {
-                    let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(index)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                    let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                    if parameters != nil {
-                        tempParameters.append(parameters!)
-                    }
-                }
-            }
-            if row > 0 {
-                let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(count)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                }
-            }
-        } else {
-            let startTime = CMTime(value: CMTimeValue(stickerModel.timelineIn * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-            let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            }
-        }
-        return tempParameters
-    }
-
-    /// 混音添加音轨
-    /// - Parameters:
-    ///   - audioAsset: 素材资源
-    ///   - startTime: 从什么时间开始播放
-    ///   - trackTimeRange: 播放素材范围
-    ///   - volume:音轨音量
-    ///   - composition: <#composition description#>
-    /// - Returns: <#description#>
-    public class func mixAudioTrack(audioAsset: AVURLAsset, startTime: CMTime = CMTime.zero, trackTimeRange: CMTimeRange, volume: Float = 1, composition: AVMutableComposition) -> AVMutableAudioMixInputParameters? {
-        BFLog(message: "startTime = \(startTime),trackTimeRange = \(trackTimeRange)")
-        // 第一个音轨
-        // let assetTrack : AVAssetTrack? = audioAsset.tracks(withMediaType: .audio).first
-        // 所有音轨
-        let assetTracks: [AVAssetTrack]? = audioAsset.tracks(withMediaType: .audio)
-        if assetTracks != nil, (assetTracks?.count ?? 0) > 0 {
-            let audioTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
-            let mixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
-            mixInputParameters.setVolume(volume, at: startTime)
-            do {
-                // 第一个音轨插入到原音的开始和结束位置
-                // try audioTrack?.insertTimeRange(trackTimeRange, of: assetTrack!, at: startTime)
-                // 所有音轨插入到原音的开始和结束位置
-                let timeRanges = Array(repeating: NSValue(timeRange: trackTimeRange), count: assetTracks!.count)
-                try audioTrack?.insertTimeRanges(timeRanges, of: assetTracks!, at: startTime)
-            } catch {
-                BFLog(message: "error is \(error)")
-            }
-            return mixInputParameters
-        }
-        return nil
-    }
-
-    // 导出音频
-    /// - Parameter comosition: <#comosition description#>
-    /// - Returns: <#description#>
-    public class func exportAudio(comosition: AVAsset) {
-        let outPutFilePath = URL(fileURLWithPath: tempDirectory + "/temp.mp4")
-        // 删除以创建地址
-        try? FileManager.default.removeItem(at: outPutFilePath)
-        let assetExport = AVAssetExportSession(asset: comosition, presetName: AVAssetExportPresetMediumQuality)
-        assetExport?.outputFileType = .mp4
-        assetExport?.outputURL = outPutFilePath
-        assetExport?.exportAsynchronously(completionHandler: {
-            print("assetExport == \(assetExport?.status.rawValue ?? 0),error = \(String(describing: assetExport?.error))")
-            DispatchQueue.main.async {}
-        })
-    }
-}