@@ -1,855 +0,0 @@
-//
-// PQPlayerViewModel.swift
-// PQSpeed
-//
-// Created by ak on 2021/1/27.
-// Copyright © 2021 BytesFlow. All rights reserved.
-// Video-rendering related logic.
-
-import RealmSwift
-import UIKit
-import BFCommonKit
-import BFUIKit
-import BFMediaKit
-
-open class PQPlayerViewModel: NSObject {
-    /// Converts sticker info into filters; shared by the edit, overview and export flows.
-    /// - Parameter sections: the section models used to build the filter group.
-    public class func partModelToFilters(sections: [PQEditSectionModel], inputSize: CGSize = .zero) -> ([PQBaseFilter], [URL]) {
-        // Audio file URLs for all sections.
-        var audioFiles: Array = Array<URL>.init()
-        // All generated filters.
-        var filters: Array = Array<PQBaseFilter>.init()
-
-        /*
-         1. Default material durations
-            Image: 2 s
-            Video: played once at 1x speed
-            GIF:   played once at 1x speed
-
-         2. Resource adaptation rules
-            2.1 With dubbed audio (i.e. there is text):
-                coefficient = dub duration / total visual duration
-                final visual duration = original visual duration * coefficient
-            2.2 No dubbing and no text:
-                use the material's default duration
-            2.3 No dubbing but with text:
-                coefficient = total video duration / total text duration
-                actual duration of each text segment = original duration of that segment * coefficient
-         */
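// A worked example of rule 2.1 above, with purely illustrative numbers:
//   dub audio = 12 s, total visual duration = 8 s
//   coefficient = 12 / 8 = 1.5
//   a 2 s image sticker is therefore shown for 2 * 1.5 = 3 s (its aptDuration)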
-
-        // Auto-start preview playback on return; start playing automatically once stickers have been added.
-
-        var partTotaDuration: Float64 = 0
-        for section in sections {
-            autoreleasepool {
-                // Prefer the mixed (padded) audio when it exists.
-                if section.mixEmptyAuidoFilePath.count > 0 {
-                    audioFiles.append(URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath.replacingOccurrences(of: documensDirectory, with: "")))
-                    BFLog(message: "add mixEmptyAuidoFilePath")
-                } else {
-                    if section.audioFilePath.count > 0 {
-                        audioFiles.append(URL(fileURLWithPath: documensDirectory + section.audioFilePath.replacingOccurrences(of: documensDirectory, with: "")))
-                        BFLog(message: "add audioFilePath")
-                    }
-                }
-
-                var totalDuration: Float64 = 0
-                // Create the matching filter for each enabled sticker type.
-                for sticker in section.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                    autoreleasepool {
-                        sticker.timelineIn = totalDuration + partTotaDuration
-                        totalDuration = totalDuration + sticker.aptDuration
-                        sticker.timelineOut = totalDuration + partTotaDuration
-                        BFLog(message: "creating filter, start: \(sticker.timelineIn) end: \(sticker.timelineOut) type is \(sticker.type)")
-                        if sticker.aptDuration > 0 {
-                            if sticker.type == StickerType.IMAGE.rawValue {
-                                let imageFilter = PQImageFilter(sticker: sticker)
-                                filters.append(imageFilter)
-                            } else if sticker.type == StickerType.VIDEO.rawValue {
-                                let videoFilter = PQMovieFilter(movieSticker: sticker)
-                                filters.append(videoFilter)
-                            } else if sticker.type == StickerType.GIF.rawValue {
-                                let gifFilter = PQGifFilter(sticker: sticker)
-                                filters.append(gifFilter)
-                            }
-                        } else {
-                            BFLog(message: "invalid sticker.aptDuration, filter not created! \(sticker.aptDuration)")
-                        }
-                    }
-                }
-
-                // For multi-part subtitles the start time is offset by the total duration of the preceding parts, so recalculate it.
-                var newSubtitleData: [PQEditSubTitleModel] = Array()
-
-                // Prefer subtitles transcribed from recorded audio; otherwise fall back to manually entered text subtitles.
-                let recorderSubtitle = List<PQEditSubTitleModel>()
-                if section.sectionTimeline?.visionTrack?.getSubtitleMatraislInfo() != nil {
-                    for subtitleMatraislInfo in section.sectionTimeline!.visionTrack!.getSubtitleMatraislInfo() {
-                        BFLog(message: "found recorded-audio subtitle")
-                        let editSubTitleModel = PQEditSubTitleModel()
-                        editSubTitleModel.text = subtitleMatraislInfo.subtitleInfo?.text ?? ""
-                        editSubTitleModel.timelineIn = subtitleMatraislInfo.timelineIn
-                        editSubTitleModel.timelineOut = subtitleMatraislInfo.timelineOut
-                        recorderSubtitle.append(editSubTitleModel)
-                    }
-                }
-
-                for (index, subTitle) in recorderSubtitle.count > 0 ? recorderSubtitle.enumerated() : section.subTitles.enumerated() {
-                    BFLog(message: "found dubbed subtitle")
-                    let newSubtitle = PQEditSubTitleModel()
-                    newSubtitle.timelineIn = subTitle.timelineIn
-                    newSubtitle.timelineOut = subTitle.timelineOut
-                    newSubtitle.text = subTitle.text.replacingOccurrences(of: "\n", with: "")
-                    BFLog(message: "subtitle #\(index) old start: \(newSubtitle.timelineIn) end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
-
-                    // subtitle duration
-                    let duration: Float64 = (newSubtitle.timelineOut - newSubtitle.timelineIn)
-
-                    newSubtitle.timelineIn = partTotaDuration + newSubtitle.timelineIn
-                    newSubtitle.timelineOut = newSubtitle.timelineIn + duration
-
-                    BFLog(message: "subtitle #\(index) new start: \(newSubtitle.timelineIn) end: \(newSubtitle.timelineOut) text: \(newSubtitle.text)")
-
-                    newSubtitleData.append(newSubtitle)
-
-//                    let subTitle = PQSubTitleFilter(st: [newSubtitle], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
-//                    filters.append(subTitle)
-                }
-                // With no visual materials the subtitles render in large mode; only create the subtitle filter when there is data.
-
-//                for subtitle in newSubtitleData{
-//                    let subTitleFilter = PQSubTitleFilter(st: [newSubtitleData[0]], isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0,inputSize: inputSize)
-//                    filters.append(subTitleFilter)
-//                }
-
-                if newSubtitleData.count > 0 {
-                    let subTitleFilter = PQSubTitleFilter(st: newSubtitleData, isBig: section.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count == 0, inputSize: inputSize)
-                    filters.append(subTitleFilter)
-
-//                    DispatchQueue.main.async {
-
-//                    }
-                }
-
-                var tempDuration = section.allStickerAptDurationNoRound() == 0 ? section.sectionDuration : section.allStickerAptDurationNoRound()
-                BFLog(message: "tempDuration 1 is \(tempDuration)")
-                // If the audio was lengthened by appending empty audio, use the longer duration.
-                if section.mixEmptyAuidoFilePath.count > 0 {
-                    BFLog(message: "found concatenated audio data")
-                    let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + section.mixEmptyAuidoFilePath), options: avAssertOptions)
-                    if tempDuration <= audioAsset.duration.seconds {
-                        tempDuration = audioAsset.duration.seconds
-                    } else {
-                        BFLog(message: "audio file duration is 0?")
-                    }
-                }
-                BFLog(message: "tempDuration 2 is \(tempDuration)")
-
-                partTotaDuration = partTotaDuration + tempDuration
-            }
-            BFLog(message: "audioFiles total count: \(audioFiles.count)")
-        }
-        // e.g. "/Resource/DownloadImages/images_1631358852.933532"
-        return (filters, audioFiles)
-    }
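// A minimal usage sketch (hypothetical call site): `sections` is assumed to be a
// [PQEditSectionModel] loaded elsewhere in the project, and the result feeds the preview player.
let (builtFilters, audioURLs) = PQPlayerViewModel.partModelToFilters(
    sections: sections,
    inputSize: CGSize(width: 1080, height: 1080)
)
BFLog(message: "built \(builtFilters.count) filters and \(audioURLs.count) audio URLs")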
-
-    public class func calculationStickAptDurationReal(currentPart: PQEditSectionModel, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
-        // XXXX: if no voice actor is selected, durations are NOT derived from any auto-generated audio file; every material keeps its original duration.
-//        let audioTotalDuration: Float64 = Float64(currentPart.sectionDuration)
-        // 1. Sum the original durations of all stickers.
-        var stickerTotalDuration: Float64 = 0
-
-        for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-            var stikcerDuration: Float64 = sticker.duration
-            if sticker.videoIsCrop() {
-                BFLog(message: "this video is cropped \(sticker.locationPath)")
-                stikcerDuration = sticker.out - sticker.model_in
-            }
-
-            stickerTotalDuration = stickerTotalDuration + stikcerDuration
-        }
-
-        // Duration of the real (recorded) voice.
-        var realAudioDuration = 0.0
-        BFLog(message: "currentPart.audioFilePath is \(currentPart.audioFilePath)")
-        if currentPart.audioFilePath.count > 0 {
-            let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath), options: avAssertOptions)
-            realAudioDuration = audioAsset.duration.seconds
-        }
-
-        BFLog(message: "total material duration \(stickerTotalDuration), text-to-speech duration: \(realAudioDuration)")
-
-        if stickerTotalDuration == 0 && realAudioDuration == 0 {
-            DispatchQueue.main.async {
-                completeHander(currentPart)
-            }
-            return
-        }
-
-        // The total original duration of the visual materials exceeds the audio file (the subtitle audio may be recorded or auto-generated).
-        if stickerTotalDuration - realAudioDuration > 0.01 {
-            // Create an empty audio file to pad the original audio.
-            let tool = PQCreateEmptyWAV(sampleRate: 8000,
-                                        channel: 1,
-                                        duration: stickerTotalDuration - realAudioDuration,
-                                        bit: 16)
-            let timeInterval: TimeInterval = Date().timeIntervalSince1970
-
-            var audioFileTempPath = exportAudiosDirectory
-            if !directoryIsExists(dicPath: audioFileTempPath) {
-                BFLog(message: "directory does not exist \(audioFileTempPath)")
-                createDirectory(path: audioFileTempPath)
-            }
-
-            audioFileTempPath.append("empty_\(timeInterval).wav")
-
-            tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
-
-                var tempUrls: [URL] = []
-
-                if currentPart.audioFilePath.count > 0 {
-                    BFLog(message: "currentPart.audioFilePath is \(String(describing: currentPart.audioFilePath))")
-                    tempUrls.append(URL(fileURLWithPath: documensDirectory + currentPart.audioFilePath))
-                }
-                tempUrls.append(URL(fileURLWithPath: audioFileTempPath))
-
-                PQPlayerViewModel.mergeAudios(urls: tempUrls) { completURL in
-
-                    if completURL == nil {
-                        BFLog(message: "failed to merge the audio files!")
-                        return
-                    }
-                    // e.g. file:///var/mobile/Containers/Data/Application/2A008644-31A6-4D7E-930B-F1099F36D577/Documents/Resource/ExportAudios/merge_1618817019.789495.m4a
-                    let audioAsset = AVURLAsset(url: completURL!, options: avAssertOptions)
-
-                    BFLog(message: "completURL mix: \(String(describing: completURL)) audioFilePath duration \(audioAsset.duration.seconds)")
-
-                    currentPart.mixEmptyAuidoFilePath = completURL!.absoluteString.replacingOccurrences(of: documensDirectory, with: "").replacingOccurrences(of: "file://", with: "")
-                    currentPart.sectionDuration = audioAsset.duration.seconds
-
-                    BFLog(message: "stickerTotalDuration is \(stickerTotalDuration), mixEmptyAuidoFilePath is now \(currentPart.mixEmptyAuidoFilePath), duration: \(currentPart.sectionDuration)")
-
-                    // 1.2) Compute the logical display duration of each sticker.
-                    for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                        var tempDuration = sticker.duration
-                        if sticker.videoIsCrop() {
-                            tempDuration = sticker.out - sticker.model_in
-                            BFLog(message: "cropped video duration: \(tempDuration) \(String(describing: sticker.locationPath))")
-                        }
-                        sticker.aptDuration = tempDuration
-                    }
-
-                    DispatchQueue.main.async {
-                        completeHander(currentPart)
-                    }
-                }
-            }
-
-        } else {
-            // In this case mixEmptyAuidoFilePath should be empty.
-            currentPart.mixEmptyAuidoFilePath = ""
-//            currentPart.audioFilePath = ""
-            currentPart.sectionDuration = realAudioDuration
-            // 1.1) Compute the coefficient.
-            let coefficient: Float64 = realAudioDuration / stickerTotalDuration
-
-            BFLog(message: "coefficient is \(coefficient), stickerTotalDuration is \(stickerTotalDuration), audioTotalDuration is \(realAudioDuration)")
-
-            // 1.2) Compute the logical display duration of each sticker.
-            for sticker in currentPart.sectionTimeline!.visionTrack!.getEnableVisionTrackMaterials() {
-                // For cropped video materials, use the cropped duration.
-                var tempDuration = sticker.duration
-
-                if sticker.videoIsCrop() {
-                    tempDuration = sticker.out - sticker.model_in
-                    BFLog(message: "cropped video duration: \(tempDuration) \(String(describing: sticker.locationPath))")
-                }
-                // With no audio (coefficient == 0), use the material's original duration.
-                sticker.aptDuration = (coefficient == 0) ? tempDuration : tempDuration * coefficient
-            }
-
-            DispatchQueue.main.async {
-                completeHander(currentPart)
-            }
-        }
-    }
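// Sketch of the expected call pattern, assuming `part` is a PQEditSectionModel prepared by the editing flow:
PQPlayerViewModel.calculationStickAptDurationReal(currentPart: part) { returnPart in
    // The callback runs on the main queue; aptDuration is now set on every enabled sticker.
    BFLog(message: "section duration is now \(returnPart?.sectionDuration ?? 0)")
}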
-
-    // Computes the logical duration of all stickers.
-    public class func calculationStickAptDuration(currentPart: PQEditSectionModel, createFirst: Bool = true, completeHander: @escaping (_ returnPart: PQEditSectionModel?) -> Void) {
-        if currentPart.sectionType == "global" {
-            BFLog(message: "audio-only (global) sections are not processed")
-            return
-        }
-        // Returning from the material detail screen may mean a material was deleted; if no voice actor is selected and there is no recorded or imported audio, the empty-file duration has to be recalculated.
-        let speeckAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.SPEECH.rawValue)
-
-        let localAudioTrackModel = currentPart.sectionTimeline?.audioTrack?.getAudioTrackModel(voiceType: VOICETYPT.LOCAL.rawValue)
-
-        if !currentPart.haveSelectVoice(), speeckAudioTrackModel == nil, localAudioTrackModel == nil, createFirst {
-            // Only visual materials, no text.
-            if currentPart.sectionText.count == 0 {
-                // Generate empty audio matching the total visual duration.
-                var timeCount: Double = 0
-
-                for sticker in (currentPart.sectionTimeline!.visionTrack?.getEnableVisionTrackMaterials())! {
-                    if sticker.out != 0 || sticker.model_in == 0 {
-                        timeCount = timeCount + (sticker.out - sticker.model_in)
-                    } else {
-                        timeCount = timeCount + sticker.aptDuration
-                    }
-                }
-                BFLog(message: "total visual duration \(timeCount)")
-                if timeCount > 0 {
-                    let tool = PQCreateEmptyWAV(sampleRate: 8000,
-                                                channel: 1,
-                                                duration: timeCount,
-                                                bit: 16)
-                    let timeInterval: TimeInterval = Date().timeIntervalSince1970
-
-                    var audioFileTempPath = exportAudiosDirectory
-                    if !directoryIsExists(dicPath: audioFileTempPath) {
-                        BFLog(message: "directory does not exist \(audioFileTempPath)")
-                        createDirectory(path: audioFileTempPath)
-                    }
-
-                    audioFileTempPath.append("empty_\(timeInterval).wav")
-
-                    tool.createEmptyWAVFile(url: URL(fileURLWithPath: audioFileTempPath)) { _ in
-                        currentPart.audioFilePath = audioFileTempPath.replacingOccurrences(of: documensDirectory, with: "")
-
-                        calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-                    }
-                } else {
-                    calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-                }
-            } else {
-                calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-            }
-        } else {
-            calculationStickAptDurationReal(currentPart: currentPart, completeHander: completeHander)
-        }
-    }
-
-    // Concatenate audio files end to end.
-    /*
-     Merging or trimming audio produces m4a, but converting m4a directly to mp3 corrupts the audio, so the approach taken here is m4a -> wav first, then wav -> mp3.
-     */
-
-    /// Merges several audio files into one.
-    /// - Parameter urls: full-path URLs of every audio file (full paths so they can be reused directly).
-    /// - Parameter completeHander: called with a full-path URL; strip the directory prefix before persisting it.
-    public class func mergeAudios(urls: [URL], completeHander: @escaping (_ fileURL: URL?) -> Void) {
-        let timeInterval: TimeInterval = Date().timeIntervalSince1970
-        let composition = AVMutableComposition()
-        var totalDuration: CMTime = .zero
-        BFLog(message: "total files to merge: \(urls.count)")
-        for urlStr in urls {
-            BFLog(message: "merging file at: \(urlStr)")
-            let audioAsset = AVURLAsset(url: urlStr, options: avAssertOptions)
-            let tracks1 = audioAsset.tracks(withMediaType: .audio)
-            if tracks1.count == 0 {
-                BFLog(message: "invalid audio data, merge aborted; make sure every input is valid! \(urlStr)")
-                break
-            }
-            let assetTrack1: AVAssetTrack = tracks1[0]
-
-            let duration1: CMTime = assetTrack1.timeRange.duration
-
-            BFLog(message: "duration of this file: \(CMTimeGetSeconds(duration1))")
-
-            let timeRange1 = CMTimeRangeMake(start: .zero, duration: duration1)
-
-            let compositionAudioTrack: AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaType.audio, preferredTrackID: CMPersistentTrackID())!
-
-            do {
-                try compositionAudioTrack.insertTimeRange(timeRange1, of: assetTrack1, at: totalDuration)
-            } catch {
-                BFLog(message: "error is \(error)")
-            }
-
-            totalDuration = CMTimeAdd(totalDuration, audioAsset.duration)
-        }
-
-        if CMTimeGetSeconds(totalDuration) == 0 {
-            BFLog(message: "all input data is invalid")
-            completeHander(nil)
-            return
-        } else {
-            // Audio concatenation finished.
-            BFLog(message: "totalDuration is \(CMTimeGetSeconds(totalDuration))")
-        }
-
-        let assetExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetAppleM4A)
-        BFLog(message: "assetExport.supportedFileTypes is \(String(describing: assetExport?.supportedFileTypes))")
-
-        assetExport?.outputFileType = .m4a
-        // XXXX: the file extension must match outputFileType, otherwise the export fails.
-        var audioFilePath = exportAudiosDirectory
-
-        if !directoryIsExists(dicPath: audioFilePath) {
-            BFLog(message: "directory does not exist")
-            createDirectory(path: audioFilePath)
-        }
-        audioFilePath.append("merge_\(timeInterval).m4a")
-
-        let fileUrl = URL(fileURLWithPath: audioFilePath)
-
-        assetExport?.outputURL = fileUrl
-        assetExport?.exportAsynchronously {
-            if assetExport!.status == .completed {
-                // e.g. 85.819125
-                let audioAsset = AVURLAsset(url: fileUrl, options: avAssertOptions)
-
-                BFLog(message: "audio concatenation finished \(fileUrl), duration is \(CMTimeGetSeconds(audioAsset.duration))")
-                completeHander(fileUrl)
-
-            } else {
-                print("concatenation failed \(String(describing: assetExport?.error))")
-                completeHander(nil)
-            }
-        }
-    }
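// A minimal sketch of merging two clips; the file names are placeholders rather than real
// project assets, and `documensDirectory` comes from the surrounding project code.
let clips = [
    URL(fileURLWithPath: documensDirectory + "/Resource/ExportAudios/clipA.m4a"),
    URL(fileURLWithPath: documensDirectory + "/Resource/ExportAudios/clipB.wav"),
]
PQPlayerViewModel.mergeAudios(urls: clips) { mergedURL in
    BFLog(message: "merged audio: \(String(describing: mergedURL))")
}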
-
-    /// Computes the player's display position and size for the selected canvas type.
-    /// - Parameters:
-    ///   - editProjectModel: the project data.
-    ///   - showType: display type, 1 = edit screen, 2 = overview screen.
-    /// - Returns: the display frame (origin and size).
-    public class func getShowCanvasRect(editProjectModel: PQEditProjectModel?, showType: Int, playerViewHeight: CGFloat = 216 / 667 * cScreenHeigth) -> CGRect {
-        if editProjectModel == nil {
-            BFLog(message: "editProjectModel is nil")
-            return CGRect()
-        }
-        // Maximum height of the UI player; the maximum width is the device width.
-        var showRect: CGRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-
-        let canvasType: Int = editProjectModel!.sData!.videoMetaData!.canvasType
-
-        if showType == 1 { // edit screen
-            switch canvasType {
-            case videoCanvasType.origin.rawValue:
-
-                // Use the first enabled material.
-                var firstModel: PQEditVisionTrackMaterialsModel?
-                for part in editProjectModel!.sData!.sections {
-                    if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
-                        firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
-                        break
-                    }
-                }
-                if firstModel != nil {
-                    if firstModel?.width == 0 || firstModel?.height == 0 {
-                        BFLog(message: "invalid material width/height!")
-                    }
-                    BFLog(1, message: "size of the first enabled material \(String(describing: firstModel?.width)) \(String(describing: firstModel?.height))")
-                    let ratioMaterial: Float = (firstModel?.width ?? 0) / (firstModel?.height ?? 0)
-                    if ratioMaterial > 1 {
-                        // Landscape
-                        var tempPlayerHeight = cScreenWidth * CGFloat(firstModel!.height / firstModel!.width)
-                        var scale: CGFloat = 1.0
-                        if tempPlayerHeight > playerViewHeight {
-                            scale = CGFloat(playerViewHeight) / CGFloat(tempPlayerHeight)
-                            tempPlayerHeight = tempPlayerHeight * scale
-                        }
-                        showRect = CGRect(x: (cScreenWidth - cScreenWidth * scale) / 2, y: (playerViewHeight - tempPlayerHeight) / 2, width: cScreenWidth * scale, height: tempPlayerHeight)
-                    } else {
-                        // Portrait
-                        let playerViewWidth = (CGFloat(firstModel!.width) / CGFloat(firstModel!.height)) * playerViewHeight
-                        showRect = CGRect(x: (cScreenWidth - playerViewWidth) / 2, y: 0, width: playerViewWidth, height: playerViewHeight)
-                    }
-                } else {
-                    // With no visual materials (only text/voice), the canvas stays "origin" but the view is shown as 1:1.
-                    showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-                }
-
-            case videoCanvasType.oneToOne.rawValue:
-                showRect = CGRect(x: (cScreenWidth - playerViewHeight) / 2, y: 0, width: playerViewHeight, height: playerViewHeight)
-            case videoCanvasType.nineToSixteen.rawValue:
-                showRect = CGRect(x: (cScreenWidth - playerViewHeight * (9.0 / 16.0)) / 2, y: 0, width: playerViewHeight * (9.0 / 16.0), height: playerViewHeight)
-            case videoCanvasType.sixteenToNine.rawValue:
-                showRect = CGRect(x: 0, y: 0 + (playerViewHeight - cScreenWidth * (9.0 / 16.0)) / 2, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
-            default:
-                break
-            }
-        } else if showType == 2 { // overview screen
-            switch canvasType {
-            case videoCanvasType.origin.rawValue:
-
-                BFLog(message: "canvas size in overview \(String(describing: editProjectModel!.sData!.videoMetaData?.videoWidth)) \(String(describing: editProjectModel!.sData!.videoMetaData?.videoHeight))")
-                // Canvas width/height and their ratio.
-                let materialWidth = editProjectModel!.sData!.videoMetaData?.videoWidth ?? 0
-                let materialHeight = editProjectModel!.sData!.videoMetaData?.videoHeight ?? 1
-                let ratioMaterial: Float = Float(materialWidth) / Float(materialHeight)
-
-                if ratioMaterial > 1 {
-                    // Landscape
-                    showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * CGFloat(materialHeight) / CGFloat(materialWidth))
-                } else if ratioMaterial < 1 {
-                    // Portrait
-                    showRect = CGRect(x: (cScreenWidth - cScreenWidth * CGFloat(materialWidth) / CGFloat(materialHeight)) / 2, y: 0, width: cScreenWidth * (CGFloat(materialWidth) / CGFloat(materialHeight)), height: cScreenWidth)
-                    BFLog(message: "showRect is \(showRect)")
-                } else {
-                    showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
-                }
-
-            case videoCanvasType.oneToOne.rawValue:
-                showRect = CGRect(x: 0, y: 0, width: cScreenWidth - 2, height: cScreenWidth - 2)
-            case videoCanvasType.nineToSixteen.rawValue:
-                showRect = CGRect(x: (cScreenWidth - cScreenWidth * (9.0 / 16.0)) / 2, y: 0, width: cScreenWidth * (9.0 / 16.0), height: cScreenWidth)
-            case videoCanvasType.sixteenToNine.rawValue:
-                showRect = CGRect(x: 0, y: 0, width: cScreenWidth, height: cScreenWidth * (9.0 / 16.0))
-
-            default:
-                break
-            }
-        }
-
-        return showRect
-    }
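// Sketch: sizing the preview for the edit screen (showType 1), assuming `project` is a loaded
// PQEditProjectModel and `playerView` is the preview view owned by the caller.
let previewRect = PQPlayerViewModel.getShowCanvasRect(editProjectModel: project, showType: 1)
playerView.frame = previewRect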
-
-    /*
-     1. Entering the editor from the "process" entry: the default canvas is "origin".
-     2. Entering the editor with a material already selected: the canvas uses that material's actual size.
-     3. With no visual materials, tapping "origin" shows 1:1.
-     4. Entering the editor from the upload entry: the default canvas is "origin".
-     5. Coming from the draft box, the restored canvas size is used.
-     6. When "origin" is selected, after materials are reordered the canvas follows the new first material.
-     */
-
-    /// Maps the sData JSON canvasType to the type used by the UI.
-    /// - Parameter projectModel: project sData
-    /// - Returns: the aspect-ratio type used by the UI.
-    public class func videoCanvasTypeToAspectRatio(projectModel: PQEditProjectModel?) -> aspectRatio? {
-        // Added by ak: pass the ratio to the material detail screen; for "origin" the actual size must be passed.
-        var aspectRatioTemp: aspectRatio?
-        if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.origin.rawValue {
-            var firstModel: PQEditVisionTrackMaterialsModel?
-            for part in projectModel!.sData!.sections {
-                if part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().count ?? 0 > 0 {
-                    firstModel = part.sectionTimeline?.visionTrack?.getEnableVisionTrackMaterials().first
-                    break
-                }
-            }
-
-            if firstModel != nil {
-                aspectRatioTemp = .origin(width: CGFloat(firstModel!.width), height: CGFloat(firstModel!.height))
-            } else {
-                aspectRatioTemp = .origin(width: CGFloat(projectModel?.sData?.videoMetaData?.videoWidth ?? 0), height: CGFloat(projectModel?.sData?.videoMetaData?.videoHeight ?? 0))
-            }
-
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.oneToOne.rawValue {
-            aspectRatioTemp = .oneToOne
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.nineToSixteen.rawValue {
-            aspectRatioTemp = .nineToSixteen
-        } else if projectModel?.sData?.videoMetaData?.canvasType == videoCanvasType.sixteenToNine.rawValue {
-            aspectRatioTemp = .sixteenToNine
-        }
-        return aspectRatioTemp
-    }
-
-    public class func getCanvasBtnName(canvasType: videoCanvasType) -> (String, String) {
-        var btnText: String = "自适应"
-        var btnImageName: String = "settingZoom_origin_h"
-
-        if canvasType == .origin {
-            btnText = "自适应"
-            btnImageName = "settingZoom_origin_h"
-
-        } else if canvasType == .oneToOne {
-            btnText = "1:1"
-            btnImageName = "settingZoom_oneToOne_h"
-        } else if canvasType == .sixteenToNine {
-            btnText = "16:9"
-            btnImageName = "settingZoom_sixteenToNine_h"
-        } else if canvasType == .nineToSixteen {
-            btnText = "9:16"
-            btnImageName = "settingZoom_nineToSixteen_h"
-        }
-
-        return (btnText, btnImageName)
-    }
-}
-
-// MARK: - Audio mixing
-
-/// Audio mixing helpers.
-extension PQPlayerViewModel {
-    /// Builds the audio mix.
-    /// - Parameters:
-    ///   - originAsset: the empty/base music asset.
-    ///   - bgmData: the background music.
-    ///   - videoStickers: the video materials.
-    ///   - originMusicDuration: the duration that has to be played.
-    ///   - clipAudioRange: the clip range used when the audio is too short and has to be tiled; typically ends at the second-to-last beat point.
-    ///   - startTime: the clip start position.
-    /// - Returns: the audio mix and its composition.
-    public class func setupAudioMix(originAsset: AVURLAsset, bgmData: PQVoiceModel?, videoStickers: [PQEditVisionTrackMaterialsModel]?, originMusicDuration: Float = 0, clipAudioRange: CMTimeRange = CMTimeRange.zero, startTime: CMTime = .zero) -> (AVMutableAudioMix, AVMutableComposition) {
-        let composition = AVMutableComposition()
-        let audioMix = AVMutableAudioMix()
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-
-        // Handle the selected main music.
-        if originMusicDuration > Float(CMTimeGetSeconds(clipAudioRange.duration)) {
-            BFLog(message: "requested playback is longer than the source audio, tiling it. originMusicDuration: \(originMusicDuration), clip duration: \(CMTimeGetSeconds(clipAudioRange.duration))")
-            let originaParameters = dealWithOriginAssetTrack(originAsset: originAsset, totalDuration: Float64(originMusicDuration), composition: composition, clipAudioRange: clipAudioRange, mStartTime: startTime)
-            BFLog(message: "originaParameters count \(originaParameters.count)")
-            if originaParameters.count > 0 {
-                tempParameters = tempParameters + originaParameters
-            }
-
-        } else {
-            BFLog(message: "audio does not need tiling: \(CMTimeGetSeconds(originAsset.duration))")
-            let parameters = mixAudioTrack(audioAsset: originAsset, trackTimeRange: CMTimeRange(start: .zero, end: originAsset.duration), composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            } else {
-                BFLog(message: "failed to build mix parameters \(CMTimeGetSeconds(originAsset.duration))")
-            }
-        }
-
-        // Handle the background music.
-        if bgmData != nil, bgmData?.localPath != nil {
-            let bgmParameters = dealWithBGMTrack(bgmData: bgmData!, totalDuration: originAsset.duration.seconds, composition: composition)
-            if bgmParameters.count > 0 {
-                tempParameters = tempParameters + bgmParameters
-            }
-        }
-        // Handle audio from the materials themselves.
-        if videoStickers != nil, (videoStickers?.count ?? 0) > 0 {
-            for sticker in videoStickers! {
-                if sticker.volumeGain == 0 {
-                    // Adding a zero-volume track would introduce crackling noise.
-                    BFLog(message: "audio volume is 0, skipping")
-                    continue
-                }
-                let stickerParameters = dealWithMaterialTrack(stickerModel: sticker, composition: composition)
-                if stickerParameters.count > 0 {
-                    tempParameters = tempParameters + stickerParameters
-                }
-            }
-        }
-        audioMix.inputParameters = tempParameters
-        // Export the audio (debug only).
-        // exportAudio(comosition: composition)
-        return (audioMix, composition)
-    }
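// Sketch: attaching the result to an AVPlayerItem, assuming `emptyAsset` is the section's
// base AVURLAsset and `stickers` are its video materials.
let (audioMix, mixComposition) = PQPlayerViewModel.setupAudioMix(
    originAsset: emptyAsset,
    bgmData: nil,
    videoStickers: stickers
)
let playerItem = AVPlayerItem(asset: mixComposition)
playerItem.audioMix = audioMix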
-
-    /// Handles the main music track, e.g. when the source audio is only 30 s but 250 s must be played, the source track is tiled.
-    /// - Parameters:
-    ///   - originAsset: the source audio asset.
-    ///   - composition: the target composition.
-    /// - Returns: the mix input parameters for every tiled segment.
-    public class func dealWithOriginAssetTrack(originAsset: AVURLAsset, totalDuration: Float64, composition: AVMutableComposition, clipAudioRange: CMTimeRange = CMTimeRange.zero, mStartTime: CMTime = .zero) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let volume: Float = 1.0
-        let originaDuration = CMTimeGetSeconds(clipAudioRange.duration)
-        BFLog(message: "handling main audio, source duration = \(originaDuration), required duration totalDuration = \(totalDuration)")
-        // Whole number of repetitions.
-        let count = Int(totalDuration) / Int(originaDuration)
-//        count = count + 1
-        // A remainder needs one extra partial clip.
-        let row = totalDuration - Double(count) * originaDuration
-        // Total duration tiled so far.
-        var clipTotalDuration: Float = 0.0
-        if count > 0 {
-            for index in 0 ..< count {
-                BFLog(message: "tiling main audio, segment \(index)")
-                // The first segment runs from the user-selected start time to the second-to-last beat point; later segments run from the recommended beat point to the second-to-last beat point.
-                var startTime = CMTime.zero
-                var trackTimeRange = clipAudioRange
-
-                if index == 0 {
-                    startTime = mStartTime
-                    trackTimeRange = CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue(CMTimeGetSeconds(clipAudioRange.end)), timescale: playerTimescaleInt))
-                    clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
-                } else {
-                    // (CMTimeGetSeconds(clipAudioRange.end) - CMTimeGetSeconds(mStartTime)) is the duration of the user-selected first segment.
-                    startTime = CMTime(value: CMTimeValue((CMTimeGetSeconds(clipAudioRange.duration) * Double(index) + (CMTimeGetSeconds(clipAudioRange.end) - CMTimeGetSeconds(mStartTime))) * Float64(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    trackTimeRange = clipAudioRange
-
-                    clipTotalDuration = clipTotalDuration + Float(CMTimeGetSeconds(trackTimeRange.duration))
-                }
-//                BFLog(1, message: "source audio is short: count = \(count), startTime = \(startTime), trackTimeRange = \(trackTimeRange)")
-                let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                } else {
-                    BFLog(message: "tiling failed!")
-                }
-            }
-        }
-        if row > 0 {
-            let startTime = CMTime(value: CMTimeValue(clipTotalDuration * Float(playerTimescaleInt)), timescale: playerTimescaleInt)
-
-            let trackTimeRange = CMTimeRange(start: startTime, end: CMTime(value: CMTimeValue((CMTimeGetSeconds(startTime) + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            BFLog(1, message: "final short segment: count = \(count), startTime = \(CMTimeShow(startTime)), trackTimeRange = \(CMTimeRangeShow(trackTimeRange))")
-            let parameters = mixAudioTrack(audioAsset: originAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            }
-            clipTotalDuration = clipTotalDuration + Float(row)
-        }
-        BFLog(message: "total tiled audio duration: \(clipTotalDuration)")
-
-        return tempParameters
-    }
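// A worked example of the count/row tiling arithmetic above, with illustrative numbers:
//   totalDuration = 100 s, clip duration = 30 s
//   count = Int(100) / Int(30) = 3 full repetitions
//   row   = 100 - 3 * 30      = 10 s left for the final partial segment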
-
-    /// Handles the background music track.
-    /// - Parameters:
-    ///   - bgmData: the background music model.
-    ///   - totalDuration: the total duration the BGM has to cover.
-    ///   - composition: the target composition.
-    /// - Returns: the mix input parameters for the BGM segments.
-    public class func dealWithBGMTrack(bgmData: PQVoiceModel, totalDuration: Float64, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let bgmAsset = AVURLAsset(url: URL(fileURLWithPath: bgmData.localPath ?? ""), options: avAssertOptions)
-        let volume = Float(bgmData.volume) / 100.0
-        let bgmDuration = (Float64(bgmData.duration ?? "0") ?? 0) - bgmData.startTime
-        BFLog(message: "handling BGM: startTime = \(bgmData.startTime), bgmDuration = \(bgmDuration), totalDuration = \(totalDuration)")
-
-        if bgmDuration < totalDuration {
-            let count = Int(totalDuration) / Int(bgmDuration)
-            let row = totalDuration - Double(count) * bgmDuration
-            if count > 0 {
-                for index in 0 ..< count {
-                    let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(index) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + bgmDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                    BFLog(message: "BGM shorter than needed: count = \(count), startTime = \(startTime), trackTimeRange = \(trackTimeRange)")
-                    let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                    if parameters != nil {
-                        tempParameters.append(parameters!)
-                    }
-                }
-            }
-            if row > 0 {
-                let startTime = CMTime(value: CMTimeValue(bgmDuration * Double(count) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                BFLog(message: "BGM shorter than needed: count = \(count), startTime = \(startTime), trackTimeRange = \(trackTimeRange)")
-                let parameters = mixAudioTrack(audioAsset: bgmAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                }
-            }
-        } else {
-            let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(bgmData.startTime * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((bgmData.startTime + totalDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            BFLog(message: "BGM longer than needed: trackTimeRange = \(trackTimeRange)")
-            let bgmParameters = mixAudioTrack(audioAsset: bgmAsset, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if bgmParameters != nil {
-                tempParameters.append(bgmParameters!)
-            }
-        }
-        return tempParameters
-    }
-
-    /// Handles a material's own audio track.
-    /// - Parameters:
-    ///   - stickerModel: the material model.
-    ///   - composition: the target composition.
-    /// - Returns: the mix input parameters for this material.
-    public class func dealWithMaterialTrack(stickerModel: PQEditVisionTrackMaterialsModel, composition: AVMutableComposition) -> [AVMutableAudioMixInputParameters] {
-        var tempParameters: [AVMutableAudioMixInputParameters] = [AVMutableAudioMixInputParameters].init()
-        let audioAsset = AVURLAsset(url: URL(fileURLWithPath: documensDirectory + stickerModel.locationPath), options: avAssertOptions)
-        let volume = Float(stickerModel.volumeGain) / 100
-        let rangeStart = stickerModel.model_in
-        var rangeEnd = stickerModel.out
-        if rangeEnd == 0 {
-            rangeEnd = audioAsset.duration.seconds
-        }
-        var originDuration = (rangeEnd - rangeStart)
-        if stickerModel.aptDuration < originDuration {
-            originDuration = stickerModel.aptDuration
-        }
-
-        if stickerModel.aptDuration > originDuration, stickerModel.materialDurationFit?.fitType == adapterMode.loopAuto.rawValue {
-            let count = originDuration == 0 ? 0 : Int(stickerModel.aptDuration) / Int(originDuration)
-            let row = stickerModel.aptDuration - Double(count) * originDuration
-            if count > 0 {
-                for index in 0 ..< count {
-                    let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(index)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                    let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                    let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                    if parameters != nil {
-                        tempParameters.append(parameters!)
-                    }
-                }
-            }
-            if row > 0 {
-                let startTime = CMTime(value: CMTimeValue((stickerModel.timelineIn + originDuration * Double(count)) * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-                let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + row) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-                let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-                if parameters != nil {
-                    tempParameters.append(parameters!)
-                }
-            }
-        } else {
-            let startTime = CMTime(value: CMTimeValue(stickerModel.timelineIn * Double(playerTimescaleInt)), timescale: playerTimescaleInt)
-            let trackTimeRange = CMTimeRange(start: CMTime(value: CMTimeValue(rangeStart * Double(playerTimescaleInt)), timescale: playerTimescaleInt), end: CMTime(value: CMTimeValue((rangeStart + originDuration) * Double(playerTimescaleInt)), timescale: playerTimescaleInt))
-            let parameters = mixAudioTrack(audioAsset: audioAsset, startTime: startTime, trackTimeRange: trackTimeRange, volume: volume, composition: composition)
-            if parameters != nil {
-                tempParameters.append(parameters!)
-            }
-        }
-        return tempParameters
-    }
-
-    /// Adds one audio track to the mix.
-    /// - Parameters:
-    ///   - audioAsset: the source asset.
-    ///   - startTime: where in the composition this track starts playing.
-    ///   - trackTimeRange: the range of the source to play.
-    ///   - volume: the track volume.
-    ///   - composition: the target composition.
-    /// - Returns: the mix input parameters, or nil when the asset has no audio track.
-    public class func mixAudioTrack(audioAsset: AVURLAsset, startTime: CMTime = CMTime.zero, trackTimeRange: CMTimeRange, volume: Float = 1, composition: AVMutableComposition) -> AVMutableAudioMixInputParameters? {
-        BFLog(message: "startTime = \(startTime), trackTimeRange = \(trackTimeRange)")
-        // First audio track only:
-        // let assetTrack : AVAssetTrack? = audioAsset.tracks(withMediaType: .audio).first
-        // All audio tracks:
-        let assetTracks: [AVAssetTrack]? = audioAsset.tracks(withMediaType: .audio)
-        if assetTracks != nil, (assetTracks?.count ?? 0) > 0 {
-            let audioTrack: AVMutableCompositionTrack? = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
-            let mixInputParameters = AVMutableAudioMixInputParameters(track: audioTrack)
-            mixInputParameters.setVolume(volume, at: startTime)
-            do {
-                // Insert only the first track at the original start/end:
-                // try audioTrack?.insertTimeRange(trackTimeRange, of: assetTrack!, at: startTime)
-                // Insert all tracks at the original start/end:
-                let timeRanges = Array(repeating: NSValue(timeRange: trackTimeRange), count: assetTracks!.count)
-                try audioTrack?.insertTimeRanges(timeRanges, of: assetTracks!, at: startTime)
-            } catch {
-                BFLog(message: "error is \(error)")
-            }
-            return mixInputParameters
-        }
-        return nil
-    }
-
-    /// Exports the mixed audio (debug helper).
-    /// - Parameter comosition: the composition to export.
-    public class func exportAudio(comosition: AVAsset) {
-        let outPutFilePath = URL(fileURLWithPath: tempDirectory + "/temp.mp4")
-        // Remove any previous file at the output path.
-        try? FileManager.default.removeItem(at: outPutFilePath)
-        let assetExport = AVAssetExportSession(asset: comosition, presetName: AVAssetExportPresetMediumQuality)
-        assetExport?.outputFileType = .mp4
-        assetExport?.outputURL = outPutFilePath
-        assetExport?.exportAsynchronously(completionHandler: {
-            print("assetExport == \(assetExport?.status.rawValue ?? 0), error = \(String(describing: assetExport?.error))")
-            DispatchQueue.main.async {}
-        })
-    }
-}