
Image recording issue and drag button display issue

harry, 3 years ago
Parent
Current commit 6878140ea3
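
The commit migrates the `materialDuraion` field from `Double` seconds to `CMTime` throughout the kit, so durations built up from many recording segments keep exact rational values instead of accumulating floating-point rounding. A minimal sketch of the difference, not taken from the commit, assuming the same 1000 timescale the code uses:

import CoreMedia

// Illustration only: summing segment lengths as Double seconds can drift,
// while CMTime addition at a fixed timescale stays exact.
var doubleTotal: Double = 0
var timeTotal: CMTime = .zero
let segment = CMTime(value: 333, timescale: 1000) // hypothetical 0.333 s recording slice

for _ in 0 ..< 1000 {
    doubleTotal += segment.seconds   // may drift by a few ULPs per addition
    timeTotal = timeTotal + segment  // exactly 333_000/1000 after the loop
}
// timeTotal.seconds is exactly 333.0; doubleTotal is only approximately 333.0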

+ 2 - 2
BFRecordScreenKit/Classes/BFRecordExport.swift

@@ -85,7 +85,7 @@ public class BFRecordExport {
                         continue
                     }
 
-                    var duration = itemModel.materialDuraion
+                    var duration = itemModel.materialDuraion.seconds
                     if itemModel.voiceStickers.count == 0 {
                         // Image with no recording keeps 2s
                         duration = 2
@@ -322,7 +322,7 @@ public class BFRecordExport {
                 if itemModell.voiceStickers.count == 0, synthesisAll {
                     modelDuraion += 2
                 } else {
-                    modelDuraion = itemModell.materialDuraion
+                    modelDuraion = itemModell.materialDuraion.seconds
                 }
             } else if itemModell.mediaType == .VIDEO {
                 modelDuraion = itemModell.dealedDurationRanges.reduce(0.0) { partialResult, srange in
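
With `materialDuraion` now a `CMTime`, the export path converts to seconds only at the point of use and keeps the 2-second fallback for image items that have no recording. A rough, hypothetical mirror of that branch (simplified names, not the shipped model types):

import CoreMedia

// Sketch only: an image without voice stickers keeps a fixed 2 s duration,
// anything else reads the CMTime-backed material duration as seconds.
func exportDuration(materialDuraion: CMTime, voiceStickerCount: Int) -> Double {
    if voiceStickerCount == 0 {
        return 2
    }
    return materialDuraion.seconds
}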

+ 8 - 8
BFRecordScreenKit/Classes/BFRecordItemModel.swift

@@ -20,7 +20,7 @@ public class BFRecordItemModel: NSObject {
 //    var baseMaterial : AVURLAsset?
     var localPath: String?
     var coverPath: String?
-    var materialDuraion: Double = 0.0
+    var materialDuraion: CMTime = .zero
     var fetchCoverImgCallBack: ((UIImage) -> Void)?
     var fetchAVUrlAssetCallBack: (() -> Void)?
     var fetchPlayItemCallBack: ((BFRecordItemModel?) -> Void)?
@@ -46,7 +46,7 @@ public class BFRecordItemModel: NSObject {
     func initOriginData(phasset: PHAsset) {
         width = phasset.pixelWidth
         height = phasset.pixelHeight
-        materialDuraion = CMTime.init(seconds: phasset.duration, preferredTimescale: 1000).seconds
+        materialDuraion = CMTime.init(seconds: phasset.duration, preferredTimescale: 1000)
         fetchCoverImage(phasset)
         fetchAVUrlAsset(phasset)
 
@@ -108,7 +108,7 @@ public class BFRecordItemModel: NSObject {
     func generationTimeRanges(needSort _: Bool = false) {
         dealedDurationRanges.removeAll()
 
-        var start: Double = 0
+        var start: CMTime = .zero
 
         var list: [PQVoiceModel]
         list = voiceStickers.sorted { model1, model2 in
@@ -116,17 +116,17 @@ public class BFRecordItemModel: NSObject {
         }
 
         for model in list {
-            if model.startCMTime.seconds > start {
-                let range = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), duration: CMTime(seconds: model.startCMTime.seconds - start, preferredTimescale: 1000))
+            if CMTimeCompare(model.startCMTime, start) > 0 {
+                let range = CMTimeRange(start: start, duration: model.startCMTime - start)
                 dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: range, index: -1))
             }
             let ind = voiceStickers.firstIndex(of: model)
             let range = CMTimeRange(start: model.startCMTime, end: model.endCMTime)
             dealedDurationRanges.append(SplitRecordRange(isRecord: true, range: range, index: ind ?? -1))
-            start = model.endCMTime.seconds
+            start = model.endCMTime
         }
-        if start < materialDuraion {
-            let range = CMTimeRange(start: CMTime(seconds: start, preferredTimescale: 1000), end: CMTime(seconds: materialDuraion, preferredTimescale: 1000))
+        if CMTimeCompare(start, materialDuraion) < 0 {
+            let range = CMTimeRange(start: start, end: materialDuraion)
             dealedDurationRanges.append(SplitRecordRange(isRecord: false, range: range, index: -1))
         }
     }
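
The reworked `generationTimeRanges` now splits the material into recorded and non-recorded ranges entirely in `CMTime`, without round-tripping through seconds. A self-contained sketch of the same splitting idea, with `SplitRecordRange` and `PQVoiceModel` simplified away:

import CoreMedia

struct Segment {
    let isRecord: Bool
    let range: CMTimeRange
}

// Split [0, total) into recorded and non-recorded ranges, keeping all
// arithmetic in CMTime, mirroring the approach in the hunk above.
func splitRanges(recordings: [CMTimeRange], total: CMTime) -> [Segment] {
    var result: [Segment] = []
    var cursor: CMTime = .zero
    for r in recordings.sorted(by: { CMTimeCompare($0.start, $1.start) < 0 }) {
        if CMTimeCompare(r.start, cursor) > 0 {
            result.append(Segment(isRecord: false, range: CMTimeRange(start: cursor, duration: r.start - cursor)))
        }
        result.append(Segment(isRecord: true, range: r))
        cursor = r.end
    }
    if CMTimeCompare(cursor, total) < 0 {
        result.append(Segment(isRecord: false, range: CMTimeRange(start: cursor, end: total)))
    }
    return result
}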

+ 23 - 23
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -641,13 +641,13 @@ public class BFRecordScreenController: BFBaseViewController {
                 sself.events.append(event)
 
                 if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
-                    var duration: Double = 0
+                    var duration: CMTime = .zero
                     sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { temp in
                         BFLog(1, message: "录制结束-最终:\(temp.wavFilePath ?? "")-\(temp.startCMTime.seconds)-\(temp.endCMTime.seconds)-\(temp.endCMTime.seconds - temp.startCMTime.seconds)")
                         temp.duration = "\(temp.endCMTime.seconds - temp.startCMTime.seconds)"
-                        duration = duration + (temp.endCMTime.seconds - temp.startCMTime.seconds)
+                        duration = duration + temp.endCMTime - temp.startCMTime
                     }
-                    sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
+                    sself.itemModels[sself.currItemModelIndex].materialDuraion = duration
                     self?.isEndPlay = true
                     // Show the play button when recording ends
                     sself.playBtn.isSelected = sself.itemModels[sself.currItemModelIndex].voiceStickers.count <= 0
@@ -1036,7 +1036,7 @@ public class BFRecordScreenController: BFBaseViewController {
             // Note: after deleting a recording on image material, rewind the playhead and shift later recordings forward
             if itemModels[currItemModelIndex].mediaType == .IMAGE {
                 let currDuration = model.endCMTime - model.startCMTime
-                itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion - currDuration.seconds
+                itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion - currDuration
                 currentAssetProgress = model.startCMTime
                 // Update progress
                 resetCurrentProgress()
@@ -1060,7 +1060,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 resetAllIndirectionView()
 
                 // Check whether no recordings are left
-                if itemModels[currItemModelIndex].materialDuraion == 0 {
+                if itemModels[currItemModelIndex].materialDuraion == .zero {
                     playBtn.isSelected = true
                 }
             }
@@ -1254,10 +1254,10 @@ public class BFRecordScreenController: BFBaseViewController {
                             itemModels[currItemModelIndex].titleStickers.append(titleTuple)
                         }
                     }
-                    jumpTime = model.startCMTime.seconds
+//                    jumpTime = model.startCMTime.seconds
 
                     if itemModels[currItemModelIndex].mediaType == .IMAGE {
-                        itemModels[currItemModelIndex].materialDuraion = jumpTime
+                        itemModels[currItemModelIndex].materialDuraion = model.startCMTime
                     }
                 }
             } else if action.type == 3 {
@@ -1274,7 +1274,7 @@ public class BFRecordScreenController: BFBaseViewController {
                     tuples?.forEach { tuple in
                         itemModels[currItemModelIndex].voiceStickers.insert(tuple.0, at: tuple.1)
                         let currDuration = tuple.0.endCMTime - tuple.0.startCMTime
-                        itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion + currDuration.seconds
+                        itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].materialDuraion + currDuration
                         for (index, item) in itemModels[currItemModelIndex].voiceStickers.enumerated() {
                             if index > tuple.1 {
                                 // Note: later recordings and subtitles get the corresponding duration added
@@ -1301,13 +1301,13 @@ public class BFRecordScreenController: BFBaseViewController {
                     }
                 }
                 if itemModels[currItemModelIndex].mediaType == .IMAGE {
-                    itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].voiceStickers.last?.endCMTime.seconds ?? 0
+                    itemModels[currItemModelIndex].materialDuraion = itemModels[currItemModelIndex].voiceStickers.last?.endCMTime ?? .zero
                 }
 
             } else {}
             events.removeLast()
 
-            let dur = itemModels[currItemModelIndex].materialDuraion
+            let dur = itemModels[currItemModelIndex].materialDuraion.seconds
             if dur > 0 {
                 if itemModels[currItemModelIndex].mediaType == .IMAGE {
                     changeProgress(isBack: true, progress: Float(jumpTime))
@@ -1393,7 +1393,7 @@ public class BFRecordScreenController: BFBaseViewController {
         isEndPlay = (progress == 1)
         recorderManager?.voiceModel = nil
         // Hide the record button when the video is dragged to the end
-        if itemModels[currItemModelIndex].mediaType == .VIDEO, currentAssetProgress.seconds >= itemModels[currItemModelIndex].materialDuraion {
+        if itemModels[currItemModelIndex].mediaType == .VIDEO, CMTimeCompare(currentAssetProgress, itemModels[currItemModelIndex].materialDuraion) >= 0 {
             recordBtn.isHidden = true
         }
     }
@@ -1695,7 +1695,7 @@ public class BFRecordScreenController: BFBaseViewController {
         }
 
         isNormalPlaying = true
-        if isEndPlay || (itemModels[currItemModelIndex].mediaType == .IMAGE && currentAssetProgress.seconds >= itemModels[currItemModelIndex].materialDuraion) {
+        if isEndPlay || (itemModels[currItemModelIndex].mediaType == .IMAGE && CMTimeCompare(currentAssetProgress, itemModels[currItemModelIndex].materialDuraion) >= 0) {
             isEndPlay = false
             assetPlayer?.seek(to: CMTime.zero)
             progressThumV.progress = 0
@@ -1943,11 +1943,11 @@ public class BFRecordScreenController: BFBaseViewController {
         }
         if itemModels[currItemModelIndex].mediaType == .VIDEO {
             let duration = itemModels[currItemModelIndex].materialDuraion
-            if duration > 0 {
+            if duration.seconds > 0 {
                 if progress == -1 {
                     currentAssetProgress = changCMTime
                 } else {
-                    currentAssetProgress = CMTime(seconds: Double(newProgress) * duration, preferredTimescale: 1000)
+                    currentAssetProgress = CMTime(seconds: Double(newProgress) * duration.seconds, preferredTimescale: 1000)
                 }
                 DispatchQueue.main.async { [weak self] in
                     BFLog(message: "更新录音进度\(#function)- \(self?.currentAssetProgress.seconds ?? 0)")
@@ -1957,12 +1957,12 @@ public class BFRecordScreenController: BFBaseViewController {
                 }
             }
         } else {
-            currentAssetProgress = isBack ? CMTime(value: CMTimeValue(newProgress * 1000), timescale: 1000) : (progress != -1 ? CMTime(value: CMTimeValue(newProgress * Float(itemModels[currItemModelIndex].materialDuraion) * 1000), timescale: 1000) : changCMTime)
+            currentAssetProgress = isBack ? CMTime(value: CMTimeValue(newProgress * 1000), timescale: 1000) : (progress != -1 ? CMTime(value: CMTimeValue(newProgress * Float(itemModels[currItemModelIndex].materialDuraion.seconds) * 1000), timescale: 1000) : changCMTime)
             DispatchQueue.main.async { [weak self] in
                 BFLog(message: "更新录音进度\(#function)- \(self?.currentAssetProgress.seconds ?? 0)")
                 self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
             }
-            BFLog(message: "progress = \(progress),currentAssetProgress = \(currentAssetProgress.seconds),materialDuraion = \(itemModels[currItemModelIndex].materialDuraion)")
+            BFLog(message: "progress = \(progress),currentAssetProgress = \(currentAssetProgress.seconds),materialDuraion = \(itemModels[currItemModelIndex].materialDuraion.seconds)")
 
             // add by ak: when dragging the playhead on image material, also update the subtitle display data
             updateSubtitle(time: currentAssetProgress)
@@ -1976,9 +1976,9 @@ public class BFRecordScreenController: BFBaseViewController {
             if itemModels[currItemModelIndex].mediaType == .IMAGE {
                 percenWidth = progressThumV.thumbImageWidth / 2.0
             } else {
-                percenWidth = progressThumV.progessIndicateBackV.frame.width / CGFloat(itemModels[currItemModelIndex].materialDuraion)
+                percenWidth = progressThumV.progessIndicateBackV.frame.width / CGFloat(itemModels[currItemModelIndex].materialDuraion.seconds)
             }
-            indirectionView = BFIndirectionProgressView(frame: CGRect(origin: progressThumV.progessIndicateBackV.origin, size: CGSize(width: progressThumV.progressView.contentSize.width, height: progressThumV.progessIndicateBackV.frame.height)), percenWidth: percenWidth, totalDuration: itemModels[currItemModelIndex].materialDuraion)
+            indirectionView = BFIndirectionProgressView(frame: CGRect(origin: progressThumV.progessIndicateBackV.origin, size: CGSize(width: progressThumV.progressView.contentSize.width, height: progressThumV.progessIndicateBackV.frame.height)), percenWidth: percenWidth, totalDuration: itemModels[currItemModelIndex].materialDuraion.seconds)
             progressThumV.progressView.addSubview((indirectionView)!)
         }
         // Update recording progress
@@ -2192,7 +2192,7 @@ public extension BFRecordScreenController {
     func startPlayRecord(time: CMTime) {
         // Play the corresponding recorded audio
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
-            if itemModels[currItemModelIndex].materialDuraion <= 0 {
+            if itemModels[currItemModelIndex].materialDuraion.seconds <= 0 {
                 playBtn.isSelected = true
                 return
             }
@@ -2214,7 +2214,7 @@ public extension BFRecordScreenController {
                     self?.isEndPlay = true
                     self?.pause()
                     // Note: correct the progress -- after playback ends the playhead should sit at the material's total duration
-                    self?.currentAssetProgress = CMTime(seconds: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0, preferredTimescale: 1000)
+                    self?.currentAssetProgress = CMTime(seconds: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion.seconds ?? 0, preferredTimescale: 1000)
                     self?.resetCurrentProgress()
                     // Show the record button after recording playback finishes
                     self?.recordBtn.isHidden = false
@@ -2251,7 +2251,7 @@ public extension BFRecordScreenController {
     /// Reset progress
     func resetCurrentProgress() {
         DispatchQueue.main.async { [weak self] in
-            if !(self?.isRecording ?? false), (self?.currentAssetProgress.seconds ?? 0) > (self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0) {
+            if !(self?.isRecording ?? false), CMTimeCompare((self?.currentAssetProgress ?? .zero), (self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? .zero)) > 0 {
                 return
             }
 //            BFLog(1, message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
@@ -2268,8 +2268,8 @@ public extension BFRecordScreenController {
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             percenWidth = progressThumV.thumbImageWidth / 2.0
         } else {
-            percenWidth = progressThumV.progessIndicateBackV.frame.width / CGFloat(itemModels[currItemModelIndex].materialDuraion)
+            percenWidth = progressThumV.progessIndicateBackV.frame.width / CGFloat(itemModels[currItemModelIndex].materialDuraion.seconds)
         }
-        indirectionView?.resetAllSubViews(items: itemModels[currItemModelIndex].voiceStickers, percenWidth: percenWidth, totalDuration: itemModels[currItemModelIndex].materialDuraion)
+        indirectionView?.resetAllSubViews(items: itemModels[currItemModelIndex].voiceStickers, percenWidth: percenWidth, totalDuration: itemModels[currItemModelIndex].materialDuraion.seconds)
     }
 }
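
Across the controller, end-of-material checks now compare `CMTime` values with `CMTimeCompare` instead of comparing `Double` seconds, and undo/redo adjusts `materialDuraion` by adding or subtracting the affected recording's `CMTime` length. A tiny illustrative helper for the comparison pattern (not part of the kit):

import CoreMedia

// True once the playhead has reached or passed the end of the material.
// Comparing CMTime directly avoids Double rounding right at the boundary.
func hasReachedEnd(_ progress: CMTime, of materialDuraion: CMTime) -> Bool {
    return CMTimeCompare(progress, materialDuraion) >= 0
}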

+ 8 - 8
BFRecordScreenKit/Classes/RecordScreen/View/BFVideoThumbProgressView.swift

@@ -241,7 +241,7 @@ class BFVideoThumbProgressView: UIView {
             return
         }
         if recordItem?.mediaType == .VIDEO {
-            if let second = recordItem?.materialDuraion, second > 0 {
+            if let second = recordItem?.materialDuraion.seconds, second > 0 {
                 let w = progressView.contentSize.width - width
                 BFLog(message: "录音进度--指示器:progress=\(progress),duration=\(second),w=\(w),perW=\(Double(w) / second),totalW:\(progress * Double(w) / second)")
                 progressView.contentOffset = CGPoint(x: progress * Double(w) / second, y: 0)
@@ -258,15 +258,15 @@ class BFVideoThumbProgressView: UIView {
 
 extension BFVideoThumbProgressView: UIScrollViewDelegate {
     func scrollViewDidScroll(_ scrollView: UIScrollView) {
-        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0)
+        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0)
         if recordItem?.mediaType == .VIDEO {
             if isDrag {
                 dragScrollProgressHandle?(false, totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
             }
         } else if recordItem?.mediaType == .IMAGE {
             if isDrag {
-                if scrollView.contentOffset.x > ((CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0) + 0.34) {
-                    scrollView.contentOffset = CGPoint(x: (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0) + 0.34, y: 0)
+                if scrollView.contentOffset.x > ((CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0) + 0.34) {
+                    scrollView.contentOffset = CGPoint(x: (CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0) + 0.34, y: 0)
                 }
                 dragScrollProgressHandle?(false, totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
             }
@@ -275,22 +275,22 @@ extension BFVideoThumbProgressView: UIScrollViewDelegate {
 
     func scrollViewWillBeginDragging(_ scrollView: UIScrollView) {
         isDrag = true
-        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0)
+        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0)
         dragStartHandle?()
         dragScrollProgressHandle?(true, totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
     }
 
     func scrollViewDidEndDragging(_ scrollView: UIScrollView, willDecelerate decelerate: Bool) {
         if !decelerate {
-            let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0)
+            let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0)
             isDrag = false
             dragEndHandle?(totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
         }
     }
 
     func scrollViewDidEndDecelerating(_ scrollView: UIScrollView) {
-        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0)
+        let totalW = recordItem?.mediaType == .VIDEO ? (scrollView.contentSize.width - width) : (CGFloat(recordItem?.materialDuraion.seconds ?? 0) * thumbImageWidth / 2.0)
         isDrag = false
-        dragEndHandle?(totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
+        dragEndHandle?(totalW > 0 ? min(Float(scrollView.contentOffset.x / totalW), 1) : 0)
     }
 }
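
In BFVideoThumbProgressView the progress is still derived as contentOffset.x / totalW; the behavioural change here is clamping the value reported after deceleration so overscroll can no longer produce a progress above 1. A minimal sketch of that mapping (illustrative only; it also clamps the lower bound, which the commit does not):

import UIKit

// Maps a horizontal scroll offset to a playback progress in [0, 1].
func progress(for offsetX: CGFloat, totalWidth: CGFloat) -> Float {
    guard totalWidth > 0 else { return 0 }
    return min(max(Float(offsetX / totalWidth), 0), 1)
}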