Add 0.34 to the pixel offset to avoid the progress coming out just short of 1

harry 3 years ago
parent
commit
3b98043b2b

+ 29 - 23
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -69,11 +69,12 @@ public class BFRecordScreenController: BFBaseViewController {
         }
     }
 
-    var currentAssetProgress: CMTime = .zero {
-        didSet {
-            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
-        }
-    } // playback progress of the current asset
+    var currentAssetProgress: CMTime = .zero
+//    {
+//        didSet {
+//            BFLog(3, message: "currentAssetProgress=\(currentAssetProgress.seconds)")
+//        }
+//    } // playback progress of the current asset
     // Time when the player starts playing
     var recordStartPlayTime: CMTime = .zero
     // Time when a given recording starts playing
@@ -1401,7 +1402,8 @@ public class BFRecordScreenController: BFBaseViewController {
     func searchStopAtRecordRange(needAdsorb: Bool = false) {
         // TODO: When scrubbing, pausing playback, or undoing, check whether we stopped inside a recording range; if so, delete the related recording, brush strokes, avatar, and subtitles
         let elems = itemModels[currItemModelIndex].voiceStickers.enumerated().filter { elem in
-            elem.1.startCMTime.seconds <= self.currentAssetProgress.seconds && (elem.1.endCMTime.seconds - 0.001) > self.currentAssetProgress.seconds
+            CMTimeCompare(elem.1.startCMTime, currentAssetProgress) <= 0 && elem.1.endCMTime > currentAssetProgress
+//            elem.1.startCMTime.seconds <= self.currentAssetProgress.seconds && (elem.1.endCMTime.seconds - 0.001) > self.currentAssetProgress.seconds
         }
 
         isEndPlay = false
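
For reference, a minimal standalone sketch of the comparison style adopted above (the Sticker type and sample times are hypothetical stand-ins, not the project's models): CMTime comparisons work on the rational value/timescale pair, so the old "- 0.001" fudge needed for Double conversion is no longer required.

import CoreMedia

// Hypothetical stand-in for the project's voice-sticker model.
struct Sticker {
    let startCMTime: CMTime
    let endCMTime: CMTime
}

let playhead = CMTime(seconds: 2.5, preferredTimescale: 1000)
let stickers = [
    Sticker(startCMTime: CMTime(seconds: 1.0, preferredTimescale: 1000),
            endCMTime: CMTime(seconds: 3.0, preferredTimescale: 1000)),
    Sticker(startCMTime: CMTime(seconds: 4.0, preferredTimescale: 1000),
            endCMTime: CMTime(seconds: 5.0, preferredTimescale: 1000)),
]

// Keep stickers whose range contains the playhead: start <= playhead < end.
let hits = stickers.enumerated().filter { elem in
    CMTimeCompare(elem.1.startCMTime, playhead) <= 0 && elem.1.endCMTime > playhead
}
print(hits.map { $0.0 })   // [0]
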
@@ -1409,7 +1411,7 @@ public class BFRecordScreenController: BFBaseViewController {
         if elems.count > 0 {
             //  TODO: Stopped inside a recording range, show the delete button
             if needAdsorb {
-                if fabs(elems[0].1.endCMTime.seconds - currentAssetProgress.seconds) < 0.5 {
+                if fabs((elems[0].1.endCMTime - currentAssetProgress).seconds) < 0.5 {
                     BFLog(1, message: "吸附在录音结尾, \(elems[0].1.endCMTime.seconds)")
                     //                changeWithDrawBtnLayout(false)
                     changeProgress(changCMTime: elems[0].1.endCMTime)
@@ -1422,7 +1424,7 @@ public class BFRecordScreenController: BFBaseViewController {
                     searchStopAtRecordRange(needAdsorb: false)
                     return
                 } else {
-                    if fabs(elems[0].1.startCMTime.seconds - currentAssetProgress.seconds) < 0.5 {
+                    if fabs((elems[0].1.startCMTime - currentAssetProgress).seconds) < 0.5 {
                         BFLog(1, message: "吸附在录音开始")
                         //                    changeWithDrawBtnLayout(true)
                         changeProgress(changCMTime: elems[0].1.startCMTime)
@@ -1536,10 +1538,10 @@ public class BFRecordScreenController: BFBaseViewController {
 
         // Sort first, then look for the next recording that needs to play
         let list = itemModels[currItemModelIndex].voiceStickers.sorted { m1, m2 in
-            m1.startCMTime.seconds < m2.startCMTime.seconds
+            CMTimeCompare(m1.startCMTime, m2.startCMTime) < 0
         }
         let (shouldPlayRecordIndex, recordedAudio) = list.enumerated().first { model in
-            model.1.endCMTime.seconds > CMTimeGetSeconds(currentT)
+            CMTimeCompare(model.1.endCMTime, currentT) > 0
         } ?? (-1, nil)
 
         // Nothing found, meaning there are no more recordings to play after this point
@@ -1634,9 +1636,9 @@ public class BFRecordScreenController: BFBaseViewController {
                 // Playback should start now
                 // Two cases: if already playing, skip; if paused and dragged into the middle, seek
                 if currentPlayRecordIndex == -1, sself.isNormalPlaying {
-                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startCMTime.seconds
-                    DispatchQueue.main.async { [weak sself] in
-                        self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000), toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
+                    let second = currentT - recordedAudio.startCMTime
+                    DispatchQueue.main.async { [weak self] in
+                        self?.recordPlayer?.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
                             if finished, self?.isNormalPlaying ?? false {
                                 self?.recordPlayer?.play()
 //                                self?.recordPlayer?.volume = 1
@@ -1667,14 +1669,17 @@ public class BFRecordScreenController: BFBaseViewController {
 //            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(sself.isNormalPlaying),\(recordPlayer?.currentItem?.duration.timescale ?? 0),\(CMTimeGetSeconds(currentT) >= recordedAudio.startCMTime.seconds),\(CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds - 0.2)")
 
             if CMTimeGetSeconds(currentT) >= (recordedAudio.startCMTime.seconds - 0.1),
-               CMTimeGetSeconds(currentT) <= recordedAudio.endCMTime.seconds // this condition avoids a slight rollback after recording ends causing the latest recording to play
+               CMTimeCompare(currentT, recordedAudio.endCMTime) <= 0 // this condition avoids a slight rollback after recording ends causing the latest recording to play
             {
                 // Playback should start now
                 // Two cases: if already playing, skip; if paused and dragged into the middle, seek
                 if sself.isNormalPlaying {
-                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startCMTime.seconds
-                    sself.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000))
-                    sself.recordPlayer?.play()
+                    let second = currentT - recordedAudio.startCMTime
+                    sself.recordPlayer?.seek(to: second, toleranceBefore: CMTime(seconds: 1, preferredTimescale: 1000), toleranceAfter: CMTime(seconds: 1, preferredTimescale: 1000), completionHandler: { isFinished in
+                        if isFinished {
+                            sself.recordPlayer?.play()
+                        }
+                    })
                     BFLog(3, message: "录音开始播放2, \(second), \(CMTimeGetSeconds(recordPlayer?.currentItem?.duration ?? .zero)),index = \(currentPlayRecordIndex)")
                 }
             }
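
For context, a hedged sketch of the seek-then-play pattern both hunks above move to (the player URL and the offset are made up for illustration; the real code derives the offset from currentT - recordedAudio.startCMTime): the target is passed as a CMTime, a tight tolerance is requested, and play() only runs in the completion handler so playback cannot start before the seek has landed.

import AVFoundation

// Hypothetical player and seek offset, for illustration only.
let player = AVPlayer(url: URL(fileURLWithPath: "/tmp/record.m4a"))
let offset = CMTime(seconds: 1.25, preferredTimescale: 1_000_000)

player.seek(to: offset,
            toleranceBefore: CMTime(value: 1, timescale: 1_000_000),
            toleranceAfter: CMTime(value: 1, timescale: 1_000_000)) { finished in
    // Only start playback once the seek actually finished.
    if finished {
        player.play()
    }
}
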
@@ -1889,7 +1894,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
         if isRecording {
             let startTime = recorderManager?.voiceModel?.startCMTime.seconds ?? 0
-            let progress = currentAssetProgress.seconds - startTime // - ratioX
+            let progress = (currentAssetProgress - (recorderManager?.voiceModel?.startCMTime ?? .zero)).seconds // - ratioX
             // Use the player's progress to draw the line, since the progress follows the player
             DispatchQueue.main.async { [weak self] in
                 self?.indirectionView?.setProgress(start: startTime, progress: max(0, progress))
@@ -2204,7 +2209,8 @@ public extension BFRecordScreenController {
         }, didPlayToEndTime: { [weak self] recordInfo, currentItem in
             BFLog(message: "播放录音结束:\(String(describing: recordInfo?.1)),\(String(describing: currentItem))")
             if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
-                if (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.count ?? 0) <= ((recordInfo?.0 ?? 0) + 1) || (recordInfo?.1.endCMTime.seconds ?? 0) >= (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endCMTime.seconds ?? 0) {
+                if (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.count ?? 0) <= ((recordInfo?.0 ?? 0) + 1) || CMTimeCompare((recordInfo?.1.endCMTime ?? .zero), (self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.endCMTime ?? .zero)) >= 0 {
+                    
                     self?.isEndPlay = true
                     self?.pause()
                     // Note: correct the progress -- after playback ends, the current pointer should sit at the asset's total duration
@@ -2215,7 +2221,7 @@ public extension BFRecordScreenController {
                     self?.recordBtn.alpha = 1
                 } else {
                     // Note: correct the progress -- after one recording finishes playing, the current pointer should sit at that recording's end point
-                    self?.currentAssetProgress = CMTime(seconds: self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers[(recordInfo?.0 ?? 0) + 1].startCMTime.seconds ?? 0, preferredTimescale: 1000)
+                    self?.currentAssetProgress = self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers[(recordInfo?.0 ?? 0) + 1].startCMTime ?? .zero
                     self?.startPlayRecord(time: self?.currentAssetProgress ?? CMTime.zero)
                 }
             }
@@ -2231,11 +2237,11 @@ public extension BFRecordScreenController {
     /// Handle recording for image assets
     func imageRecordProgress(isRecord: Bool = false, progress: Float64) {
         if isRecord {
-            currentAssetProgress = CMTime(seconds: (recorderManager?.voiceModel?.startCMTime.seconds ?? 0) + progress, preferredTimescale: 1000)
+            currentAssetProgress = (recorderManager?.voiceModel?.startCMTime ?? .zero) + CMTime(seconds: progress, preferredTimescale: 1000)
         } else {
-            currentAssetProgress = CMTime(seconds: recordStartPlayTime.seconds + progress, preferredTimescale: 1000)
+            currentAssetProgress = recordStartPlayTime + CMTime(seconds: progress, preferredTimescale: 1000)
         }
-        BFLog(message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress.seconds),\(itemModels[currItemModelIndex].materialDuraion)")
+        BFLog(1, message: "图片录音进度:\(progress),currentAssetProgress=\(currentAssetProgress.seconds),\(itemModels[currItemModelIndex].materialDuraion)")
         if itemModels[currItemModelIndex].mediaType == .IMAGE {
             /// Reset the progress
             resetCurrentProgress()
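
A small sketch of the CMTime arithmetic the recording-progress hunks above switch to (the start time and elapsed value are placeholders): offsets are added and subtracted as CMTime, and conversion to seconds happens only at the edges, e.g. when drawing or logging.

import CoreMedia

// Placeholder values for a recording's start time and the recorder's reported progress.
let recordStart = CMTime(seconds: 12.0, preferredTimescale: 1000)
let elapsed: Float64 = 0.75

// Keep the arithmetic in CMTime; convert to Double only for display.
let playhead = recordStart + CMTime(seconds: elapsed, preferredTimescale: 1000)
let drawnProgress = (playhead - recordStart).seconds
print(playhead.seconds, drawnProgress)   // 12.75 0.75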

+ 2 - 2
BFRecordScreenKit/Classes/RecordScreen/View/BFVideoThumbProgressView.swift

@@ -265,8 +265,8 @@ extension BFVideoThumbProgressView: UIScrollViewDelegate {
             }
         } else if recordItem?.mediaType == .IMAGE {
             if isDrag {
-                if scrollView.contentOffset.x > CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0 {
-                    scrollView.contentOffset = CGPoint(x: CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0, y: 0)
+                if scrollView.contentOffset.x > ((CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0) + 0.34) {
+                    scrollView.contentOffset = CGPoint(x: (CGFloat(recordItem?.materialDuraion ?? 0) * thumbImageWidth / 2.0) + 0.34, y: 0)
                 }
                 dragScrollProgressHandle?(false, totalW > 0 ? Float(scrollView.contentOffset.x / totalW) : 0)
             }
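
Finally, a rough sketch of what the 0.34 pt allowance in BFVideoThumbProgressView is presumably for (all numbers below are invented; the real totalW comes from the thumbnail layout): if the scrollable width is a hair larger than materialDuraion * thumbImageWidth / 2, clamping exactly at that limit leaves contentOffset.x / totalW fractionally below full progress, while letting the offset travel 0.34 pt further keeps the derived progress from coming out just under 1.

import CoreGraphics

// Invented numbers: a 5 s image item, 70 pt of thumbnail per 2 s of material.
let materialDuraion: CGFloat = 5        // spelling matches the project's property
let thumbImageWidth: CGFloat = 70
let totalW: CGFloat = 175.3             // assumed scrollable width, slightly larger
let limit = materialDuraion * thumbImageWidth / 2.0          // 175.0

let dragOffset: CGFloat = 176.0

// Old clamp: capped at the limit, so progress tops out below 1.
let oldProgress = min(dragOffset, limit) / totalW            // ~0.998

// New clamp: a 0.34 pt allowance lets full progress be reached.
let newProgress = min(dragOffset, limit + 0.34) / totalW     // ~1.0002
print(oldProgress, newProgress)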