weak self setup

harry, 3 years ago
commit d6908bf61a
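
The commit replaces repeated self?. optional-chaining (and the "?? default" fallbacks it forces) inside escaping closures with a single guard that unwraps the weak reference once at the top of each closure. A condensed before/after, abridged from the audioSettingView.callBack change below:

    // Before: every access optional-chains and needs a fallback value
    audioSettingView.callBack = { [weak self] haveSpeak, noHaveSpeak in
        self?.haveSpeakVolume = haveSpeak / 100.0
        self?.assetPlayer.volume = self?.noSpeakVolume ?? 1.0
    }

    // After: unwrap once, then use the strong local reference
    audioSettingView.callBack = { [weak self] haveSpeak, noHaveSpeak in
        guard let wself = self else { return }
        wself.haveSpeakVolume = haveSpeak / 100.0
        wself.assetPlayer.volume = wself.noSpeakVolume
    }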

+ 3 - 1
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenBaseManager.swift

@@ -77,7 +77,9 @@ class BFRecordScreenBaseManager : NSObject{
     }
     
     func clean(){
-        GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
+        DispatchQueue.global().async {
+            GPUImageContext.sharedFramebufferCache().purgeAllUnassignedFramebuffers()
+        }
     }
     
     deinit {

+ 118 - 78
BFRecordScreenKit/Classes/RecordScreen/Controller/BFRecordScreenController.swift

@@ -428,15 +428,17 @@ public class BFRecordScreenController: BFBaseViewController {
         audioSettingView.noSpeakSlider.valueIsInt = true
         audioSettingView.callBack = { [weak self] haveSpeak, noHaveSpeak in
 //            BFLog(1, message: "haveSpeak is:\(haveSpeak),noHaveSpeak is:\(noHaveSpeak)")
-            self?.haveSpeakVolume = haveSpeak / 100.0
-            self?.noSpeakVolume = noHaveSpeak / 100.0
-            if !(self?.isNormalPlaying ?? false), !(self?.isRecording ?? false) {
-                if self?.deleteRecordBtn.isHidden ?? false {
-//                    self?.recordPlayer.volume = 0
-                    self?.assetPlayer.volume = self?.noSpeakVolume ?? 1.0
+            guard let wself = self else { return }
+
+            wself.haveSpeakVolume = haveSpeak / 100.0
+            wself.noSpeakVolume = noHaveSpeak / 100.0
+            if !wself.isNormalPlaying, !wself.isRecording {
+                if wself.deleteRecordBtn.isHidden {
+//                    wself.recordPlayer.volume = 0
+                    wself.assetPlayer.volume = wself.noSpeakVolume
                 } else {
-//                    self?.recordPlayer.volume = 1.0
-                    self?.assetPlayer.volume = self?.haveSpeakVolume ?? 0.0
+//                    wself.recordPlayer.volume = 1.0
+                    wself.assetPlayer.volume = wself.haveSpeakVolume
                 }
             }
         }
@@ -507,11 +509,11 @@ public class BFRecordScreenController: BFBaseViewController {
     deinit {
         NotificationCenter.default.removeObserver(self)
         if avplayerTimeObserver != nil {
-            assetPlayer.removeTimeObserver(avplayerTimeObserver)
+            assetPlayer.removeTimeObserver(avplayerTimeObserver as Any)
             avplayerTimeObserver?.invalidate()
         }
         if recordPlayerTimeObserver != nil {
-            recordPlayer.removeTimeObserver(recordPlayerTimeObserver)
+            recordPlayer.removeTimeObserver(recordPlayerTimeObserver as Any)
             recordPlayerTimeObserver?.invalidate()            
         }
         
@@ -579,9 +581,11 @@ public class BFRecordScreenController: BFBaseViewController {
 
         // add by ak 取 nsl token
         BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
+            guard let wself = self else { return }
+            
             BFLog(message: "nls appkey is \(appkey), token is \(token)")
-            self?.NeoNuiAPPID = appkey
-            self?.NeoNuiToken = token
+            wself.NeoNuiAPPID = appkey
+            wself.NeoNuiToken = token
         }
         
         initlizeRecordManager()
@@ -639,21 +643,22 @@ public class BFRecordScreenController: BFBaseViewController {
         }
         // 编辑字幕完成
         subtitleEditView.editSubtitleDone = { [weak self] newtext, index in
+            guard let wself = self else { return }
 
             // 更新缓存数据
-            if index < (self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.count ?? 0) {
+            if index < wself.itemModels[wself.currItemModelIndex].titleStickers.count {
                 if newtext.count == 0 {
                     // 删除数据
                     BFLog(message: "清空字幕操作要删除原字幕 sticker 数据")
-                    self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.remove(at: index)
-                    self?.subtitleLabel.text = ""
+                    wself.itemModels[wself.currItemModelIndex].titleStickers.remove(at: index)
+                    wself.subtitleLabel.text = ""
                 } else {
                     // 更新数据
-                    self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers[index].text = newtext
+                    wself.itemModels[wself.currItemModelIndex].titleStickers[index].text = newtext
                 }
             }
 
-            self?.setSubtitleStyle(settingModel: (self?.subtitleSettingView.subtitle.setting)!)
+            wself.setSubtitleStyle(settingModel: wself.subtitleSettingView.subtitle.setting)
         }
 
         layoutsubview()
@@ -749,14 +754,17 @@ public class BFRecordScreenController: BFBaseViewController {
         recorderManager = BFVoiceRecordManager()
         // 录音进度
         recorderManager?.recorderProgrossHandle = { [weak self] progress in
-            self?.drawProgressIndication(progress: (progress.isNaN || progress.isInfinite) ? 0 : progress)
+            guard let wself = self else { return }
+            wself.drawProgressIndication(progress: (progress.isNaN || progress.isInfinite) ? 0 : progress)
         }
 
         // MARK: 录音字幕回调
         recorderManager?.subtitleRecordHandle = { [weak self] eventCode, recordId, asrResult, audioFilePath, _ in
 //            BFLog(1, message: "eventcode:\(eventCode), recordid:\(String(describing: recordId)), asr:\(String(describing: asrResult))")
-            
+
             DispatchQueue.global().async { [weak self] in
+                guard let wself = self else { return }
+
                 let newSubtitle = PQEditSubTitleModel()
                 newSubtitle.recordId = recordId
 
@@ -774,13 +782,13 @@ public class BFRecordScreenController: BFBaseViewController {
                 let dicResult: [String: Any]? = jsonStringToDictionary(asrResult!)
                 let payload = dicResult?["payload"] as? [String: Any]
 //                let header = dicResult?["header"] as? [String: Any]
-//                BFLog(1, message: "onNuiEventCallback event 识别结果:) \(payload?["result"] ?? "") ,taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\(((payload?["begin_time"]) as? Int) ?? 0) ~ \(((payload?["time"]) as? Int) ?? 0) startTime:\(self?.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
+//                BFLog(1, message: "onNuiEventCallback event 识别结果:) \(payload?["result"] ?? "") ,taskId:\((header?["task_id"] as? String) ?? "taskId"), 识别时间:\(((payload?["begin_time"]) as? Int) ?? 0) ~ \(((payload?["time"]) as? Int) ?? 0) startTime:\(wself.recorderManager?.voiceModel?.startCMTime.seconds ?? 0.0)")
                 // 1,保存字幕数据 begin_time是开始出现文字的时间,time 是结束文字出现的时间 单位都为毫秒,都是相对于录制音频数据整段时间。self.recorderManager.voiceModel?.startCMTime.seconds 为开始的录制的时间,开始和结束都要加上这个时差
                 // 这里加300ms 是因为返回结果为了切到字,时长提前一些时间,具体时间官方没说和原音频有关系。这里我们先延后300ms 单位:毫秒。
                 var tempVoice: PQVoiceModel?
                 var tempItem: BFRecordItemModel?
                 // 1:先通过titleTaskId来找是否存在录音
-                self?.itemModels.enumerated().forEach { index, item in
+                wself.itemModels.enumerated().forEach { index, item in
                     if tempVoice == nil {
                         tempVoice = item.voiceStickers.first { voice in
                             voice.recordId == recordId
@@ -792,19 +800,20 @@ public class BFRecordScreenController: BFBaseViewController {
                     }
                 }
                 // 2:如果通过titleTaskId没找到录音文件,则通过
-                if tempVoice == nil && (self?.isRecording ?? false) && self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last?.recordId == nil {
+                if tempVoice == nil && wself.isRecording && wself.itemModels[wself.currItemModelIndex].voiceStickers.last?.recordId == nil {
 //                    BFLog(3, message: "字幕回调-如果通过titleTaskId没找到录音文件:taskID=\(recordId ),audioFilePath=\(audioFilePath ?? "")")
-                    tempVoice = self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last
+                    tempVoice = wself.itemModels[wself.currItemModelIndex ].voiceStickers.last
                 }
                 // 3:如果通过titleTaskId跟audioFilePath都没找到录音文件,则默认为recorderManager?.voiceModel
-                guard let currentVoice = (tempVoice ?? self?.recorderManager?.voiceModel) ?? self?.itemModels[self?.currItemModelIndex ?? 0].voiceStickers.last else {
-                    BFLog(3, message: "字幕回调-最终没找到录音文件:taskID=\(recordId ),audioFilePath=\(audioFilePath ?? "")")
+                guard let currentVoice = (tempVoice ?? wself.recorderManager?.voiceModel) ?? wself.itemModels[wself.currItemModelIndex].voiceStickers.last else {
+//                    BFLog(3, message: "字幕回调-最终没找到录音文件:taskID=\(recordId ?? "recordId" ),audioFilePath=\(audioFilePath ?? "")")
                     return
                 }
-                guard let currentItem = tempItem ?? self?.itemModels[self?.currItemModelIndex ?? 0] else {
-//                    BFLog(3, message: "字幕回调-最终没找到录音文件:taskID=\(recordId ),audioFilePath=\(audioFilePath ?? "")")
-                    return
+                
+                if tempItem == nil {
+                    tempItem = wself.itemModels[wself.currItemModelIndex]
                 }
+
                 newSubtitle.timelineIn = currentVoice.startCMTime + CMTime(seconds: Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0, preferredTimescale: 1000)
                 newSubtitle.timelineOut = currentVoice.startCMTime + CMTime(seconds: Float64((((payload?["time"]) as? Int) ?? 0) - 10) / 1000.0, preferredTimescale: 1000)
                 newSubtitle.audioFilePath = currentVoice.wavFilePath
@@ -817,14 +826,17 @@ public class BFRecordScreenController: BFBaseViewController {
                 newSubtitle.text = showText
 //                newSubtitle.audioFilePath = audioFilePath ?? ""
                 BFLog(1, message: "添加字幕数据 timelineIn \(newSubtitle.timelineIn.seconds) timelineOut \(newSubtitle.timelineOut.seconds) text: \(newSubtitle.text)")
-                newSubtitle.setting = self?.subtitleSettingView.subtitle.setting ?? BFSubTitileSettingModel()
-                currentItem.titleStickers.append(newSubtitle)
+                newSubtitle.setting = wself.subtitleSettingView.subtitle.setting
+                tempItem?.titleStickers.append(newSubtitle)
             }
         }
 
         // MARK: 录音结束
         recorderManager?.endRecordHandle = { [weak self, weak recorderManager] voideModel, _ in
-            if let wself = self, let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
+            
+            guard let wself = self else { return }
+            
+            if let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
                 // 加入到语音数组里
 
                 model.endCMTime = wself.currentAssetProgress
@@ -861,7 +873,7 @@ public class BFRecordScreenController: BFBaseViewController {
                 BFLog(1, message: "添加录音文件:\(model.startCMTime.seconds) -- \(model.endCMTime.seconds)")
                 wself.itemModels[wself.currItemModelIndex].voiceStickers.append(model)
                 // 录制结束回调
-                self?.recordEndHandle?(model)
+                wself.recordEndHandle?(model)
                 // 如果是图片素材同时有需要删除的录音时需要调整录音文件开始结束时间
                 // warning: 图片录制的时候应该只能在结尾处录制
                 if wself.itemModels[wself.currItemModelIndex].mediaType == .Image {
@@ -906,18 +918,20 @@ public class BFRecordScreenController: BFBaseViewController {
                     wself.playBtn.isHidden = wself.playBtn.isSelected
                 }
                 DispatchQueue.main.async { [weak self] in
+                    guard let wself = self else { return }
+
                     // 录音完,重绘撤销按钮,更新录音按钮,
-                    self?.changeWithDrawBtnLayout(2)
+                    wself.changeWithDrawBtnLayout(2)
                     // 注:在录制结束时矫正当前位置,避免跟指针无法对其
-                    self?.indirectionView?.resetCurrentItem(start: model.startCMTime.seconds, end: model.endCMTime.seconds)
+                    wself.indirectionView?.resetCurrentItem(start: model.startCMTime.seconds, end: model.endCMTime.seconds)
                     if deletedVoices.count > 0 {
                         /// 重绘录音进度视图
-                        self?.resetAllIndirectionView()
+                        wself.resetAllIndirectionView()
                     }
                     // 矫正进度
-                    self?.resetCurrentProgress()
-                    self?.deleteRecordBtn.isHidden = true
-                    self?.recordBtn.isHidden = (self?.itemModels[self?.currItemModelIndex ?? 0].mediaType != .Video && (self?.isEndPlay ?? false)) ? false : (self?.isEndPlay ?? false)
+                    wself.resetCurrentProgress()
+                    wself.deleteRecordBtn.isHidden = true
+                    wself.recordBtn.isHidden = (wself.itemModels[wself.currItemModelIndex].mediaType != .Video && (wself.isEndPlay ?? false)) ? false : (wself.isEndPlay ?? false)
                 }
                 wself.currentPlayRecordIndex = -3 // 刚录音完,不需要播放录音
                 BFLog(3, message: "重置播放index-\(#function) = \(wself.currentPlayRecordIndex)")
@@ -927,22 +941,27 @@ public class BFRecordScreenController: BFBaseViewController {
             }
         }
         recorderManager?.cancelRecordHandle = { [weak self] voiceModel in
+            guard let wself = self else { return }
             // 取消录制
-            self?.recordManagerCancelRecord(voiceModel: voiceModel)
+            wself.recordManagerCancelRecord(voiceModel: voiceModel)
         }
 
         recorderManager?.NeoNuiDebugHandle = { [weak self] msg,isShow in
-            self?.neoNuiDebugLabel.text = "字幕服务:\(msg ?? "")"
+            
+            guard let wself = self else { return }
+            
+            wself.neoNuiDebugLabel.text = "字幕服务:\(msg ?? "")"
             if(isShow){
-                cShowHUB(superView: self?.view, msg: msg)
+                cShowHUB(superView: wself.view, msg: msg)
             }
         
         }
 
         recorderManager?.AudioQueueRecoderDebugHandle = { [weak self] msg,isShow in
-            self?.audioQueueRecoderLabel.text = "录音机:\(msg ?? "")"
+            guard let wself = self else { return }
+            wself.audioQueueRecoderLabel.text = "录音机:\(msg ?? "")"
             if(isShow){
-                cShowHUB(superView: self?.view, msg: msg)
+                cShowHUB(superView: wself.view, msg: msg)
             }
         }
 
@@ -1220,7 +1239,6 @@ public class BFRecordScreenController: BFBaseViewController {
         }
 
         DispatchQueue.global().async {[weak self] in
-            
             guard let wself = self else { return }
             
             wself.recorderManager?.cancelTitleService()
@@ -1266,8 +1284,10 @@ public class BFRecordScreenController: BFBaseViewController {
 
         BFLog(1, message: "停止录音- \(currentAssetProgress.seconds)")
         recordBtn.isEnabled = false
-        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) { [weak self] in
-            self?.recordBtn.isEnabled = true
+        DispatchQueue.main.asyncAfter(deadline: .now() + 0.25) {[weak self] in
+            guard let wself = self else { return }
+            
+            wself.recordBtn.isEnabled = true
         }
 
         rscurrentManager.endRecord()
@@ -1815,12 +1835,12 @@ public class BFRecordScreenController: BFBaseViewController {
                 if currentPlayRecordIndex == -1, wself.isNormalPlaying {
                     let second = currentT - recordedAudio.startCMTime
                     DispatchQueue.main.async { [weak self] in
-                        self?.recordPlayer.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
-                            if finished, self?.isNormalPlaying ?? false {
-                                self?.recordPlayer.play()
-//                                self?.recordPlayer.volume = 1
-//                                self?.assetPlayer.volume = self?.haveSpeakVolume ?? 0
-                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer.currentItem?.duration ?? .zero)), \(self?.recordPlayer.currentItem?.currentTime().seconds ?? 0)")
+                        wself.recordPlayer.seek(to: second, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000), completionHandler: { [weak self] finished in
+                            if finished, wself.isNormalPlaying {
+                                wself.recordPlayer.play()
+//                                wself.recordPlayer.volume = 1
+//                                wself.assetPlayer.volume = wself.haveSpeakVolume ?? 0
+                                BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(wself.recordPlayer.currentItem?.duration ?? .zero)), \(wself.recordPlayer.currentItem?.currentTime().seconds ?? 0)")
                             }
                         })
                     }
@@ -2006,7 +2026,7 @@ public class BFRecordScreenController: BFBaseViewController {
 
             // 非录音的播放状态,结束时自动跳下一段落
             if !wself.isRecording, wself.currItemModelIndex < (wself.itemModels.count - 1) {
-//                self?.collectionView.setContentOffset(CGPoint(x: CGFloat((self?.currItemModelIndex ?? 0) + 1) * (self?.collectionView.frame.width ?? 0), y: 0), animated: true)
+//                wself.collectionView.setContentOffset(CGPoint(x: CGFloat((wself.currItemModelIndex ?? 0) + 1) * (wself.collectionView.frame.width ?? 0), y: 0), animated: true)
             } else {} //
 
             wself.isNormalPlaying = false
@@ -2037,15 +2057,17 @@ public class BFRecordScreenController: BFBaseViewController {
                 currentAssetProgress = CMTime(seconds: time.seconds, preferredTimescale: 1000)
 //                BFLog(1, message: "video curr:\(CMTimeGetSeconds(currentAssetProgress))")
                 DispatchQueue.main.async { [weak self] in
-                    BFLog(message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
-                    self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(time).formatDurationToHMS())
-                    let su = !(self?.isDragingProgressSlder ?? false) || (self?.isRecording ?? false) || (self?.isNormalPlaying ?? false)
+                    guard let wself = self else { return }
+                    
+                    BFLog(message: "更新录音进度\(#function)-\(wself.currentAssetProgress.seconds)")
+                    wself.progreddL.text = String(format: "%@", CMTimeGetSeconds(time).formatDurationToHMS())
+                    let su = !wself.isDragingProgressSlder || wself.isRecording || wself.isNormalPlaying
                     if su { // 不拖动,正常播放和录音时更新进度条
-                        self?.progressThumV.progress = time.seconds
+                        wself.progressThumV.progress = time.seconds
                     }
                     // 更新字幕
-                    if !(self?.isRecording ?? false) {
-                        self?.updateSubtitle(time: time)
+                    if !wself.isRecording {
+                        wself.updateSubtitle(time: time)
                     }
                 }
             }
@@ -2056,7 +2078,9 @@ public class BFRecordScreenController: BFBaseViewController {
             let progress = (currentAssetProgress - (recorderManager?.voiceModel?.startCMTime ?? .zero)).seconds // - ratioX
             // 使用播放器的进度来画线,因为进度是跟着播放器来了
             DispatchQueue.main.async { [weak self] in
-                self?.indirectionView?.setProgress(start: startTime, progress: max(0, progress))
+                guard let wself = self else { return }
+                
+                wself.indirectionView?.setProgress(start: startTime, progress: max(0, progress))
             }
         }
     }
@@ -2097,8 +2121,10 @@ public class BFRecordScreenController: BFBaseViewController {
                     currentAssetProgress = CMTime(seconds: Double(newProgress) * duration.seconds, preferredTimescale: 1000)
                 }
                 DispatchQueue.main.async { [weak self] in
-                    BFLog(message: "更新录音进度\(#function)- \(self?.currentAssetProgress.seconds ?? 0)")
-                    self!.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
+                    guard let wself = self else { return }
+                    
+                    BFLog(message: "更新录音进度\(#function)- \(wself.currentAssetProgress.seconds)")
+                    wself.progreddL.text = String(format: "%@", CMTimeGetSeconds(wself.currentAssetProgress).formatDurationToHMS())
                 }
                 assetPlayer.seek(to: currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1_000_000), toleranceAfter: CMTime(value: 1, timescale: 1_000_000)) { _ in
                 }
@@ -2120,7 +2146,7 @@ public class BFRecordScreenController: BFBaseViewController {
             DispatchQueue.main.async { [weak self] in
                 guard let wself = self else { return }
                 
-                BFLog(message: "更新录音进度\(#function)- \(self?.currentAssetProgress.seconds ?? 0)")
+                BFLog(message: "更新录音进度\(#function)- \(wself.currentAssetProgress.seconds)")
                 wself.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
             }
             BFLog(message: "progress = \(progress),currentAssetProgress = \(currentAssetProgress.seconds),materialDuraion = \(itemModels[currItemModelIndex].materialDuraion.seconds)")
@@ -2288,8 +2314,10 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
     
     func removeCameroCell(){
         itemModels.remove(at: currItemModelIndex)
-        UIView.performWithoutAnimation {
-            collectionView.deleteItems(at: [IndexPath(row: currItemModelIndex, section: 0)])
+        UIView.performWithoutAnimation {[weak self] in
+            guard let wself = self else { return }
+            
+            wself.collectionView.deleteItems(at: [IndexPath(row: wself.currItemModelIndex, section: 0)])
         }
         currItemModelIndex -= 1
     }
@@ -2317,10 +2345,12 @@ extension BFRecordScreenController: UICollectionViewDelegate, UICollectionViewDa
             // 重绘录音区域
             progressThumV.recordItem = recordItem
             DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
+                guard let wself = self else { return }
+                
                 /// 重绘录音进度视图
-                self?.resetAllIndirectionView()
+                wself.resetAllIndirectionView()
                 // 重绘录音进度
-                self?.recordBtn.isEnabled = true
+                wself.recordBtn.isEnabled = true
             }
             // 更新缩略图
             //            progressThumV.isHidden = false
@@ -2384,9 +2414,12 @@ public extension BFRecordScreenController {
             currenStartPlayTime = CMTime.zero
         }
         playRecord(at: time, periodicTimeObserver: { [weak self] currentT, _ in
+            
+            guard let wself = self else { return }
+            
 //            BFLog(1, message: "播放录音进度:\(currentT.seconds),\(currentItem)")
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .Image, self?.isNormalPlaying ?? false {
-                self?.imageRecordProgress(progress: CMTimeGetSeconds(currentT))
+            if wself.itemModels[wself.currItemModelIndex].mediaType == .Image, wself.isNormalPlaying {
+                wself.imageRecordProgress(progress: CMTimeGetSeconds(currentT))
             }
         }, didPlayToEndTime: { [weak self] recordInfo, currentItem in
             
@@ -2414,9 +2447,14 @@ public extension BFRecordScreenController {
                 }
             }
         }) { [weak self] _, _ in
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .Image {
-                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 3) {
-                    self?.startPlayRecord(time: self?.currentAssetProgress ?? CMTime.zero)
+            
+            guard let wself = self else { return }
+            
+            if wself.itemModels[wself.currItemModelIndex].mediaType == .Image {
+                DispatchQueue.main.asyncAfter(deadline: DispatchTime.now() + 3) {[weak self] in
+                    guard let wself = self else { return }
+                    
+                    wself.startPlayRecord(time: wself.currentAssetProgress)
                 }
             }
         }
@@ -2438,14 +2476,16 @@ public extension BFRecordScreenController {
 
     /// 重置进度
     func resetCurrentProgress() {
-        DispatchQueue.main.async { [weak self] in
-            if !(self?.isRecording ?? false), CMTimeCompare((self?.currentAssetProgress ?? .zero), (self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? .zero)) > 0 {
+        DispatchQueue.main.async {[weak self] in
+            guard let wself = self else { return }
+            
+            if !wself.isRecording, CMTimeCompare(wself.currentAssetProgress, wself.itemModels[wself.currItemModelIndex].materialDuraion) > 0 {
                 return
             }
-//            BFLog(1, message: "更新录音进度\(#function)-\(self?.currentAssetProgress.seconds ?? 0)")
-            self?.progreddL.text = String(format: "%@", (self?.currentAssetProgress.seconds ?? 0).formatDurationToHMS())
-            self?.progressThumV.progress = (self?.currentAssetProgress.seconds ?? 0)
-            self?.updateSubtitle(time: self?.currentAssetProgress ?? CMTime.zero)
+//            BFLog(1, message: "更新录音进度\(#function)-\(wself.currentAssetProgress.seconds ?? 0)")
+            wself.progreddL.text = String(format: "%@", wself.currentAssetProgress.seconds.formatDurationToHMS())
+            wself.progressThumV.progress = wself.currentAssetProgress.seconds
+            wself.updateSubtitle(time: wself.currentAssetProgress)
         }
     }
 

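Side note on the subtitle callback in the file above: each recognized segment's timeline range is anchored to the owning voice sticker's start time, offset by the heuristics described in the inline comments (begin_time + 300 ms, time - 10 ms, both in milliseconds relative to the recorded audio). A minimal sketch of that math, assuming the values have already been pulled out of the ASR payload (the helper name and parameters are illustrative, not part of the codebase):

    import CoreMedia

    // beginTimeMs / timeMs stand in for payload["begin_time"] / payload["time"]
    func subtitleRange(voiceStart: CMTime, beginTimeMs: Int, timeMs: Int) -> (timelineIn: CMTime, timelineOut: CMTime) {
        let timelineIn = voiceStart + CMTime(seconds: Double(beginTimeMs + 300) / 1000.0, preferredTimescale: 1000)
        let timelineOut = voiceStart + CMTime(seconds: Double(timeMs - 10) / 1000.0, preferredTimescale: 1000)
        return (timelineIn, timelineOut)
    }
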
+ 4 - 2
BFRecordScreenKit/Classes/RecordScreen/View/ProgressView/BFCameraProgressView.swift

@@ -54,8 +54,10 @@ class BFCameraProgressView: BFProgressBaseView {
     /// - Parameter images:
     func addThumbImages() {
         if let count = recordItem?.thumbImgs.count, count > 0 {
-            DispatchQueue.main.async {
-                self.collectionV.insertItems(at: [IndexPath(row: count - 1, section: 0)])
+            DispatchQueue.main.async {[weak self] in
+                guard let wself = self else { return }
+                
+                wself.collectionV.insertItems(at: [IndexPath(row: count - 1, section: 0)])
             }
         }
     }

+ 12 - 12
BFRecordScreenKit/Classes/RecordScreen/View/ProgressView/BFVideoThumbProgressView.swift

@@ -189,32 +189,32 @@ class BFVideoThumbProgressView: BFProgressBaseView {
 
     func appendThumb(progress: Double = 0) {
         DispatchQueue.main.async { [weak self] in
-            guard let sslf = self else { return }
+            guard let wself = self else { return }
             let count: Int = Int(progress / 2)
 //            BFLog(message: "需要的图片个数:progress=\(progress),count=\(count)")
-            if sslf.recordItem?.mediaType == .Image, (sslf.progressView.contentView.subviews.count - 6) < count {
-                guard let image = (sslf.recordItem?.thumbImgs.first ?? sslf.recordItem?.coverImg) else {
+            if wself.recordItem?.mediaType == .Image, (wself.progressView.contentView.subviews.count - 6) < count {
+                guard let image = (wself.recordItem?.thumbImgs.first ?? wself.recordItem?.coverImg) else {
                     return
                 }
-                if sslf.lastImg != nil, sslf.lastImg?.superview != nil {
-                    sslf.lastImg?.removeFromSuperview()
+                if wself.lastImg != nil, wself.lastImg?.superview != nil {
+                    wself.lastImg?.removeFromSuperview()
                 }
-                let lastIndex = sslf.progressView.contentView.subviews.count - 1
+                let lastIndex = wself.progressView.contentView.subviews.count - 1
                 for i in lastIndex ... lastIndex + 10 {
                     let iv = UIImageView(image: image)
                     iv.contentMode = .scaleAspectFill
                     iv.clipsToBounds = true
-                    sslf.progressView.contentView.addSubview(iv)
+                    wself.progressView.contentView.addSubview(iv)
                     iv.snp.makeConstraints { make in
-                        make.left.equalTo(CGFloat(i) * CGFloat(sslf.thumbImageWidth) + sslf.width * 0.5)
+                        make.left.equalTo(CGFloat(i) * CGFloat(wself.thumbImageWidth) + wself.width * 0.5)
                         make.top.bottom.equalToSuperview()
                         make.height.equalTo(50)
-                        make.width.equalTo(sslf.thumbImageWidth)
+                        make.width.equalTo(wself.thumbImageWidth)
                     }
-                    sslf.lastImg = iv
+                    wself.lastImg = iv
                 }
-                sslf.lastImg?.snp.makeConstraints { make in
-                    make.right.equalTo(sslf.width * -0.5)
+                wself.lastImg?.snp.makeConstraints { make in
+                    make.right.equalTo(wself.width * -0.5)
                 }
             }
         }