@@ -65,6 +65,11 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
 
     }()
 
+    lazy var gropQueue : DispatchGroup = {
+        let group = DispatchGroup() // create the group
+        return group
+    }()
+
     var avplayerTimeObserver: NSKeyValueObservation?
 
     lazy var avplayer : AVPlayer = {
@@ -154,9 +159,9 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
             startRecord()
             DispatchQueue.main.asyncAfter(deadline: .now() + 0.3, execute: { [weak self] in
                 guard let wself = self else { return }
-
+                wself.gropQueue.leave()
+                wself.gropQueue.leave()
                 wself.endRecord()
-                BFRecordScreenCameraManager.firstOpenCamera = false
             })
         }
 
@@ -216,6 +221,25 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
             // wself.getThumImage()
         })
         RunLoop.current.add(timerr!, forMode: .common)
+
+        gropQueue.enter() // audio recording
+        gropQueue.enter() // video recording
+        gropQueue.notify(queue: DispatchQueue.main) {[weak self] in
+            guard let wself = self else { return }
+            if BFRecordScreenCameraManager.firstOpenCamera{
+                BFRecordScreenCameraManager.firstOpenCamera = false
+                return
+            }
+            let cameraSuccess = wself.recordFinishedResult && ((wself.videoModel.timelineCMOut - wself.videoModel.timelineCMIn).seconds > 1)
+            if cameraSuccess {
+                wself.currentAssetProgress = wself.videoModel.timelineCMOut
+                wself.recordEndCallBack?(true, wself.videoModel)
+            }else {
+                wself.videoModel.locationPath = "nil"
+                wself.revertLast()
+                wself.recordEndCallBack?(false, nil)
+            }
+        }
     }
 
     override func endRecord(){
@@ -237,28 +261,12 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
                 wself.videoModel.timelineCMOut = wself.videoModel.timelineCMIn + CMTime(seconds: dur.duration.seconds, preferredTimescale: 1000)
                 BFLog(1, message: "camera file duration:\(wself.recordItem?.videoStickers.count ?? 0), \(wself.videoModel.timelineCMIn.seconds)~\(wself.videoModel.timelineCMOut.seconds), \(dur.duration.seconds)")
                 wself.videoModel.locationPath = finalPath
-                if (wself.videoModel.timelineCMOut - wself.videoModel.timelineCMIn).seconds <= 1 {
-                    wself.videoModel.locationPath = "nil"
-                    wself.revertLast()
-                    wself.recordEndCallBack?(false, nil)
-
-                }else{
-                    if wself.recordFinishedResult {
-                        // recording succeeded
-                        wself.currentAssetProgress = wself.videoModel.timelineCMOut
-                        wself.recordEndCallBack?(true, wself.videoModel)
-                    }
-                }
             }else{
-                // restore the previous state after a failed recording
-                wself.revertLast()
-                wself.recordEndCallBack?(false, nil)
                 cShowHUB(superView: nil, msg: "shoot_tips_least".BFLocale)
-
             }
+            wself.gropQueue.leave()
             DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) {[weak self] in
                 guard let wself = self else { return }
-
                 wself.updateUI(progress: wself.currentAssetProgress)
             }
 
@@ -272,23 +280,8 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
     }
 
     override func recordFinished(isSuccess:Bool) {
-
         recordFinishedResult = isSuccess
-
-        //
-        if videoModel.locationPath != "nil" {
-            if isSuccess {
-                currentAssetProgress = videoModel.timelineCMOut
-                recordEndCallBack?(true, videoModel)
-            }else{
-                if (videoModel.thumImgs?.count ?? 0) > 0 { // use the thumbnail count to tell whether this take has already been deleted
-                    revertLast()
-                    recordEndCallBack?(false, nil)
-                }
-                BFLog(1, message: "camera recording shorter than 1s")
-            }
-
-        }
+        gropQueue.leave()
     }
 
     func dealCamera(_ isSuccess: Bool) {
@@ -448,8 +441,8 @@ class BFRecordScreenCameraManager : BFRecordScreenBaseManager{
     func prepareToPlayNext(needPlay:Bool = true){
         if let mod = recordItem?.videoStickers.sorted(by: { m1, m2 in
             m1.timelineCMIn.seconds < m2.timelineCMIn.seconds
-        }).first(where: { mod in
-            CMTimeCompare(mod.timelineCMIn, currentAssetProgress + CMTime(seconds: 0.33, preferredTimescale: 1000)) <= 0 && CMTimeCompare((currentAssetProgress + CMTime(seconds: 0.33, preferredTimescale: 1000)), mod.timelineCMOut) < 0
+        }).first(where: { mode in
+            CMTimeCompare(mode.timelineCMIn, currentAssetProgress + CMTime(seconds: 0.33, preferredTimescale: 1000)) <= 0 && CMTimeCompare((currentAssetProgress + CMTime(seconds: 0.33, preferredTimescale: 1000)), mode.timelineCMOut) < 0
         }){
             currPlayTime = mod.timelineCMIn
             locationTo(time: currentAssetProgress)
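
The change above moves the success/failure decision out of the individual audio and video callbacks and into a single DispatchGroup notify block: each side is matched by an enter() when recording starts and a leave() when it finishes, and the outcome is then evaluated once on the main queue. Below is a minimal standalone sketch of that enter/leave/notify shape; the RecordCoordinator type and its audioDidFinish/videoDidFinish names are assumptions for illustration, not part of BFRecordScreenCameraManager.

import Foundation

// Illustrative sketch only: type and method names are assumptions,
// not part of BFRecordScreenCameraManager.
final class RecordCoordinator {
    private let group = DispatchGroup()
    private var audioOK = false
    private var videoOK = false

    // Call before kicking off the two asynchronous recorders.
    func begin(completion: @escaping (Bool) -> Void) {
        group.enter() // audio recording
        group.enter() // video recording
        group.notify(queue: .main) { [weak self] in
            guard let self = self else { return }
            // Fires exactly once, after both leave() calls below have run.
            completion(self.audioOK && self.videoOK)
        }
    }

    // Each finish callback must call leave() exactly once: a missing leave()
    // keeps notify from ever firing, an extra one is an unbalanced-group crash.
    func audioDidFinish(success: Bool) { audioOK = success; group.leave() }
    func videoDidFinish(success: Bool) { videoOK = success; group.leave() }
}

Keeping every enter() balanced by exactly one leave() is what lets the notify block in the diff replace the duplicated success handling that previously lived in both the endRecord completion and recordFinished(isSuccess:).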