@@ -91,15 +91,18 @@ public class BFRecordScreenController: BFBaseViewController {
            var index = sself.itemModels[sself.currItemModelIndex].voiceStickers.count - 1
            while index >= 0{
                let m = sself.itemModels[sself.currItemModelIndex].voiceStickers[index]
+
+               // Found a recording earlier than the new one; stop checking
+               if model.startTime >= m.endTime {
+                   break
+               }
+
                index -= 1
                if model.endTime > m.startTime && model.endTime <= m.endTime
                    || model.startTime <= m.startTime && model.startTime > m.endTime{
                    sself.itemModels[sself.currItemModelIndex].voiceStickers.remove(at: index+1)
                    continue
                }
-               if model.startTime < m.endTime {
-                   break
-               }
            }
            BFLog(1, message: "添加录音文件:\(model.startTime) -- \(model.endTime)")
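
The loop above scans existing recordings from newest to oldest and drops any that collide with the newly recorded clip. For reference, the same technique can be written as a small standalone routine; the sketch below uses a hypothetical Segment type and removeOverlaps function rather than the project's voice-sticker model, assumes the array is ordered by start time, and simplifies the overlap test, so it illustrates the idea rather than reproducing the exact condition in the hunk.

struct Segment {
    var startTime: Double
    var endTime: Double
}

/// Removes every segment that overlaps `newSegment`, scanning from the end of an
/// array ordered by start time and stopping at the first segment that already
/// ends before the new one starts.
func removeOverlaps(of newSegment: Segment, in segments: inout [Segment]) {
    var index = segments.count - 1
    while index >= 0 {
        let m = segments[index]
        // All earlier segments end before the new recording starts.
        if newSegment.startTime >= m.endTime { break }
        index -= 1
        // Here newSegment.startTime < m.endTime, so the intervals overlap
        // exactly when the new segment ends after m starts.
        if newSegment.endTime > m.startTime {
            segments.remove(at: index + 1)
        }
    }
}
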
@@ -126,6 +129,7 @@ public class BFRecordScreenController: BFBaseViewController {
        l.textColor = .white
        l.shadowColor = .black
        l.shadowOffset = CGSize(width: 1, height: 1)
+       l.text = "00:00"
        return l
    }()
@@ -207,6 +211,10 @@ public class BFRecordScreenController: BFBaseViewController {

    lazy var progressThumV : BFVideoThumbProgressView = {
        let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
+       vv.dragStartHandle = { [weak self] in
+           self?.isDragingProgressSlder = true
+           self?.pause()
+       }
        vv.dragScrollProgressHandle = {[weak self] isStart, process in
            DispatchQueue.main.async {[weak self] in
                guard let sself = self else {
@@ -215,11 +223,7 @@ public class BFRecordScreenController: BFBaseViewController {
                if isStart {
                    sself.events.append(WithDrawModel(type: 0, timestamp: sself.currentAssetProgress.seconds))
                }
-               if sself.isNormalPlaying || sself.isRecording {
-                   sself.pause()
-               }
                sself.isDragingProgressSlder = true
-               BFLog(1, message: "drag 进行中")

                sself.changeProgress(progress: process)
            }
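
These two hunks move the pause out of the per-frame scroll callback and into a one-shot dragStartHandle, so the player is paused exactly once when a drag begins rather than on every progress tick. A sketch of that callback split, using a hypothetical ThumbProgressView in place of BFVideoThumbProgressView:

import UIKit

final class ThumbProgressView: UIView {
    /// Fired once, when the user starts dragging the thumb.
    var dragStartHandle: (() -> Void)?
    /// Fired repeatedly while dragging, with the current progress in 0...1.
    var dragScrollProgressHandle: ((_ isStart: Bool, _ progress: Float) -> Void)?

    private var isDragging = false
    private var currentProgress: Float = 0

    func beginDrag() {
        guard !isDragging else { return }
        isDragging = true
        dragStartHandle?()                               // pause playback once, here
        dragScrollProgressHandle?(true, currentProgress)
    }

    func dragMoved(to progress: Float) {
        guard isDragging else { return }
        currentProgress = progress
        dragScrollProgressHandle?(false, progress)       // only seek / update UI here
    }
}
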
@@ -254,6 +258,11 @@ public class BFRecordScreenController: BFBaseViewController {
        hiddenNavigation()
    }

+   public override func viewWillDisappear(_ animated: Bool) {
+       super.viewWillDisappear(animated)
+       pause()
+   }
+
    public override func viewDidLoad(){
        super.viewDidLoad()
        _ = disablePopGesture()
@@ -385,7 +394,7 @@ public class BFRecordScreenController: BFBaseViewController {
        recorderManager.voiceModel = model
        recorderManager.startRecord(index: 1)
        // movie?.startProcessing()
-       assetPlayer?.volume = 0
+//       assetPlayer?.volume = 0
        assetPlayer?.play()
        playBtn.isSelected = true
    }
@@ -556,6 +565,8 @@ public class BFRecordScreenController: BFBaseViewController {
        // Create the player
        if self.recordPlayer == nil || (self.recordPlayer?.currentItem?.asset as? AVURLAsset)?.url.lastPathComponent != (recordedAudio.wavFilePath as NSString).lastPathComponent {
            let newItem = AVPlayerItem(url: URL(fileURLWithPath: recordedAudio.wavFilePath))
+           BFLog(1, message: "录音播放器初始化:\(self.recordPlayer == nil ? "init player" : "replace item")")
+
            if let player = self.recordPlayer {
                player.pause()
                if let playItem = player.currentItem {
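
The branch above reuses the existing recordPlayer whenever the file name has not changed and only swaps the AVPlayerItem otherwise. A compact sketch of that reuse pattern, written as a hypothetical helper:

import AVFoundation

/// Reuses an existing AVPlayer when possible: if it is already loaded with the
/// same file it is left alone; otherwise its item is replaced, or a player is
/// created on first use. Hypothetical helper mirroring the lastPathComponent
/// comparison in the hunk above.
func preparePlayer(_ player: inout AVPlayer?, forAudioAt path: String) {
    let url = URL(fileURLWithPath: path)
    let currentURL = (player?.currentItem?.asset as? AVURLAsset)?.url
    guard currentURL?.lastPathComponent != url.lastPathComponent else { return }

    let item = AVPlayerItem(url: url)
    if let existing = player {
        existing.pause()
        existing.replaceCurrentItem(with: item)
    } else {
        player = AVPlayer(playerItem: item)
    }
}
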
@@ -576,21 +587,29 @@ public class BFRecordScreenController: BFBaseViewController {
                    self?.currentPlayRecordIndex = -1
                }
            }
-
-        synced(currentPlayRecordIndex) {
+        if recordPlayer?.currentItem?.duration.timescale == 0 {
+            BFLog(1, message: "时间timescale == 0")
+        }
+        synced(currentPlayRecordIndex) {[weak self] in
+            guard let self = self else {
+                return
+            }
+            BFLog(1, message: "判断是否开始录音播放** hadPrepareToPlayRecord:\(hadPrepareToPlayRecord), currentPlayRecordIndex:\(currentPlayRecordIndex), isNormalPlaying :\(self.isNormalPlaying)")
+
            if !hadPrepareToPlayRecord
+               && recordPlayer?.currentItem?.duration.timescale != 0
               && CMTimeGetSeconds(currentT) >= recordedAudio.startTime
               && CMTimeGetSeconds(currentT) <= recordedAudio.endTime - 0.2 // This condition guards against a small rollback after recording ends that would replay the newest recording
            {
                // Playback should start now
                // Two cases: if already playing, skip; if paused and dragged to the middle, seek
-               if currentPlayRecordIndex == -1 && self.isNormalPlaying{
+               if currentPlayRecordIndex == -1 && self.isNormalPlaying {
                    let second = CMTimeGetSeconds(currentT) - recordedAudio.startTime
                    DispatchQueue.main.async {[weak self] in
-                       self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second), timescale: 100), toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000), completionHandler: {[weak self] finished in
+                       self?.recordPlayer?.seek(to: CMTime(value: CMTimeValue(second*1000000), timescale: 1000000), toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000), completionHandler: {[weak self] finished in
                            if finished && (self?.isNormalPlaying ?? false) {
                                self?.recordPlayer?.play()
-                               BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero))")
+                               BFLog(1, message: "录音开始播放 playing, \(second), \(CMTimeGetSeconds(self?.recordPlayer?.currentItem?.duration ?? .zero)), \(self?.recordPlayer?.currentItem?.currentTime().seconds)")

                            }
                        })
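
The seek change is the substantive fix in this hunk: CMTime(value:timescale:) expects the value expressed in timescale units, so passing CMTimeValue(second) with a timescale of 100 truncated the fractional seconds and then divided by 100, seeking to roughly second/100 rather than to second. A short illustration with an assumed example value:

import CoreMedia

let second = 3.47  // seconds into the recorded clip (example value)

// CMTime represents value / timescale seconds.
let wrong = CMTime(value: CMTimeValue(second), timescale: 100)                     // 3 / 100 = 0.03 s
let fixed = CMTime(value: CMTimeValue(second * 1_000_000), timescale: 1_000_000)   // 3_470_000 / 1_000_000 ≈ 3.47 s

// Equivalent target time, using the convenience initializer:
let alsoFixed = CMTime(seconds: second, preferredTimescale: 1_000_000)
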
@@ -623,7 +642,7 @@ public class BFRecordScreenController: BFBaseViewController {
    func play(){
        BFLog(1, message: "开始播放 \(self.currentAssetProgress.seconds)")
        isNormalPlaying = true
-       assetPlayer?.volume = 0.2
+//       assetPlayer?.volume = 0.2
        movie?.startProcessing()
        if isEndPlay {
            isEndPlay = false
@@ -676,24 +695,24 @@ public class BFRecordScreenController: BFBaseViewController {
            }
        })

-//        let option = PHImageRequestOptions()
-//        option.isNetworkAccessAllowed = true //允许下载iCloud的图片
-//        option.resizeMode = .fast
-//        option.deliveryMode = .highQualityFormat
-//        PHImageManager.default().requestImage(for: asset,
-//                                              targetSize: self.view.bounds.size,
-//                                              contentMode: .aspectFit,
-//                                              options: option)
-//        { (image, nil) in
-//            // 设置首帧/封面
-//            if image != nil {
-//                let pic = GPUImagePicture(image: image)
-//                let filet = GPUImageFilter()
-//                pic?.addTarget(filet)
-//                filet.addTarget(self.playView)
-//                pic?.processImage()
-//            }
-//        }
+        let option = PHImageRequestOptions()
+        option.isNetworkAccessAllowed = true // allow downloading iCloud images
+        option.resizeMode = .fast
+        option.deliveryMode = .highQualityFormat
+        PHImageManager.default().requestImage(for: asset,
+                                              targetSize: self.view.bounds.size,
+                                              contentMode: .aspectFit,
+                                              options: option)
+        { (image, _) in
+            // Set the first frame / cover image
+            if image != nil {
+                let pic = GPUImagePicture(image: image)
+                let filet = GPUImageFilter()
+                pic?.addTarget(filet)
+                filet.addTarget(self.playView)
+                pic?.processImage()
+            }
+        }

        PHCachingImageManager().requestAVAsset(forVideo: asset, options: options, resultHandler: {[weak self] (asset: AVAsset?, audioMix: AVAudioMix?, info) in
            if let urlasset = asset as? AVURLAsset {
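
The previously commented-out PHImageManager request is re-enabled above to push a cover frame through GPUImage before the AVAsset is ready. One behavior worth keeping in mind: with a delivery mode other than .highQualityFormat (for example .opportunistic), requestImage can call its handler more than once, first with a degraded image, and PHImageResultIsDegradedKey in the info dictionary identifies that pass. A hypothetical helper that filters it out:

import Photos
import UIKit

/// Requests a cover image and ignores the degraded pass that `.opportunistic`
/// delivery can produce. Hypothetical helper; the code above uses
/// `.highQualityFormat`, which calls the handler only once.
func requestCover(for asset: PHAsset, targetSize: CGSize, completion: @escaping (UIImage) -> Void) {
    let option = PHImageRequestOptions()
    option.isNetworkAccessAllowed = true
    option.deliveryMode = .opportunistic
    PHImageManager.default().requestImage(for: asset,
                                          targetSize: targetSize,
                                          contentMode: .aspectFit,
                                          options: option) { image, info in
        let isDegraded = (info?[PHImageResultIsDegradedKey] as? Bool) ?? false
        if let image = image, !isDegraded {
            completion(image)
        }
    }
}
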
@@ -732,6 +751,7 @@ public class BFRecordScreenController: BFBaseViewController {
            assetPlayer?.replaceCurrentItem(with: item)
        }else {
            assetPlayer = AVPlayer(playerItem: item)
+           assetPlayer?.volume = 0
            avplayerTimeObserver = assetPlayer?.addPeriodicTimeObserver(forInterval: CMTime(value: 1, timescale: 100), queue: DispatchQueue.global()) {[weak self] time in
                // Progress monitoring

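
Muting now happens once, at player creation, instead of being toggled in play() and in the record path, so the original video track stays silent while the recorded voice is layered on top. A sketch of this setup and of the periodic observer it feeds (the file URL and the cleanup call are illustrative assumptions):

import AVFoundation

let player = AVPlayer(playerItem: AVPlayerItem(url: URL(fileURLWithPath: "/tmp/example.mp4")))  // assumed path
player.volume = 0  // the original audio track stays muted; the recorded voice plays separately

let interval = CMTime(value: 1, timescale: 100)  // a tick every 10 ms
let token = player.addPeriodicTimeObserver(forInterval: interval, queue: .global()) { _ in
    // progress handling runs here roughly 100 times per second;
    // UI updates must be dispatched back to the main queue
}

// Later, before discarding the player:
player.removeTimeObserver(token)
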
@@ -739,14 +759,15 @@ public class BFRecordScreenController: BFBaseViewController {
                BFLog(1, message: "curr:\(CMTimeGetSeconds(time))")
                if CMTimeGetSeconds(item.duration) > 0 {
                    DispatchQueue.main.async { [weak self] in
-                       self?.progreddL.text = String(format: "%.2f", CMTimeGetSeconds(time), CMTimeGetSeconds(item.duration))
-                       if !(self?.isDragingProgressSlder ?? false){
+                       self?.progreddL.text = String(format: "%@", CMTimeGetSeconds(time).formatDurationToHMS())
+                       let su = !(self?.isDragingProgressSlder ?? false) || !(self?.isRecording ?? false && self?.isNormalPlaying ?? false)
+                       if su{
                            self?.progressThumV.progress = time.seconds
                        }
                    }
                }

-               if (self?.isNormalPlaying ?? false) || (self?.isRecording ?? false) {
+               if self?.isNormalPlaying ?? false {
                    // Play the matching recorded audio
                    self?.playRecord(at: time)
                }
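
The progress label now goes through formatDurationToHMS() instead of a raw "%.2f" seconds value. That helper is assumed to be an existing extension in the project; a sketch of one plausible implementation on Double (the type CMTimeGetSeconds returns):

import Foundation

// Assumed shape of the project's formatDurationToHMS() helper, for illustration only.
extension Double {
    func formatDurationToHMS() -> String {
        let total = Int(self.rounded())
        let hours = total / 3600
        let minutes = (total % 3600) / 60
        let seconds = total % 60
        return hours > 0
            ? String(format: "%02d:%02d:%02d", hours, minutes, seconds)
            : String(format: "%02d:%02d", minutes, seconds)
    }
}

// Example: 75.4.formatDurationToHMS() == "01:15"
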
@@ -790,7 +811,7 @@ public class BFRecordScreenController: BFBaseViewController {
        if let duration = assetPlayer?.currentItem?.duration {
            self.currentAssetProgress = CMTime(value: CMTimeValue(progress * Float(CMTimeGetSeconds(duration)) * 100), timescale: 100)
            DispatchQueue.main.async {[weak self] in
-               self!.progreddL.text = String(format: "%.2f", CMTimeGetSeconds(self!.currentAssetProgress))
+               self!.progreddL.text = String(format: "%@", CMTimeGetSeconds(self!.currentAssetProgress).formatDurationToHMS())
            }

            assetPlayer!.seek(to: self.currentAssetProgress, toleranceBefore: CMTime(value: 1, timescale: 1000000), toleranceAfter: CMTime(value: 1, timescale: 1000000)) { finished in