@@ -95,65 +95,8 @@ public class BFRecordScreenController: BFBaseViewController {
    public var haveSpeakVolume : Float = 0.0
    public var noSpeakVolume : Float = 1.0

-    lazy var recorderManager : BFVoiceRecordManager = {
-        let manager = BFVoiceRecordManager()
-        manager.cancelRecordHandle = { _ in
-        }
-        manager.endRecordHandle = { [weak self] model, _ in
-            if let sself = self, let model = model, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
-                // Add the recording to the voice array
-                model.endTime = sself.currentAssetProgress.seconds
-
-                let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
-
-                var deletedVoices = [(PQVoiceModel, Int)]()
-
-                for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
-                    let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
-
-                    if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
-                        deletedVoices.append((m, i))
-                        continue
-                    }
-                }
-                sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
-                    let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
-                    return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
-                }
-                BFLog(1, message: "Adding recording file: \(model.startTime) -- \(model.endTime)")
-
-                var event = sself.events.last
-                if event != nil {
-                    event!.deletedVoices = deletedVoices
-                    sself.events.removeLast()
-                    sself.events.append(event!)
-                }
-                sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
-                if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
-                    var duration: Double = 0
-                    sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { _ in
-                        duration = duration + (Double(model.duration ?? "0") ?? 0)
-                    }
-                    sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
-                    sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
-                    model.endTime = sself.currentAssetProgress.seconds
-                    self?.isEndPlay = true
-                }
-                DispatchQueue.main.async {[weak self] in
-                    // Recording finished: redraw the undo button and update the record button
-                    self?.changeWithDrawBtnLayout(true)
-                    // Note: correct the current position when recording ends to avoid misalignment with the pointer
-                    self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
-                    self?.deleteRecordBtn.isHidden = true
-                    self?.recordBtn.isHidden = false
-                }
-                sself.currentPlayRecordIndex = -3 // Recording just finished; no need to play it back
-                // Reset the recording start time
-                sself.recordStartTime = 0
-            }
-        }
-        return manager
-    }()
+    // Voice recording manager
+    var recorderManager : BFVoiceRecordManager?

    // MARK: - View parameters

@@ -344,7 +287,7 @@ public class BFRecordScreenController: BFBaseViewController {
    }()

    // Speech recognition (ASR) for recordings
-    var speechTranscriberUtil : PQSpeechTranscriberUtil?
+//    var speechTranscriberUtil : PQSpeechTranscriberUtil?

    lazy var progressThumV : BFVideoThumbProgressView = {
        let vv = BFVideoThumbProgressView(frame: CGRect(x: 0, y: 54, width: cScreenWidth, height: 50))
@@ -414,7 +357,7 @@ public class BFRecordScreenController: BFBaseViewController {
        NotificationCenter.default.removeObserver(self)
        avplayerTimeObserver?.invalidate()
        if isRecording {
-            recorderManager.stopRecord(isCancel: true)
+            recorderManager?.stopRecord(isCancel: true)
        }
        assetPlayer?.pause()
        recordPlayer?.pause()
@@ -438,7 +381,116 @@ public class BFRecordScreenController: BFBaseViewController {
        // add by ak: fetch the NLS token
        BFRecordScreenViewModel.getNlsAccessToken { [weak self] token, appkey in
            BFLog(message: "nls appkey is \(appkey), token is \(token)")
-            self?.speechTranscriberUtil = PQSpeechTranscriberUtil(token, appid: appkey)
+            self?.recorderManager = BFVoiceRecordManager.init(token: token, appid: appkey)
+
+            self?.recorderManager?.cancelRecordHandle = { _ in
+            }
+            self?.recorderManager?.recorderProgrossHandle = {[weak self] progress in
+                BFLog(1, message: "curr: recording progress -- \(progress) \(self?.recordStartTime) \(self?.isRecording)")
+                if self?.indirectionView == nil {
+                    self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
+                    self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
+                }
+                // Update the recording progress
+                // Note: the video cannot be driven by the recording progress, because the player has not started playing yet when recording begins, so the two progress values diverge
+                // Note: when recording stops, the video player's progress keeps advancing; the error is around 80 ms
+                if self?.isRecording ?? false {
+                    let ratioX = 0.08
+                    self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: (progress ?? 0.0) - ratioX)
+                }
+                if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
+                    self?.imageRecordProgress(isRecord: true, progress: progress ?? 0)
+                }
+
+
+            }
+
+            self?.recorderManager?.subtitleRecordHandle = {[weak self] asrResult in
+                if(asrResult == nil){
+                    BFLog(message: "ASR result is empty; cannot generate subtitle data")
+                    return
+                }
+                let dicResult: [String: Any]? = jsonStringToDictionary(asrResult!)
+
+                let payload = dicResult?["payload"] as? [String: Any]
+
+                BFLog(message: "Recognition result: \((payload?["result"])!) startTime:\(self?.recorderManager?.voiceModel?.startTime ?? 0.0)")
+                DispatchQueue.main.async {
+                    // 1. Save the subtitle data. begin_time is when the text starts to appear and time is when it stops appearing, both in milliseconds relative to the whole recorded audio clip. self.recorderManager.voiceModel?.startTime is the time recording started, so both values must be offset by it.
+
+                    let newSubtitle = PQEditSubTitleModel()
+
+                    // Add 300 ms here because the returned result starts slightly early so it cuts in right on the word; the exact offset is undocumented and depends on the source audio. For now, delay by 300 ms. Unit: milliseconds.
+                    newSubtitle.timelineIn = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
+
+                    newSubtitle.timelineOut = (self?.recorderManager?.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
+                    var showText = ((payload?["result"]) as? String) ?? ""
+                    if showText.count > subtitleMaxlength {
+                        showText = showText.substring(to: subtitleMaxlength)
+                        showText += "..."
+                    }
+                    newSubtitle.text = showText
+
+                    BFLog(message: "Adding subtitle data: timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
+                    newSubtitle.setting = self?.subtitleSettingView.subtitle.setting ?? BFSubTitileSettingModel()

+                    self?.itemModels[self?.currItemModelIndex ?? 0].titleStickers.append(newSubtitle)
+                }
+
+            }
+            self?.recorderManager?.endRecordHandle = { [weak self] voideModel, _ in
+                if let sself = self, let model = voideModel, FileManager.default.fileExists(atPath: model.wavFilePath ?? "") {
+                    // Add the recording to the voice array
+                    model.endTime = sself.currentAssetProgress.seconds
+
+                    let newRange = CMTimeRange(start: CMTime(seconds: model.startTime, preferredTimescale: 1000), end: CMTime(seconds: model.endTime, preferredTimescale: 1000))
+
+                    var deletedVoices = [(PQVoiceModel, Int)]()
+
+                    for (i, m) in sself.itemModels[sself.currItemModelIndex].voiceStickers.enumerated() {
+                        let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+
+                        if CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0 {
+                            deletedVoices.append((m, i))
+                            continue
+                        }
+                    }
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.removeAll { m in
+                        let originRange = CMTimeRange(start: CMTime(seconds: m.startTime, preferredTimescale: 1000), end: CMTime(seconds: m.endTime, preferredTimescale: 1000))
+                        return CMTimeRangeGetIntersection(originRange, otherRange: newRange).duration.seconds > 0
+                    }
+                    BFLog(1, message: "Adding recording file: \(model.startTime) -- \(model.endTime)")
+
+                    var event = sself.events.last
+                    if event != nil {
+                        event!.deletedVoices = deletedVoices
+                        sself.events.removeLast()
+                        sself.events.append(event!)
+                    }
+                    sself.itemModels[sself.currItemModelIndex].voiceStickers.append(model)
+                    if sself.itemModels[sself.currItemModelIndex].mediaType == .IMAGE {
+                        var duration: Double = 0
+                        sself.itemModels[sself.currItemModelIndex].voiceStickers.forEach { _ in
+                            duration = duration + (Double(model.duration ?? "0") ?? 0)
+                        }
+                        sself.itemModels[sself.currItemModelIndex].materialDuraion = Double(String(format: "%.3f", duration)) ?? 0
+                        sself.currentAssetProgress = CMTime(seconds: duration, preferredTimescale: 1000)
+                        model.endTime = sself.currentAssetProgress.seconds
+                        self?.isEndPlay = true
+                    }
+                    DispatchQueue.main.async {[weak self] in
+                        // Recording finished: redraw the undo button and update the record button
+                        self?.changeWithDrawBtnLayout(true)
+                        // Note: correct the current position when recording ends to avoid misalignment with the pointer
+                        self?.indirectionView?.resetCurrentItem(start: model.startTime, end: model.endTime)
+                        self?.deleteRecordBtn.isHidden = true
+                        self?.recordBtn.isHidden = false
+                    }
+                    sself.currentPlayRecordIndex = -3 // Recording just finished; no need to play it back
+                    // Reset the recording start time
+                    sself.recordStartTime = 0
+                }
+            }
        }

        view.backgroundColor = .black
@@ -525,6 +577,8 @@ public class BFRecordScreenController: BFBaseViewController {
    /// Updates the subtitle; used during playback
    /// - Parameter time: the current playback progress
    func updateSubtitle(time: CMTime) {
+
+        BFLog(message: "currTime is \(CMTimeGetSeconds(time))")
        var findShowSubtitle: PQEditSubTitleModel?
        for (index, subtitle) in itemModels[currItemModelIndex].titleStickers.enumerated() {
            if subtitle.timelineIn <= CMTimeGetSeconds(time), subtitle.timelineOut >= CMTimeGetSeconds(time) {
@@ -761,46 +815,28 @@ public class BFRecordScreenController: BFBaseViewController {
        pause()
        isRecording = true

+        if(self.recorderManager == nil){
+            BFLog(message: "Recorder initialization error!!!")
+            return
+
+        }
+
        let model = PQVoiceModel()
        model.startTime = currentAssetProgress.seconds
        model.volume = 100
-        recorderManager.voiceModel = model
-        recorderManager.startRecord(index: 1)
+        recorderManager?.voiceModel = model
+        recorderManager?.startRecord(index: 1)
        if recordStartTime <= 0 {
            recordStartTime = currentAssetProgress.seconds
        }
        // Add an undo checkpoint
        events.append(WithDrawModel(type: 2, timestamp: model.startTime))
-
-//        DispatchQueue.main.async {[weak self] in
-//            let model = PQVoiceModel()
-//            model.startTime = self?.currentAssetProgress.seconds ?? 0
-//            model.volume = 100
-//            self?.recorderManager.voiceModel = model
-//            self?.recorderManager.startRecord(index: 1)
-//            if self?.recordStartTime ?? 0 <= 0 {
-//                self?.recordStartTime = self?.currentAssetProgress.seconds ?? 0
-//            }
-//            // Add an undo checkpoint
-//            self?.events.append(WithDrawModel(type: 2, timestamp: model.startTime))
-//
-//        }
-
-
-        isRecording = true
+

        if !avatarView.isHidden {
            avatarView.beginRecord()
        }
-
-        DispatchQueue.global().async { [weak self] in
-            guard let sself = self else {
-                return
-            }
-            sself.speechTranscriberUtil?.delegate = sself
-            sself.speechTranscriberUtil?.startTranscriber()
-        }
-
+
        if itemModels[currItemModelIndex].mediaType == .VIDEO {

            if !movieIsProcessing {
@@ -813,36 +849,17 @@ public class BFRecordScreenController: BFBaseViewController {
            // Paused state
            (collectionView.cellForItem(at: IndexPath(item: currItemModelIndex, section: 0)) as? BFImageCoverViewCell)?.playBtn.isSelected = true
        }
-
-        recorderManager.audioRecorder?.recorderProgross = { [weak self] progress in
-            BFLog(1, message: "curr: recording progress -- \(progress)")
-            if self?.indirectionView == nil {
-                self?.indirectionView = BFIndirectionProgressView(frame: self?.progressThumV.progessIndicateBackV.bounds ?? CGRect.zero, percenWidth: self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE ? (self?.progressThumV.thumbImageWidth ?? 0) / 2 : 0, totalDuration: self?.itemModels[self?.currItemModelIndex ?? 0].materialDuraion ?? 0)
-                self?.progressThumV.progessIndicateBackV.addSubview((self?.indirectionView)!)
-            }
-            // Update the recording progress
-            // Note: the video cannot be driven by the recording progress, because the player has not started playing yet when recording begins, so the two progress values diverge
-            // Note: when recording stops, the video player's progress keeps advancing; the error is around 80 ms
-            if self?.isRecording ?? false {
-                let ratioX = 0.08
-                self?.indirectionView?.setProgress(start: self?.recordStartTime ?? 0, progress: progress - ratioX)
-            }
-            if self?.itemModels[self?.currItemModelIndex ?? 0].mediaType == .IMAGE {
-                self?.imageRecordProgress(isRecord: true, progress: progress)
-            }
-        }
+
    }

    @objc func endRecord() {
-        DispatchQueue.global().async {
-            self.speechTranscriberUtil?.endTranscriber()
-        }
+
//        playBtn.isSelected = true
        // Save the recording
        isRecording = false
        pause()

-        recorderManager.endRecord()
+        recorderManager?.endRecord()

        if !avatarView.isHidden {
            avatarView.endRecord()
@@ -851,7 +868,7 @@ public class BFRecordScreenController: BFBaseViewController {

    func cancleRecord() {
        isRecording = false
-        recorderManager.cancleRecord()
+        recorderManager?.cancleRecord()

        pause()
    }
@@ -1401,58 +1418,6 @@
            }
        }
    }
-}
-
-extension BFRecordScreenController: GPUImageMovieDelegate {
-    public func didCompletePlayingMovie() {
-        BFLog(1, message: "Playback finished")
-        currentPlayRecordIndex = -1
-    }
-}
-
-extension BFRecordScreenController: AVAudioRecorderDelegate {
-    public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
-        BFLog(1, message: "Recording finished")
-    }
-}
-
-extension BFRecordScreenController: AVAudioPlayerDelegate {
-    public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
-        BFLog(1, message: "Recording playback finished")
-    }
-}
-
-// MARK: - Speech-to-text delegate
-
-extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
-    public func eventCallback(_: PQSpeechTranscriberUtil, asrResult: String) {
-        let dicResult: [String: Any]? = jsonStringToDictionary(asrResult)
-
-        let payload = dicResult?["payload"] as? [String: Any]
-
-        BFLog(message: "Recognition result: \((payload?["result"])!) startTime:\(recorderManager.voiceModel?.startTime ?? 0.0)")
-        DispatchQueue.main.async {
-            // 1. Save the subtitle data. begin_time is when the text starts to appear and time is when it stops appearing, both in milliseconds relative to the whole recorded audio clip. self.recorderManager.voiceModel?.startTime is the time recording started, so both values must be offset by it.
-
-            let newSubtitle = PQEditSubTitleModel()
-
-            // Add 300 ms here because the returned result starts slightly early so it cuts in right on the word; the exact offset is undocumented and depends on the source audio. For now, delay by 300 ms. Unit: milliseconds.
-            newSubtitle.timelineIn = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64((((payload?["begin_time"]) as? Int) ?? 0) + 300) / 1000.0
-
-            newSubtitle.timelineOut = (self.recorderManager.voiceModel?.startTime ?? 0.0) + Float64(((payload?["time"]) as? Int) ?? 0) / 1000.0
-            var showText = ((payload?["result"]) as? String) ?? ""
-            if showText.count > subtitleMaxlength {
-                showText = showText.substring(to: subtitleMaxlength)
-                showText += "..."
-            }
-            newSubtitle.text = showText
-
-            BFLog(message: "Adding subtitle data: timelineIn \(newSubtitle.timelineIn) timelineOut \(newSubtitle.timelineOut) text \(newSubtitle.text)")
-            newSubtitle.setting = self.subtitleSettingView.subtitle.setting
-
-            self.itemModels[self.currItemModelIndex].titleStickers.append(newSubtitle)
-        }
-    }

    func reloadMaterial(recordItem : BFRecordItemModel) {
@@ -1476,6 +1441,28 @@ extension BFRecordScreenController: PQSpeechTranscriberUtilDelegate {
    }
}

+extension BFRecordScreenController: GPUImageMovieDelegate {
+    public func didCompletePlayingMovie() {
+        BFLog(1, message: "Playback finished")
+        currentPlayRecordIndex = -1
+    }
+}
+
+extension BFRecordScreenController: AVAudioRecorderDelegate {
+    public func audioRecorderDidFinishRecording(_: AVAudioRecorder, successfully _: Bool) {
+        BFLog(1, message: "Recording finished")
+    }
+}
+
+extension BFRecordScreenController: AVAudioPlayerDelegate {
+    public func audioPlayerDidFinishPlaying(_: AVAudioPlayer, successfully _: Bool) {
+        BFLog(1, message: "Recording playback finished")
+    }
+
+}
+
+
+
+
// MARK: - UICollectionViewDelegate

/// UICollectionViewDelegate