Browse files

Add speech-to-text SDK

jsonwang 3 years ago
parent
commit
107579b8cc

+ 1 - 0
BFFramework.podspec

@@ -61,4 +61,5 @@ TODO: Add long description of the pod here.
   s.dependency 'LMJHorizontalScrollText'  ,'2.0.2'
   s.dependency 'TXLiteAVSDK_Player'       ,'9.3.10765' # Tencent player component
   s.dependency 'Bugly'                    ,'2.5.90'   # crash log collection
+  s.dependency 'NuiSDK'                   ,'2.5.14'
 end

+ 1 - 0
BFFramework/Classes/BFFramework_custom_umbrella.h

@@ -19,5 +19,6 @@
 #import "MemoryCoculation.h"
 #import <BFCommonKit/BFCommonKit-Swift.h>
 #import <BFNetRequestKit/BFNetRequestKit-Swift.h>
+#import "PQSpeechTranscriberUtil.h"
 
 #endif /* AliyunOSSiOS_Bridging_Header */

+ 62 - 0
BFFramework/Classes/Utils/NLSVoiceRecorder.h

@@ -0,0 +1,62 @@
+//
+//  NlsVoiceRecorder.h
+//  NlsClientSDK
+//
+//  Created by Shawn Chain on 13-11-22.
+//  Copyright (c) 2015 Alibaba iDST. All rights reserved.
+//
+
+#import <Foundation/Foundation.h>
+
+/**
+ * @discussion Callback interface for NlsVoiceRecorder.
+ */
+@protocol NlsVoiceRecorderDelegate <NSObject>
+
+/**
+ * @discussion Called on the main thread when the recorder has started.
+ */
+-(void) recorderDidStart;
+
+/**
+ * @discussion Called on the main thread when the recorder has stopped.
+ */
+-(void) recorderDidStop;
+
+/**
+ * @discussion Called when the recorder has captured a frame of audio. This usually involves VAD and compression; to avoid blocking the main thread it is invoked on the AudioQueue thread, so pay attention to thread safety!
+ */
+-(void) voiceRecorded:(NSData*) frame;
+
+/**
+ * @discussion Called when the recorder cannot be opened or another error occurs.
+ */
+-(void) voiceDidFail:(NSError*)error;
+@end
+
+
+
+/**
+ * @discussion A voice recorder wrapping the AudioQueue C API.
+ */
+@interface NlsVoiceRecorder : NSObject
+
+@property(nonatomic,assign) id<NlsVoiceRecorderDelegate> delegate;
+
+@property(nonatomic,readonly) NSUInteger currentVoiceVolume;
+
+/**
+ * Start recording.
+ */
+-(void)start;
+
+/**
+ * Stop recording.
+ */
+-(void)stop:(BOOL)shouldNotify;
+
+/**
+ * Whether recording is in progress.
+ */
+-(BOOL)isStarted;
+@end
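
For reference, a minimal sketch of a delegate implementation (hypothetical class name, not part of this commit). As the implementation that follows notes, frames arrive as fixed 640-byte chunks of 16 kHz, 16-bit mono PCM (20 ms each), and voiceRecorded: is invoked on the AudioQueue thread, so the delegate should only buffer or forward the data there:

    #import <Foundation/Foundation.h>
    #import "NLSVoiceRecorder.h"

    // Hypothetical consumer; "MyRecorderSink" is illustrative only.
    @interface MyRecorderSink : NSObject <NlsVoiceRecorderDelegate>
    @property (nonatomic, strong) NSMutableData *buffer;
    @end

    @implementation MyRecorderSink
    - (void)recorderDidStart { self.buffer = [NSMutableData data]; }   // main thread
    - (void)recorderDidStop  { NSLog(@"captured %lu bytes", (unsigned long)self.buffer.length); } // main thread
    - (void)voiceRecorded:(NSData *)frame {
        // AudioQueue thread: keep this cheap and thread-safe.
        @synchronized (self.buffer) { [self.buffer appendData:frame]; }
    }
    - (void)voiceDidFail:(NSError *)error { NSLog(@"recorder error: %@", error); }
    @end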

+ 359 - 0
BFFramework/Classes/Utils/NLSVoiceRecorder.m

@@ -0,0 +1,359 @@
+//
+//  NlsVoiceRecorder.m
+//  NuiDemo
+//
+//  Created by Shawn Chain on 13-11-22.
+//  Copyright (c) 2015 Alibaba iDST. All rights reserved.
+//
+
+#import "NLSVoiceRecorder.h"
+
+#import <AudioToolbox/AudioToolbox.h>
+#import <UIKit/UIApplication.h>
+#import <AVFoundation/AVFoundation.h>
+
+#define QUEUE_BUFFER_COUNT 3
+#define QUEUE_BUFFER_SIZE 640
+#define PCM_FRAME_BYTE_SIZE 640
+
+typedef enum {
+    STATE_INIT = 0,
+    STATE_START,
+    STATE_STOP
+}NlsVoiceRecorderState;
+ 
+#pragma mark - NlsVoiceRecorder Implementation
+
+@interface NlsVoiceRecorder(){
+    AudioQueueRef mQueue;
+    BOOL _inBackground;
+}
+@property(atomic,assign) NlsVoiceRecorderState state;
+@property(nonatomic,strong) NSMutableData *bufferedVoiceData;
+@property(nonatomic,assign,readwrite) NSUInteger currentVoiceVolume;
+@property(nonatomic,copy) NSString *originCategory;
+
+@end
+
+
+@implementation NlsVoiceRecorder
+
+-(id)init{
+    self = [super init];
+    if(self){
+        
+        static BOOL _audioSessionInited = NO;
+        if(!_audioSessionInited){
+            // Force the audio session to initialize once; AudioSessionInitialize is deprecated since iOS 7, see Apple's docs for details.
+            _audioSessionInited = YES;
+            AudioSessionInitialize(NULL, NULL, NULL, NULL);
+        }
+        self.bufferedVoiceData = [NSMutableData data];
+        // register for app resign/active notifications for recorder state
+        [self _registerForBackgroundNotifications];
+    }
+    return self;
+}
+
+-(void)dealloc{
+    [self _unregisterForBackgroundNotifications];
+
+    [self stop:NO];
+    
+    [self _disposeAudioQueue];
+    
+    self.originCategory=nil;
+}
+
+-(void)start{
+    // perform the permission check
+    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
+    self.originCategory = audioSession.category;
+ 
+    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker error:nil];
+
+    BOOL isHeadsetMic = false;
+    NSArray* inputs = [audioSession availableInputs];
+    AVAudioSessionPortDescription *preBuiltInMic = nil;
+    for (AVAudioSessionPortDescription* port in inputs) {
+        if ([port.portType isEqualToString:AVAudioSessionPortBuiltInMic]) {
+            preBuiltInMic = port;
+        } else if ([port.portType isEqualToString:AVAudioSessionPortHeadsetMic]) {
+            isHeadsetMic = true;
+        }
+    }
+    // Find the preferred microphone
+    AVAudioSessionPortDescription *builtInMic = nil;
+    if (!isHeadsetMic) {
+        if (preBuiltInMic != nil)
+            builtInMic = preBuiltInMic;
+        for (AVAudioSessionDataSourceDescription* descriptions in builtInMic.dataSources) {
+            if ([descriptions.orientation isEqual:AVAudioSessionOrientationFront]) {
+                [builtInMic setPreferredDataSource:descriptions error:nil];
+                [audioSession setPreferredInput:builtInMic error:nil];
+                NSLog(@"mic in %@ %@", builtInMic.portType, descriptions.description);
+                break;
+            }
+        }
+    } else {
+        NSLog(@"mic isHeadsetMic %@", builtInMic.portType);
+    }
+
+    //    [audioSession setInputDataSource:AVAudioSessionOrientationBack error:nil];
+    if ([audioSession respondsToSelector:@selector(requestRecordPermission:)]) {
+        [audioSession performSelector:@selector(requestRecordPermission:) withObject:^(BOOL allow){
+            if(allow){
+                [self _start];
+                
+            }else{
+                // no permission
+                ;
+            }
+        }];
+    }else{
+        [self _start];
+    }
+}
+
+-(void)_start{
+    if(self.state == STATE_START){
+        NSLog(@"in recorder _start, state has started!");
+        return;
+    }
+    
+    if([self _createAudioQueue] && [self _startAudioQueue]){
+        self.bufferedVoiceData = [NSMutableData data];
+        self.state = STATE_START;
+        // we're started, notify the delegate
+        if([_delegate respondsToSelector:@selector(recorderDidStart)]){
+            dispatch_async(dispatch_get_main_queue(), ^{
+                [self->_delegate recorderDidStart];
+            });
+        }
+    }else{
+        ;
+    }
+}
+
+-(void)stop:(BOOL)shouldNotify{
+    if(self.state == STATE_STOP){
+        NSLog(@"in recorder stop, state has stopped!");
+        return;
+    }
+    
+    self.state = STATE_STOP;
+    
+    [self _stopAudioQueue];
+    [self _disposeAudioQueue];
+    
+    self.bufferedVoiceData = nil;
+    [[AVAudioSession sharedInstance] setCategory:self.originCategory error:nil];
+    
+    if(shouldNotify && _delegate){
+        dispatch_async(dispatch_get_main_queue(), ^{
+            [self->_delegate recorderDidStop];
+        });
+    }
+}
+
+-(BOOL)isStarted{
+    return self.state == STATE_START;
+}
+
+#pragma mark - Internal implementations
+
+-(void)_updateCurrentVoiceVolume{
+    if (mQueue) {
+        //FIXME - delay calculate the volume
+        static int skipFrame = 0;
+        if(skipFrame++ == 3){
+            skipFrame = 0;
+            // To read data for multiple channels, an array of AudioQueueLevelMeterState would be needed;
+            // multi-channel handling is skipped here and only the last channel's result is shown.
+            UInt32 data_sz = sizeof(AudioQueueLevelMeterState);
+            AudioQueueLevelMeterState levelMeter;
+            OSErr status = AudioQueueGetProperty(mQueue, kAudioQueueProperty_CurrentLevelMeterDB, &levelMeter, &data_sz);
+            if (status == noErr) {
+                _currentVoiceVolume = (levelMeter.mAveragePower+50)*2;
+            }
+        }
+    }
+}
+
+static void inputBufferHandler(void *                          inUserData,
+                               AudioQueueRef                   inAQ,
+                               AudioQueueBufferRef             inBuffer,
+                               const AudioTimeStamp *          inStartTime,
+                               UInt32                          inNumberPacketDescriptions,
+                               const AudioStreamPacketDescription *inPacketDescs){
+    @autoreleasepool {
+        
+        NlsVoiceRecorder *recorder = (__bridge NlsVoiceRecorder*) inUserData;
+        if(recorder.isStarted){
+            // Sometimes the AudioQueueBuffer size is not the preset 640 bytes, so the data needs to be buffered
+            NSData *frame = [recorder _bufferPCMFrame:inBuffer];
+            if(frame){
+                [recorder _handleVoiceFrame:frame];
+            }
+            AudioQueueEnqueueBuffer(inAQ, inBuffer, 0, NULL);
+        }else{
+            NSLog(@"WARN: - recorder is stopped, ignoring the callback data %d bytes",(int)inBuffer->mAudioDataByteSize);
+        }
+    }
+}
+
+/*
+ * Allocate audio queue and buffers
+ */
+-(BOOL) _createAudioQueue{
+    @synchronized(self){
+        if(mQueue != NULL){
+            return YES;
+        }
+        // parameters: configure the AudioQueue stream format
+        AudioStreamBasicDescription format;
+        memset(&format, 0, sizeof(format));
+        format.mFormatID = kAudioFormatLinearPCM;
+        format.mSampleRate = 16000;
+        format.mChannelsPerFrame = 1;
+        format.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked;
+        format.mBitsPerChannel = 16;
+        format.mBytesPerPacket =  (format.mBitsPerChannel >> 3) * format.mChannelsPerFrame;
+        format.mBytesPerFrame = format.mBytesPerPacket;
+        format.mFramesPerPacket = 1;
+        
+        // queue
+        OSStatus result = AudioQueueNewInput(&format, inputBufferHandler, (__bridge void * _Nullable)(self), NULL, NULL, 0, &mQueue);
+        if (result != noErr) {
+            mQueue = NULL;
+            return NO;
+        }
+        AudioQueueSetParameter(mQueue, kAudioQueueParam_Volume, 1.0f);
+
+        return YES;
+    }
+}
+
+-(void) _disposeAudioQueue{
+    if(mQueue == NULL){
+        return;
+    }
+    
+    AudioQueueDispose(mQueue, true);
+    mQueue = NULL;
+}
+
+-(BOOL) _startAudioQueue{
+    NSAssert(mQueue,@"mQueue is null");
+    
+    OSStatus result = noErr;
+    
+    // buffers
+    AudioQueueBufferRef queueBuffer;
+    for (int i = 0; i < QUEUE_BUFFER_COUNT; ++i) {
+        queueBuffer = NULL;
+        if((result = AudioQueueAllocateBuffer(mQueue, QUEUE_BUFFER_SIZE, &queueBuffer)) != noErr) {
+            NSLog(@"AudioQueueAllocateBuffer error %d", (int)result);
+            [self _disposeAudioQueue];
+            return NO;
+        }
+        if((result = AudioQueueEnqueueBuffer(mQueue, queueBuffer, 0, NULL)) != noErr) {
+            NSLog(@"AudioQueueEnqueueBuffer error %d", (int)result);
+            [self _disposeAudioQueue];
+            return NO;
+        }
+    }
+    
+    if ((result = AudioQueueStart(mQueue, NULL)) != noErr) {
+        NSLog(@"AudioQueueStart error %d",(int)result);
+        [self _disposeAudioQueue];
+        return NO;
+    }
+    
+    //TODO - do we need level metering?
+    UInt32 val = 1;
+    AudioQueueSetProperty(mQueue, kAudioQueueProperty_EnableLevelMetering, &val, sizeof(UInt32));
+    
+    return YES;
+}
+
+-(void) _stopAudioQueue{
+    if(mQueue == NULL){
+        return;
+    }
+    AudioQueueStop(mQueue, true);
+    AudioSessionSetActive(NO);
+}
+
+
+/*
+ * The frame length returned by the AudioQueue is not fixed, so the data is buffered here and a frame is
+ * returned only once 640 bytes have accumulated. 640 bytes = 320 samples at 16 bit = 20 ms at 16 kHz.
+ */
+
+- (NSData*) _bufferPCMFrame:(AudioQueueBufferRef)aqBuffer{
+    NSAssert(_bufferedVoiceData != nil,@"_bufferVoiceData is nil" );
+    
+    NSInteger nBufferSpaceLeft = PCM_FRAME_BYTE_SIZE - _bufferedVoiceData.length;
+    
+    NSInteger nBytesReceived = aqBuffer->mAudioDataByteSize;
+    NSInteger nBytesToCopy = nBufferSpaceLeft >= nBytesReceived ?nBytesReceived:nBufferSpaceLeft;
+    NSInteger nBytesLeft = nBytesReceived - nBytesToCopy;
+    
+    [_bufferedVoiceData appendBytes:aqBuffer->mAudioData length:nBytesToCopy];
+    
+    if(_bufferedVoiceData.length == PCM_FRAME_BYTE_SIZE){
+        // buffer is full
+        NSData *frame = [NSData dataWithData:_bufferedVoiceData];
+        // reset the buffer
+        _bufferedVoiceData.length = 0;
+        
+        // save the left partial data
+        if(nBytesLeft > 0){
+            [_bufferedVoiceData appendBytes:(aqBuffer->mAudioData + nBytesToCopy) length:nBytesLeft];
+        }
+        return frame;
+    }
+    
+    return nil;
+}
+
+-(void) _handleVoiceFrame:(NSData*)voiceFrame {
+    [self _updateCurrentVoiceVolume];
+    if(_delegate){
+        if(/* DISABLES CODE */ (true)){
+            [_delegate voiceRecorded:voiceFrame];
+        }else{
+            [((NSObject*)_delegate) performSelectorOnMainThread:@selector(voiceRecorded:) withObject:voiceFrame waitUntilDone:NO];
+        }
+    }
+}
+
+#pragma mark - Background Notifications
+- (void)_registerForBackgroundNotifications {
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(_appResignActive)
+                                                 name:UIApplicationWillResignActiveNotification
+                                               object:nil];
+    
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(_appEnterForeground)
+                                                 name:UIApplicationWillEnterForegroundNotification
+                                               object:nil];
+}
+
+- (void)_unregisterForBackgroundNotifications {
+    [[NSNotificationCenter defaultCenter] removeObserver:self];
+}
+
+
+- (void)_appResignActive {
+    _inBackground = true;
+    AudioSessionSetActive(NO);
+}
+
+- (void)_appEnterForeground {
+    _inBackground = false;
+}
+
+@end

+ 19 - 0
BFFramework/Classes/Utils/PQSpeechTranscriberUtil.h

@@ -0,0 +1,19 @@
+//
+//  PQSpeechTranscriberUtil.h
+//  Pods
+//
+//  Created by ak on 2021/12/8.
+//  Purpose: a utility class for real-time speech-to-text using the Alibaba Cloud SDK
+
+#import <Foundation/Foundation.h>
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface PQSpeechTranscriberUtil : NSObject
+
+///  Start recognition
+- (void)startTranscriber;
+@end
+
+NS_ASSUME_NONNULL_END
+

+ 277 - 0
BFFramework/Classes/Utils/PQSpeechTranscriberUtil.m

@@ -0,0 +1,277 @@
+//
+//  PQSpeechTranscriberUtil.m
+//  BFFramework
+//
+//  Created by ak on 2021/12/8.
+//  Documentation: https://help.aliyun.com/document_detail/173528.html
+
+#import <Foundation/Foundation.h>
+
+#import "NeoNui.h"
+
+#import "PQSpeechTranscriberUtil.h"
+#import <AdSupport/ASIdentifierManager.h>
+#import "NLSVoiceRecorder.h"
+
+@interface PQSpeechTranscriberUtil () <NeoNuiSdkDelegate, NlsVoiceRecorderDelegate>
+@property(nonatomic,strong) NeoNui* nui;
+@property(nonatomic,strong) NlsVoiceRecorder *voiceRecorder;
+@property(nonatomic,strong) NSMutableData *recordedVoiceData;
+//
+@end
+
+@implementation PQSpeechTranscriberUtil
+- (id)init {
+    self = [super init];
+    if (self) {
+        _voiceRecorder = [[NlsVoiceRecorder alloc] init];
+        _voiceRecorder.delegate = self;
+
+        [self initNui];
+
+        NSString *version = [NSString stringWithUTF8String:[_nui nui_get_version]];
+        NSLog(@"nui_get_version is %@", version);
+    }
+    return self;
+}
+
+
+///  Start recognition
+- (void)startTranscriber{
+
+    if (_nui != nil) {
+        [_nui nui_dialog_start:MODE_P2T dialogParam:NULL];
+    } else {
+        NSLog(@"in StartButHandler no nui alloc");
+    }
+}
+
+- (void)endTranscriber{
+    self.recordedVoiceData = nil;
+    
+    if (_nui != nil) {
+        [_nui nui_dialog_cancel:NO];
+        [_voiceRecorder stop:YES];
+        dispatch_async(dispatch_get_main_queue(), ^{
+        
+        });
+    } else {
+        NSLog(@"in StopButHandler no nui alloc");
+    }
+}
+
+// Initialize the SDK
+- (void) initNui {
+    if (_nui == NULL) {
+        _nui = [NeoNui get_instance];
+        _nui.delegate = self;
+    }
+    // Note the parameter configuration here: the account-related values must be filled in via the getTicket method in Utils.m before the service can be accessed
+    NSString * initParam = [self genInitParams];
+
+    int initcode = [_nui nui_initialize:[initParam UTF8String] logLevel:LOG_LEVEL_VERBOSE saveLog:false];
+    NSLog(@"Initialization result: %d", initcode);
+    
+    NSString * parameters = [self genParams];
+    int setparamscode = [_nui nui_set_params:[parameters UTF8String]];
+    
+    NSLog(@"设置参数结果%d",setparamscode);
+}
+// Release the SDK
+- (void)terminateNui {
+    [_nui nui_release];
+}
+
+//Get Document Dir
+-(NSString *)dirDoc {
+    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
+    NSString *documentsDirectory = [paths objectAtIndex:0];
+    NSLog(@"app_home_doc: %@",documentsDirectory);
+    return documentsDirectory;
+}
+
+//create dir for saving files
+-(NSString *)createDir {
+    NSString *documentsPath = [self dirDoc];
+    NSFileManager *fileManager = [NSFileManager defaultManager];
+    NSString *testDirectory = [documentsPath stringByAppendingPathComponent:@"voices"];
+    // Create the directory
+    BOOL res = [fileManager createDirectoryAtPath:testDirectory withIntermediateDirectories:YES attributes:nil error:nil];
+    if (res) {
+        NSLog(@"Directory created successfully");
+    } else {
+        NSLog(@"Failed to create directory");
+    }
+    return testDirectory;
+}
+
+
+-(NSString*) genInitParams {
+    
+    NSString *strResourcesBundle = [[NSBundle mainBundle] pathForResource:@"Resources" ofType:@"bundle"];
+    NSString *bundlePath = [[NSBundle bundleWithPath:strResourcesBundle] resourcePath];
+    NSString *id_string = [[[ASIdentifierManager sharedManager] advertisingIdentifier] UUIDString];
+    NSString *debug_path = [self createDir];
+//
+    NSMutableDictionary *dictM = [NSMutableDictionary dictionary];
+
+    [dictM setObject:bundlePath forKey:@"workspace"];
+    [dictM setObject:debug_path forKey:@"debug_path"];
+    [dictM setObject:id_string forKey:@"device_id"];
+    [dictM setObject:@"false" forKey:@"save_wav"];
+    
+    // App key and token obtained from Alibaba Cloud for accessing the speech service
+    [dictM setObject:@"oTOh8zDVK6iswF9o" forKey:@"app_key"];
+    [dictM setObject:@"f9fea6e2a72e4187b24facf30016e090" forKey:@"token"];
+
+    // The token expires after 24 hours; see the getTicket implementation to fetch it dynamically from the Alibaba Cloud service
+//    [_utils getTicket:dictM];
+    [dictM setObject:@"wss://nls-gateway.cn-shanghai.aliyuncs.com:443/ws/v1" forKey:@"url"];
+    
+    NSData *data = [NSJSONSerialization dataWithJSONObject:dictM options:NSJSONWritingPrettyPrinted error:nil];
+    NSString * jsonStr = [[NSString alloc]initWithData:data encoding:NSUTF8StringEncoding];
+    return jsonStr;
+}
+-(NSString*) genParams {
+    // Parameter reference: https://help.aliyun.com/document_detail/173528.html
+    NSMutableDictionary *nls_config = [NSMutableDictionary dictionary];
+    // Whether to return intermediate recognition results. Default: false.
+    [nls_config setValue:@YES forKey:@"enable_intermediate_result"];
+
+    // Whether to add punctuation in post-processing. Default: false.
+    [nls_config setValue:@YES forKey:@"enable_punctuation_prediction"];
+    // Whether to apply ITN (inverse text normalization) in post-processing; when true, Chinese numerals are output as Arabic numerals. Default: false.
+    [nls_config setValue:@YES forKey:@"enable_inverse_text_normalization"];
+    [nls_config setValue:@YES forKey:@"enable_voice_detection"];
+    [nls_config setValue:@10000 forKey:@"max_start_silence"];
+    [nls_config setValue:@800 forKey:@"max_end_silence"];
+    // Sentence-break detection threshold: silence longer than this value is treated as a sentence break. Range: 200ms-2000ms. Default: 800ms.
+    [nls_config setValue:@800 forKey:@"max_sentence_silence"];
+    // Whether to return word-level information. Default: false.
+    [nls_config setValue:@NO forKey:@"enable_words"];
+    [nls_config setValue:@16000 forKey:@"sample_rate"];
+    // Audio encoding format; OPUS and raw PCM are supported. Default: OPUS.
+    [nls_config setValue:@"opus" forKey:@"sr_format"];
+    
+    //ADD BY AK
+    [nls_config setValue:@YES forKey:@"enable_sentence_detection"];
+    
+    
+    NSMutableDictionary *dictM = [NSMutableDictionary dictionary];
+    [dictM setObject:nls_config forKey:@"nls_config"];
+    [dictM setValue:@(SERVICE_TYPE_SPEECH_TRANSCRIBER) forKey:@"service_type"];
+    
+    
+//    If HttpDns is available, it can be configured here
+//    [dictM setObject:[_utils getDirectIp] forKey:@"direct_ip"];
+    
+    NSData *data = [NSJSONSerialization dataWithJSONObject:dictM options:NSJSONWritingPrettyPrinted error:nil];
+    NSString * jsonStr = [[NSString alloc]initWithData:data encoding:NSUTF8StringEncoding];
+    return jsonStr;
+}
+
+
+
+#pragma mark - Voice Recorder Delegate
+-(void) recorderDidStart{
+    NSLog(@"recorderDidStart");
+}
+
+-(void) recorderDidStop{
+    [self.recordedVoiceData setLength:0];
+    NSLog(@"recorderDidStop");
+}
+
+-(void) voiceRecorded:(NSData*) frame{
+    @synchronized(_recordedVoiceData){
+        [_recordedVoiceData appendData:frame];
+    }
+}
+
+-(void) voiceDidFail:(NSError*)error{
+    NSLog(@"recorder error ");
+}
+
+#pragma mark - Nui Listener
+-(void)onNuiEventCallback:(NuiCallbackEvent)nuiEvent
+                   dialog:(long)dialog
+                kwsResult:(const char *)wuw
+                asrResult:(const char *)asr_result
+                 ifFinish:(BOOL)finish
+                  retCode:(int)code {
+    NSLog(@"onNuiEventCallback event %d finish %d", nuiEvent, finish);
+    if (nuiEvent == EVENT_ASR_PARTIAL_RESULT || nuiEvent == EVENT_ASR_RESULT || nuiEvent == EVENT_SENTENCE_END) {
+       
+        NSString *result = [NSString stringWithUTF8String:asr_result];
+        NSLog(@"识别结果: %@ finish %d", result, finish);
+    } else if (nuiEvent == EVENT_ASR_ERROR) {
+        NSLog(@"EVENT_ASR_ERROR error[%d]", code);
+    } else if (nuiEvent == EVENT_MIC_ERROR) {
+        NSLog(@"MIC ERROR");
+        [_voiceRecorder stop:YES];
+        [_voiceRecorder start];
+    }
+    // When finish is true (either an error occurred or recognition completed), one task lifecycle has ended and a new recognition can be started
+    if (finish) {
+        dispatch_async(dispatch_get_main_queue(), ^{
+           
+        });
+    }
+    
+    return;
+}
+
+// Audio data callback: fill in the recorded audio data here.
+-(int)onNuiNeedAudioData:(char *)audioData length:(int)len {
+    static int emptyCount = 0;
+    @autoreleasepool {
+        @synchronized(_recordedVoiceData){
+            if (_recordedVoiceData.length > 0) {
+                int recorder_len = 0;
+                if (_recordedVoiceData.length > len)
+                    recorder_len = len;
+                else
+                    recorder_len = _recordedVoiceData.length;
+                NSData *tempData = [_recordedVoiceData subdataWithRange:NSMakeRange(0, recorder_len)];
+                [tempData getBytes:audioData length:recorder_len];
+                tempData = nil;
+                NSInteger remainLength = _recordedVoiceData.length - recorder_len;
+                NSRange range = NSMakeRange(recorder_len, remainLength);
+                [_recordedVoiceData setData:[_recordedVoiceData subdataWithRange:range]];
+                emptyCount = 0;
+                return recorder_len;
+            } else {
+                if (emptyCount++ >= 50) {
+                    NSLog(@"_recordedVoiceData length = %lu! empty 50times.", (unsigned long)_recordedVoiceData.length);
+                    emptyCount = 0;
+                }
+                return 0;
+            }
+
+        }
+    }
+    return 0;
+}
+
+-(void)onNuiAudioStateChanged:(NuiAudioState)state{
+    NSLog(@"onNuiAudioStateChanged state=%u", state);
+    if (state == STATE_CLOSE || state == STATE_PAUSE) {
+        [_voiceRecorder stop:YES];
+    } else if (state == STATE_OPEN){
+        self.recordedVoiceData = [NSMutableData data];
+        [_voiceRecorder start];
+    }
+}
+
+-(void)onNuiRmsChanged:(float)rms {
+    NSLog(@"onNuiRmsChanged rms=%f", rms);
+}
+
+@end
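
For orientation, a minimal usage sketch (hypothetical, not part of this commit). Only startTranscriber is exposed in PQSpeechTranscriberUtil.h; endTranscriber and terminateNui exist only in the implementation above, and recognition results are currently just logged from onNuiEventCallback:

    #import "PQSpeechTranscriberUtil.h"

    // Hypothetical call site; assumes the app_key/token produced by genInitParams are valid
    // (the hard-coded token expires after 24 hours) and that `transcriber` is a strong property
    // so the NuiSDK callbacks keep a live delegate.
    - (void)beginDictation {
        self.transcriber = [[PQSpeechTranscriberUtil alloc] init];
        [self.transcriber startTranscriber]; // NuiSDK opens the mic via onNuiAudioStateChanged and pulls 16 kHz PCM through onNuiNeedAudioData
    }

A caller that needs the recognized text would still have to add its own result callback or delegate on top of onNuiEventCallback, since the current implementation only NSLogs the ASR results.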