讯飞语音集成与语音识别 Demo
2019-05-30
作者:高思阳
初始化
// App entry point: the iFlytek SDK must be initialized before any speech
// service is used, so the demo wires it up here at launch.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
...
// Configure the iFlytek SDK (log level, work path, appid) — see below.
[self initIFlySetting];
...
}
// One-time iFlytek SDK setup: log level, work directory, and the mandatory
// appid-based engine initialization.
- (void)initIFlySetting
{
    // SDK log level; log files land in the work path configured below.
    [IFlySetting setLogFile:LVL_ALL];
    // Uncomment to mirror SDK logs to the Xcode console.
    //[IFlySetting showLogcat:YES];

    // Use the app's Caches directory as the SDK work path.
    NSString *workPath = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject];
    [IFlySetting setLogFilePath:workPath];

    // Engine configuration. The appid is required; this only needs to run
    // once per process.
    NSString *configString = [NSString stringWithFormat:@"appid=%@", IFLY_APPID];
    // createUtility must complete before any speech service is started.
    [IFlySpeechUtility createUtility:configString];
}
使用
#import "iflyMSC/iflyMSC.h"
#import "ISRDataHelper.h"
#import "IATConfig.h"
@interface ViewController () <UITextViewDelegate, AVAudioRecorderDelegate,
IFlySpeechRecognizerDelegate>
// Button that toggles recording/recognition on and off.
@property (nonatomic, strong) UIButton *btnAudio;
// iFlytek recognizer (SDK singleton; configured lazily in its getter).
@property (nonatomic, strong) IFlySpeechRecognizer *iFlySpeechRecognizer;
@property (nonatomic, assign) BOOL shouldContinueRecognize;// Whether recognition should keep restarting after each session (NO once the user cancels/stops).
@end
// Lazy getter: obtains the SDK's shared recognizer and applies the demo's
// recognition parameters exactly once.
- (IFlySpeechRecognizer *)iFlySpeechRecognizer
{
    if (_iFlySpeechRecognizer == nil) {
        // The SDK hands out a singleton recognizer instance.
        _iFlySpeechRecognizer = [IFlySpeechRecognizer sharedInstance];

        // Dictation ("iat") mode.
        [_iFlySpeechRecognizer setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
        // Recording file name; a nil/empty value disables saving. The default
        // directory is Library/cache.
        [_iFlySpeechRecognizer setParameter:@"iat.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
        _iFlySpeechRecognizer.delegate = self;

        IATConfig *config = [IATConfig sharedInstance];
        // Maximum recording duration.
        [_iFlySpeechRecognizer setParameter:config.speechTimeout forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
        // Trailing-silence endpoint (VAD end-of-speech).
        [_iFlySpeechRecognizer setParameter:config.vadEos forKey:[IFlySpeechConstant VAD_EOS]];
        // Leading-silence endpoint (VAD begin-of-speech).
        [_iFlySpeechRecognizer setParameter:config.vadBos forKey:[IFlySpeechConstant VAD_BOS]];
        // Network wait timeout.
        [_iFlySpeechRecognizer setParameter:@"20000" forKey:[IFlySpeechConstant NET_TIMEOUT]];
        // Sample rate; 16K is recommended.
        [_iFlySpeechRecognizer setParameter:config.sampleRate forKey:[IFlySpeechConstant SAMPLE_RATE]];
        // Recognition language.
        [_iFlySpeechRecognizer setParameter:config.language forKey:[IFlySpeechConstant LANGUAGE]];
        // Dialect/accent.
        [_iFlySpeechRecognizer setParameter:config.accent forKey:[IFlySpeechConstant ACCENT]];
        // Whether punctuation is included in results.
        [_iFlySpeechRecognizer setParameter:config.dot forKey:[IFlySpeechConstant ASR_PTT]];
    }
    return _iFlySpeechRecognizer;
}
// Stops and cancels any in-flight recognition session. Reads the ivar
// directly so the lazy getter never creates a recognizer just to tear it down.
- (void)cancelListenWrite
{
    if (_iFlySpeechRecognizer == nil) {
        return;
    }
    [_iFlySpeechRecognizer stopListening];
    [_iFlySpeechRecognizer cancel];
}
#pragma mark - Init
// Ensure the recognizer is fully shut down before this controller goes away.
- (void)dealloc
{
    [self cancelListenWrite];
}
// Record-and-recognize button tap: toggles between starting and stopping
// the listening session, and records whether sessions should auto-restart.
- (void)btnAudioInputEvent:(UIButton *)btn
{
    BOOL startRecording = !btn.selected;
    btn.selected = startRecording;
    self.shouldContinueRecognize = startRecording;
    if (startRecording) {
        [self.iFlySpeechRecognizer startListening];
    } else {
        [self.iFlySpeechRecognizer stopListening];
    }
}
#pragma mark - IFlySpeechRecognizerDelegate
// Recognition-result delegate callback.
//
// This may fire many times during a single session; accumulate text here and
// defer UI updates until isLast is YES. Each element of `results` is a
// dictionary whose KEYS carry the JSON result fragments.
- (void)onResults:(NSArray *)results isLast:(BOOL)isLast
{
    // Guard: an empty batch would make `results[0]` throw NSRangeException.
    NSDictionary *dic = results.firstObject;
    if (dic.count > 0) {
        NSMutableString *resultString = [[NSMutableString alloc] init];
        for (NSString *key in dic) {
            [resultString appendString:key];
        }
        NSString *resultFromJson = [ISRDataHelper stringFromJson:resultString];
        NSLog(@"单次听写结果:%@", resultFromJson);
        // appendString: raises on nil, so only append when parsing succeeded.
        if (resultFromJson.length > 0) {
            [self.resultText appendString:resultFromJson];
        }
    }
    if (isLast) {
        NSLog(@"听写结果:%@", self.resultText);
    }
}
// Session-completed delegate callback.
- (void)onCompleted:(IFlySpeechError *)error
{
    NSLog(@"IFly Listen Completed:%@",error.errorDesc);
    // Continuous dictation: immediately start a new session unless the user
    // toggled recognition off.
    // NOTE(review): this restarts even when `error` reports a failure, which
    // could loop on persistent errors (e.g. no network) — confirm intended.
    if (self.shouldContinueRecognize) {
        [self.iFlySpeechRecognizer startListening];
    }
}
// Delegate callback: recording stopped (silence detected or stopListening called).
- (void)onEndOfSpeech
{
NSLog(@"IFly Listen End of Speech");
}
// Delegate callback: recording started.
- (void)onBeginOfSpeech
{
NSLog(@"IFly Listen Begin of Speech");
}
// Delegate callback: input volume changed (unused by this demo).
- (void)onVolumeChanged:(int)volume
{
}
// Delegate callback: session was cancelled.
- (void)onCancel
{
NSLog(@"IFly Listen Cancel");
}
// Inserts newly recognized text at the current cursor position of _textView.
// A leading "," on the fragment is dropped when it would land at the very
// beginning of the text, where a separator makes no sense.
//
// Fixes vs. original: the empty-text-view path used to strip the FIRST
// CHARACTER unconditionally (even when it was not a comma), unlike the
// insert-at-position-0 path; a no-op stringWithFormat:@"%@" reassignment
// was also removed.
- (void)updateTextViewTextInsertedString:(NSString *)text
{
    if (text.length < 1) {
        return;
    }
    // Current cursor position inside the text view.
    NSUInteger location = _textView.selectedRange.location;
    // No usable cursor position: append at the end.
    if (location == NSNotFound || location >= _textView.text.length) {
        if (_textView.text.length < 1) {
            // Fragment starts the document — drop only a leading comma.
            text = [self gsy_stringByStrippingLeadingComma:text];
        }
        NSString *currentText = _textView.text ?: @"";
        _textView.text = [currentText stringByAppendingString:text];
        [self textViewDidChange:_textView];
        return;
    }
    // Inserting at position 0: a leading comma would dangle, remove it.
    if (location == 0) {
        text = [self gsy_stringByStrippingLeadingComma:text];
    }
    if (_textView.text.length < 1) {
        // Defensive: empty view (normally handled by the append path above).
        _textView.text = text;
        [self textViewDidChange:_textView];
        return;
    }
    // Splice the fragment in at the cursor.
    NSString *preText = [_textView.text substringToIndex:location];
    NSString *lastText = [_textView.text substringFromIndex:location];
    _textView.text = [NSString stringWithFormat:@"%@%@%@", preText, text, lastText];
    [self textViewDidChange:_textView];
    // Restore the cursor after the inserted fragment if desired:
    // _textView.selectedRange = NSMakeRange(location + text.length, 0);
}

// Returns `text` without a single leading ",", or `text` unchanged when it
// does not start with one. An all-comma single character becomes @"".
- (NSString *)gsy_stringByStrippingLeadingComma:(NSString *)text
{
    if (![text hasPrefix:@","]) {
        return text;
    }
    return (text.length == 1) ? @"" : [text substringFromIndex:1];
}