Voice Development on iOS: Using the iFlytek (科大訊飛) SDK
iFlytek (科大訊飛) offers one of the better speech SDKs made in China. First, register an account on the iFlytek website and complete its setup process to obtain an appid. The SDK then has to be initialized with that appid at app launch:
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    //xxxxxxx is your own appid; timeout is the network timeout in milliseconds
    NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@,timeout=%@",@"xxxxxxx",@"20000"];
    //this must run before any iFlytek service is started
    [IFlySpeechUtility createUtility:initString];
    return YES;
}
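One setup step the SDK cannot do for you: recording from the microphone requires an NSMicrophoneUsageDescription entry in Info.plist, and requesting permission explicitly before the first session avoids a surprise prompt mid-dictation. A minimal sketch (the helper name checkMicrophonePermission is illustrative, not part of the SDK):
#import <AVFoundation/AVFoundation.h>

//illustrative helper: ask for microphone access before starting recognition
- (void)checkMicrophonePermission
{
    [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
        if (!granted) {
            NSLog(@"microphone permission denied; speech recognition will not work");
        }
    }];
}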
Once initialization at launch is in place, declare the properties the recognizer needs and initialize it:
#pragma mark - Properties and controls used for speech recognition
@property (nonatomic, strong) NSString *pcmFilePath;//path of the recorded audio file
@property (nonatomic, strong) IFlyRecognizerView *iflyRecognizerView;//recognizer with a built-in UI
@property (nonatomic, strong) IFlyDataUploader *uploader;//data uploader
@property (nonatomic, strong) PopupView *popUpView;//popup view
@property (nonatomic, strong) NSString *result;//dictation result
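For the delegate assignment and the onResult:isLast: callback below to compile, the view controller must also adopt the recognizer view's delegate protocol. A minimal sketch, assuming the properties above sit in a class extension (the class name ViewController is a placeholder; header names follow the MSC demo layout and may differ by SDK version):
#import "iflyMSC/IFlyRecognizerView.h"
#import "iflyMSC/IFlyRecognizerViewDelegate.h"

@interface ViewController () <IFlyRecognizerViewDelegate>
//the properties above go here
@end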
#pragma mark - Speech section
-(void)viewDidLoad
{
    [super viewDidLoad];
    [self Call_JS_Voice];
}
-(void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    [self initRecognizer];//initialize the recognizer
    _result = [NSString new];//dictation result
}
-(void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    [_iflyRecognizerView cancel]; //cancel recognition
    [_iflyRecognizerView setDelegate:nil];
    [_iflyRecognizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
}
/**
 Initialize the recognizer parameters
 */
-(void)initRecognizer
{
    //UI-based recognition
    //create the UI instance once (lazy initialization)
    if (_iflyRecognizerView == nil) {
        //center the recognizer UI on screen
        _iflyRecognizerView = [[IFlyRecognizerView alloc] initWithCenter:self.view.center];
        [_iflyRecognizerView setParameter:@"" forKey:[IFlySpeechConstant PARAMS]];
        //set dictation mode
        [_iflyRecognizerView setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
    }
    _iflyRecognizerView.delegate = self;
    if (_iflyRecognizerView != nil) {
        //maximum recording duration, in milliseconds
        [_iflyRecognizerView setParameter:@"30000" forKey:[IFlySpeechConstant SPEECH_TIMEOUT]];
        //trailing endpoint: silence (ms) that ends a session
        [_iflyRecognizerView setParameter:@"3000" forKey:[IFlySpeechConstant VAD_EOS]];
        //leading endpoint: silence (ms) allowed before speech starts
        [_iflyRecognizerView setParameter:@"3000" forKey:[IFlySpeechConstant VAD_BOS]];
        //network timeout, in milliseconds
        [_iflyRecognizerView setParameter:@"20000" forKey:[IFlySpeechConstant NET_TIMEOUT]];
        //sample rate; 16K is recommended
        [_iflyRecognizerView setParameter:@"16000" forKey:[IFlySpeechConstant SAMPLE_RATE]];
        //recognition language: Simplified Chinese
        [_iflyRecognizerView setParameter:@"zh_cn" forKey:[IFlySpeechConstant LANGUAGE]];
        //whether to return punctuation (0 = no punctuation)
        [_iflyRecognizerView setParameter:@"0" forKey:[IFlySpeechConstant ASR_PTT]];
    }
}
/**
 UI-based dictation result callback
 resultArray: dictation results
 isLast: whether this is the final result
 */
- (void)onResult:(NSArray *)resultArray isLast:(BOOL)isLast
{
    //assemble the dictation result, e.g. to drive a search afterwards
    NSMutableString *result = [[NSMutableString alloc] init];
    NSDictionary *dic = [resultArray objectAtIndex:0];
    for (NSString *key in dic) {
        [result appendFormat:@"%@",key];
    }
    _result = [NSString stringWithFormat:@"%@%@",_result,result];
    [_iflyRecognizerView cancel];
}
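With the result type set to plain (see Call_JS_Voice below), the recognized text arrives as the keys of the dictionaries in resultArray, which is why the loop concatenates the keys rather than the values.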
/**
 Start listening
 */
-(void)Call_JS_Voice
{
    if (_iflyRecognizerView == nil)
    {
        [self initRecognizer];
    }
    //use the microphone as the audio source
    [_iflyRecognizerView setParameter:IFLY_AUDIO_SOURCE_MIC forKey:@"audio_source"];
    //return dictation results as plain text (use @"json" for structured results)
    [_iflyRecognizerView setParameter:@"plain" forKey:[IFlySpeechConstant RESULT_TYPE]];
    //save the recording in the SDK work path; if no work path is set, it defaults to Library/Caches
    [_iflyRecognizerView setParameter:@"asr.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
    _result = @"";
    [_iflyRecognizerView start];
}
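Since the recording is saved as asr.pcm and, per the comment above, the default work path is Library/Caches, the pcmFilePath property declared earlier can be pointed at it. A sketch under that assumption (the exact location depends on the SDK's work-path setting):
//locate the saved recording, assuming the default work path
NSString *cachesDir = [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) firstObject];
self.pcmFilePath = [cachesDir stringByAppendingPathComponent:@"asr.pcm"];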
/**
 Called when the user starts speaking
 */
- (void)onBeginOfSpeech
{
    NSLog(@"onBeginOfSpeech");
}
/**
 Dictation cancelled callback
 */
- (void)onCancel
{
    NSLog(@"recognition cancelled");
}
-(void)onError:(IFlySpeechError *)error
{
    //error callback; log the code to debug failed sessions
    NSLog(@"recognition error: %d", error.errorCode);
}
-(void)onResults:(NSArray *)results isLast:(BOOL)isLast
{
    //result callback for the no-UI recognizer; unused in the UI-based flow above
}
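Note that onResults:isLast:, onBeginOfSpeech and onCancel belong to the delegate of the SDK's no-UI recognizer (IFlySpeechRecognizerDelegate); IFlyRecognizerView itself only calls onResult:isLast: and onError:. If you want recognition without the built-in popup, a minimal sketch of that variant (startListeningWithoutUI is a hypothetical helper; parameters would be configured as in initRecognizer):
#import "iflyMSC/IFlySpeechRecognizer.h"

//hypothetical helper showing the no-UI variant
- (void)startListeningWithoutUI
{
    IFlySpeechRecognizer *recognizer = [IFlySpeechRecognizer sharedInstance];
    [recognizer setDelegate:self];
    //dictation mode, same as the UI-based setup
    [recognizer setParameter:@"iat" forKey:[IFlySpeechConstant IFLY_DOMAIN]];
    [recognizer startListening];
}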