First, the speech recognition part uses the iFlytek (科大讯飞) SDK.
1. Speech recognition
AppDelegate.m
#import "AppDelegate.h"
#import <iflyMSC/iflyMSC.h>
@interface AppDelegate ()
@end
@implementation AppDelegate
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
//register the iFlytek SDK with your appid
NSString *initString = [[NSString alloc] initWithFormat:@"appid=%@",@"your appid"];
[IFlySpeechUtility createUtility:initString];
return YES;
}
@end
ViewController.h
#import <UIKit/UIKit.h>
@interface ViewController : UIViewController
@end
ViewController.m
#import "ViewController.h"
#import <iflyMSC/iflyMSC.h>
@interface ViewController ()<IFlySpeechRecognizerDelegate>
{
//recognizer object without a built-in UI
IFlySpeechRecognizer *iFlySpeechRecognizer;
}
@property(nonatomic,strong)UILabel * showLabel;
@end
@implementation ViewController
-(void)viewDidLoad{
[super viewDidLoad];
self.showLabel = [[UILabel alloc]initWithFrame:CGRectMake(8,200,300,200)];
self.showLabel.textColor = [UIColor redColor];
self.showLabel.numberOfLines = 0;//let the recognized text wrap across multiple lines
[self.view addSubview:self.showLabel];
//1. Create the dictation recognizer (no built-in UI)
iFlySpeechRecognizer = [IFlySpeechRecognizer sharedInstance];
iFlySpeechRecognizer.delegate = self;
//2. Set dictation parameters: "iat" selects the dictation (speech-to-text) domain
[iFlySpeechRecognizer setParameter: @"iat" forKey: [IFlySpeechConstant IFLY_DOMAIN]];
//asr_audio_path is the recording file name; set the value to nil or empty to disable saving. The default directory is Library/cache.
[iFlySpeechRecognizer setParameter:@"asrview.pcm" forKey:[IFlySpeechConstant ASR_AUDIO_PATH]];
UIButton * startButton = [[UIButton alloc]initWithFrame:CGRectMake(20,100,100,50)];
[startButton addTarget:self action:@selector(startButtonDidClick) forControlEvents:UIControlEventTouchUpInside];
startButton.backgroundColor = [UIColor orangeColor];
[startButton setTitle:@"开始录音" forState:UIControlStateNormal];
[self.view addSubview:startButton];
UIButton * endButton = [[UIButton alloc]initWithFrame:CGRectMake(200,100,100,50)];
[endButton addTarget:self action:@selector(endButtonDidClick) forControlEvents:UIControlEventTouchUpInside];
endButton.backgroundColor = [UIColor orangeColor];
[endButton setTitle:@"结束录音" forState:UIControlStateNormal];
[self.view addSubview:endButton];
}
-(void)startButtonDidClick{
//3. Start the recognition service
[iFlySpeechRecognizer startListening];
}
-(void)endButtonDidClick{
//4. Stop the recognition service
[iFlySpeechRecognizer stopListening];
}
/**
Recognition result callback: results is an array of dictionaries whose keys are JSON strings; isLast marks the final callback of the session
*/
- (void) onResults:(NSArray *) results isLast:(BOOL)isLast{
NSMutableString * resultString = [[NSMutableString alloc]init];
if (!isLast) {
NSDictionary *dic = results[0];
NSArray * keys = [dic allKeys];
for (NSString *key in keys) {
NSData * resData = [key dataUsingEncoding:NSUTF8StringEncoding];
NSDictionary * resultFromJson = [NSJSONSerialization JSONObjectWithData:resData options:NSJSONReadingAllowFragments error:nil];
NSArray * tempArray = resultFromJson[@"ws"];
for (NSDictionary * tempDic in tempArray) {
NSArray * cwArray = tempDic[@"cw"];
for (NSDictionary * resultDic in cwArray) {
NSString * str = [NSString stringWithFormat:@"%@",resultDic[@"w"]];
[resultString appendString:str];
}
}
}
//results arrive as incremental segments, so append to the label instead of overwriting it
self.showLabel.text = [NSString stringWithFormat:@"%@%@",self.showLabel.text ?: @"",resultString];
}
}
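/*
For reference, a rough (abridged) sketch of the JSON each key is expected to contain; the exact
fields may differ by SDK version, so treat this as an assumption rather than the official format:
{"ws":[{"cw":[{"w":"今天"}]},{"cw":[{"w":"天气"}]}]}
The loops above walk "ws" -> "cw" -> "w" and concatenate the words (here "今天天气") for display.
*/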
/** Callback when the recognition session ends.
@param error error.errorCode == 0 means the session ended normally; a non-zero code means an error occurred. */
- (void)onError: (IFlySpeechError *) error{
NSLog(@"%@",error.errorDesc);
}
/**
Callback when recording stops
*/
- (void) onEndOfSpeech {
}
/**
Callback when recognition begins
*/
- (void) onBeginOfSpeech {
}
/**
Volume callback; volume ranges from 0 to 30
*/
- (void) onVolumeChanged: (int)volume {
}
@end
2. Text to speech: speech synthesis and playback
First, a small helper class. I did not write it myself, but it works ^_^
SpeechSynthesizer.h
#import <UIKit/UIKit.h>
#import <AVFoundation/AVSpeechSynthesis.h>
@interface SpeechSynthesizer : NSObject
+ (instancetype)sharedSpeechSynthesizer;
- (void)speakString:(NSString *)string;
- (void)stopSpeak;
@end
SpeechSynthesizer.m
#import "SpeechSynthesizer.h"
@interface SpeechSynthesizer () <AVSpeechSynthesizerDelegate>
@property (nonatomic, strong, readwrite) AVSpeechSynthesizer *speechSynthesizer;
@end
@implementation SpeechSynthesizer
+ (instancetype)sharedSpeechSynthesizer
{
static id sharedInstance = nil;
static dispatch_once_t onceToken;
dispatch_once(&onceToken, ^{
sharedInstance = [[SpeechSynthesizer alloc] init];
});
return sharedInstance;
}
- (instancetype)init
{
if (self = [super init])
{
[self buildSpeechSynthesizer];
}
return self;
}
- (void)buildSpeechSynthesizer
{
//AVSpeechSynthesizer is only available on iOS 7.0 and later
if ([[[UIDevice currentDevice] systemVersion] floatValue] < 7.0)
{
return;
}
self.speechSynthesizer = [[AVSpeechSynthesizer alloc] init];
[self.speechSynthesizer setDelegate:self];
}
- (void)speakString:(NSString *)string
{
if (self.speechSynthesizer)
{
AVSpeechUtterance *aUtterance = [AVSpeechUtterance speechUtteranceWithString:string];
[aUtterance setVoice:[AVSpeechSynthesisVoice voiceWithLanguage:@"zh-CN"]];
//On iOS 8 and earlier the synthesis rate behaves abnormally, so lower it explicitly
if ([[[UIDevice currentDevice] systemVersion] floatValue] < 8.0)
{
aUtterance.rate = 0.25;//use 0.25 on iOS 7
}
else if ([[[UIDevice currentDevice] systemVersion] floatValue] < 9.0)
{
aUtterance.rate = 0.15;//use 0.15 on iOS 8
}
}
if ([self.speechSynthesizer isSpeaking])
{
[self.speechSynthesizer stopSpeakingAtBoundary:AVSpeechBoundaryWord];
}
[self.speechSynthesizer speakUtterance:aUtterance];
}
}
- (void)stopSpeak
{
if (self.speechSynthesizer)
{
[self.speechSynthesizer stopSpeakingAtBoundary:AVSpeechBoundaryImmediate];
}
}
@end
Usage
Speak "世界那么大我想去看看" ("The world is so big, I want to go see it"):
[[SpeechSynthesizer sharedSpeechSynthesizer] speakString:@"世界那么大我想去看看"];
Stop speaking:
[[SpeechSynthesizer sharedSpeechSynthesizer] stopSpeak];
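To connect the two parts, here is a minimal sketch (my own addition, not part of the original demo) that reads the recognized text aloud when dictation is stopped. It assumes the ViewController from part 1 also imports SpeechSynthesizer.h:
#import "SpeechSynthesizer.h"
-(void)endButtonDidClick{
//stop dictation, then speak whatever text has been recognized so far
[iFlySpeechRecognizer stopListening];
if (self.showLabel.text.length > 0) {
[[SpeechSynthesizer sharedSpeechSynthesizer] speakString:self.showLabel.text];
}
}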