On iOS, speech recognition work starts with capturing audio. The view controller below records from the microphone, animates a simple waveform while recording, and plays the result back.
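One thing the listing does not show: recording only works once the user has granted microphone access (and from iOS 10 on the app's Info.plist must carry an NSMicrophoneUsageDescription entry). A minimal sketch of the permission request, using the standard AVAudioSession API:

[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
if (!granted) {
NSLog(@"Microphone permission denied");
}
}];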
#import "GetAudioViewController.h"
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>
#import <ImageIO/ImageIO.h>
#import <MobileCoreServices/MobileCoreServices.h>
#import <QuartzCore/QuartzCore.h>
@interface GetAudioViewController ()
{
AVAudioPlayer *_player;
AVAudioRecorder *_audiorecord;
NSTimer* _timerForPitch;
CAShapeLayer *_shapeLayer;
CADisplayLink* _displayLink;
__weak IBOutlet UIProgressView *_audioPower;
__weak IBOutlet UIButton *_record;
__weak IBOutlet UIButton *_pause;
__weak IBOutlet UIButton *_resume;
__weak IBOutlet UIButton *_stop;
__weak IBOutlet UIView *_viewForWave;
float Pitch; // linear input level (0..1) derived from the meters; drives the wave amplitude
NSInteger _recordEncoding;
CFTimeInterval _firstTimestamp;
NSInteger _loopCount;
}
@end
@implementation GetAudioViewController
- (void)viewDidLoad {
[super viewDidLoad];
}
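// Creates the directory at the given path if it does not already exist (helper; not called elsewhere in this sample).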
-(void)createPath:(NSString*)path
{
NSFileManager* filemanager = [NSFileManager defaultManager];
if(![filemanager fileExistsAtPath:path])
[filemanager createDirectoryAtPath:path
withIntermediateDirectories:YES
attributes:nil
error:nil];
}
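// Builds a cubic Bezier curve across _viewForWave whose bulge oscillates with elapsed time and the current input level (Pitch), producing the animated wave.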
- (UIBezierPath *)pathAtInterval:(NSTimeInterval) interval
{
UIBezierPath *path = [UIBezierPath bezierPath];
[path moveToPoint:CGPointMake(0, _viewForWave.bounds.size.height / 2.0)];
CGFloat fractionOfSecond = interval - floor(interval);
CGFloat yOffset = _viewForWave.bounds.size.height * sin(fractionOfSecond * M_PI * Pitch*8);
[path addCurveToPoint:CGPointMake(_viewForWave.bounds.size.width, _viewForWave.bounds.size.height / 2.0)
controlPoint1:CGPointMake(_viewForWave.bounds.size.width / 2.0, _viewForWave.bounds.size.height / 2.0 - yOffset)
controlPoint2:CGPointMake(_viewForWave.bounds.size.width / 2.0, _viewForWave.bounds.size.height / 2.0 + yOffset)];
return path;
}
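// Installs the CAShapeLayer that renders the wave into _viewForWave.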
- (void)addShapeLayer
{
_shapeLayer = [CAShapeLayer layer];
_shapeLayer.path = [[self pathAtInterval:2.0] CGPath];
_shapeLayer.fillColor = [[UIColor redColor] CGColor];
_shapeLayer.lineWidth = 1.0;
_shapeLayer.strokeColor = [[UIColor whiteColor] CGColor];
[_viewForWave.layer addSublayer:_shapeLayer];
}
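// Called on every screen refresh; regenerates the wave path from the time elapsed since the first frame.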
- (void)handleDisplayLink:(CADisplayLink *)displayLink
{
if (!_firstTimestamp)
_firstTimestamp = displayLink.timestamp;
_loopCount++;
NSTimeInterval elapsed = (displayLink.timestamp - _firstTimestamp);
_shapeLayer.path = [[self pathAtInterval:elapsed] CGPath];
}
- (void)startDisplayLink
{
_displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(handleDisplayLink:)];
[_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
}
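// Record button: shows the wave view, starts the display-link animation, configures the audio session, and begins recording with metering enabled.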
- (IBAction)recordClick:(id)sender {
_viewForWave.hidden = NO;
[self addShapeLayer];
[self startDisplayLink];
NSLog(@"startRecording");
_audiorecord = nil;
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryRecord error:nil];
[audioSession setActive:YES error:nil];
NSMutableDictionary *recordSettings = [[NSMutableDictionary alloc] initWithCapacity:10];
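// _recordEncoding selects the output format: 1 = AAC, 2 = Apple Lossless, 3 = IMA4, 4 = iLBC, 5 = uLaw, 6 = Linear PCM.
// It is never assigned in this sample, so the default branch below (IMA4) is what actually runs.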
if(_recordEncoding == 6)
{
[recordSettings setObject:[NSNumber numberWithInt: kAudioFormatLinearPCM] forKey: AVFormatIDKey];
[recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey: AVSampleRateKey];
[recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
[recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
[recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsBigEndianKey];
[recordSettings setObject:[NSNumber numberWithBool:NO] forKey:AVLinearPCMIsFloatKey];
}
else
{
NSNumber *formatObject;
switch (_recordEncoding) {
case 1:
formatObject = [NSNumber numberWithInt: kAudioFormatMPEG4AAC];
break;
case 2:
formatObject = [NSNumber numberWithInt: kAudioFormatAppleLossless];
break;
case 3:
formatObject = [NSNumber numberWithInt: kAudioFormatAppleIMA4];
break;
case 4:
formatObject = [NSNumber numberWithInt: kAudioFormatiLBC];
break;
case 5:
formatObject = [NSNumber numberWithInt: kAudioFormatULaw];
break;
default:
formatObject = [NSNumber numberWithInt: kAudioFormatAppleIMA4];
}
[recordSettings setObject:formatObject forKey: AVFormatIDKey];
[recordSettings setObject:[NSNumber numberWithFloat:44100.0] forKey: AVSampleRateKey];
[recordSettings setObject:[NSNumber numberWithInt:2] forKey:AVNumberOfChannelsKey];
[recordSettings setObject:[NSNumber numberWithInt:12800] forKey:AVEncoderBitRateKey]; // 12,800 bps is very low for 44.1 kHz stereo; consider 128000 for better quality
[recordSettings setObject:[NSNumber numberWithInt:16] forKey:AVLinearPCMBitDepthKey];
[recordSettings setObject:[NSNumber numberWithInt: AVAudioQualityHigh] forKey: AVEncoderAudioQualityKey];
}
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *soundFilePath = [docsDir
stringByAppendingPathComponent:@"recordTest.caf"];
NSURL *url = [NSURL fileURLWithPath:soundFilePath];
NSError *error = nil;
_audiorecord = [[ AVAudioRecorder alloc] initWithURL:url settings:recordSettings error:&error];
_audiorecord.meteringEnabled = YES;
if ([_audiorecord prepareToRecord]) {
[_audiorecord record];
_timerForPitch = [NSTimer scheduledTimerWithTimeInterval:0.01 target:self selector:@selector(levelTimerCallback:) userInfo:nil repeats:YES];
} else {
NSLog(@"prepareToRecord failed (error: %@)", error);
}
}
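// Fires every 10 ms while recording: converts the average power of channel 0 (in dB, at most 0) to a linear 0..1 value and uses it to drive both the progress bar and the wave amplitude.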
- (void)levelTimerCallback:(NSTimer *)timer {
[_audiorecord updateMeters];
float linear1 = pow(10, [_audiorecord averagePowerForChannel:0] / 20);
if (linear1 > 0.03) {
Pitch = linear1 + 0.20;
} else {
Pitch = 0.0;
}
[_audioPower setProgress:Pitch];
}
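// "Pause" actually stops the recording: it hides the wave view and tears down the display link and the metering timer.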
- (IBAction)pauseClick:(id)sender {
NSLog(@"stopRecording");
_viewForWave.hidden = YES;
[_audiorecord stop];
[self stopDisplayLink];
_shapeLayer.path = [[self pathAtInterval:0] CGPath];
[_timerForPitch invalidate];
_timerForPitch = nil;
}
- (void)stopDisplayLink
{
[_displayLink invalidate];
_displayLink = nil;
}
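// "Resume" actually plays the finished recording back through AVAudioPlayer rather than resuming capture.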
- (IBAction)resumeClick:(id)sender {
AVAudioSession *audioSession = [AVAudioSession sharedInstance];
[audioSession setCategory:AVAudioSessionCategoryPlayback error:nil];
NSArray *dirPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
NSString *docsDir = [dirPaths objectAtIndex:0];
NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
NSURL *url = [NSURL fileURLWithPath:soundFilePath];
NSError *error;
_player = [[AVAudioPlayer alloc] initWithContentsOfURL:url error:&error];
_player.numberOfLoops = 0;
[_player play];
}
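// Stops playback of the recorded file.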
- (IBAction)stopClick:(id)sender {
[_player stop];
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
}
@end
That is the complete code. _viewForWave is the view that renders the waveform animation. Interested readers can write and polish a version of their own; if you build on mine, please leave me a comment.
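Once recording stops, the capture sits at Documents/recordTest.caf. A minimal sketch of how you might pick the file up for the actual speech-recognition step (the recognizer call itself depends on whichever SDK you use, so it appears only as a placeholder comment):

NSString *docsDir = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES).firstObject;
NSString *soundFilePath = [docsDir stringByAppendingPathComponent:@"recordTest.caf"];
NSData *audioData = [NSData dataWithContentsOfFile:soundFilePath];
// Hand audioData (or soundFilePath) to your speech-recognition SDK here.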