The following code works fine (some checks and code removed) at 44100 Hz with a sample depth of 2, which seems to be the default. Since this interface is usable but not well documented, does anyone know how to change the default quality?
// Use the default microphone as the capture device
audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
captureSession = [[AVCaptureSession alloc] init];

// Wire the device into the session as an input
audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
[captureSession addInput:audioInput];

// Deliver sample buffers to this object on the main queue
audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[audioOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
[captureSession addOutput:audioOutput];
This should do the trick. More here:
#import <AudioToolbox/AudioToolbox.h>

// Requires an audio session that has already been set up with AudioSessionInitialize.
// The union lets each OSStatus be logged both as a number and as a four-char code.
union
{
    OSStatus propertyResult;
    char a[4];
} u;

Float64 F64sampleRate = 8192.0;
Float64 F64realSampleRate = 0;
UInt32 F64datasize = sizeof(F64realSampleRate);

// Ask for the preferred hardware sample rate...
u.propertyResult = AudioSessionSetProperty(kAudioSessionProperty_PreferredHardwareSampleRate, sizeof(F64sampleRate), &F64sampleRate);
NSLog(@"Set Error Set Sample Rate %ld %lx %c%c%c%c", (long)u.propertyResult, (unsigned long)u.propertyResult, u.a[3], u.a[2], u.a[1], u.a[0]);

// ...then read back the rate the hardware actually chose.
u.propertyResult = AudioSessionGetProperty(kAudioSessionProperty_CurrentHardwareSampleRate, &F64datasize, &F64realSampleRate);
NSLog(@"Get Error Current Sample Rate %ld %lx %c%c%c%c", (long)u.propertyResult, (unsigned long)u.propertyResult, u.a[3], u.a[2], u.a[1], u.a[0]);

NSLog(@"Sample Rate is %f", F64realSampleRate);