0
嘿,我在這方面肯定沒有深入的經驗;不幸的是,這個項目是講師指定給我的,現在再換題目已經太遲了。—— 初始化 AVAudioSession 的測量模式(Measurement Mode)
我試圖通過在我的項目中使用AVAudio會話模式測量來禁用應用於我的輸入的系統提供的信號處理。
但是我很努力地找到任何消息來源。
我希望的結果是,通過啓用此功能,我將能夠在我的應用程序中製作更準確的讀數。
下面是代碼:
#import "ViewController.h"
@import AudioToolbox;
@import AVFoundation;
#define kOutputBus 0
#define kInputBus 1
@interface ViewController()
@property (nonatomic, weak) IBOutlet UILabel *dBSPLView2;
@end
@implementation ViewController
static AudioComponentInstance audioUnit;
- (void)viewDidLoad {
[super viewDidLoad];
// Do any additional setup after loading the view, typically from a nib.
[self setupAudio];
}
- (void) setupAudio {
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
AudioComponent comp = AudioComponentFindNext(NULL, &desc);
OSStatus status = AudioComponentInstanceNew(comp, &audioUnit);
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
AudioStreamBasicDescription audioFormat;
audioFormat.mSampleRate = 96000.00;
audioFormat.mFormatID = kAudioFormatLinearPCM;
audioFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormat.mFramesPerPacket = 1;
audioFormat.mChannelsPerFrame = 1;
audioFormat.mBitsPerChannel = 16;
audioFormat.mBytesPerFrame = audioFormat.mChannelsPerFrame * sizeof(SInt16);
audioFormat.mBytesPerPacket = audioFormat.mFramesPerPacket * audioFormat.mBytesPerFrame;
UInt32 flag = 1;
status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, kInputBus, &flag, sizeof(flag));
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
flag = 0;
status = AudioUnitSetProperty(audioUnit, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Output, kOutputBus, &flag, sizeof(flag));
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
status = AudioUnitSetProperty(audioUnit , kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, kInputBus, &audioFormat, sizeof(audioFormat));
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = (__bridge void*)self;
status = AudioUnitSetProperty(audioUnit , kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, kInputBus, &callbackStruct, sizeof(callbackStruct));
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
status = AudioUnitInitialize(audioUnit);
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
}
static OSStatus recordingCallback(
void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData
) {
AudioBuffer buffer;
buffer.mNumberChannels = 1;
buffer.mDataByteSize = inNumberFrames * sizeof(SInt16);
buffer.mData = malloc(buffer.mDataByteSize);
AudioBufferList bufferList;
bufferList.mNumberBuffers = 1;
bufferList.mBuffers[0] = buffer;
OSStatus status = AudioUnitRender(audioUnit , ioActionFlags, inTimeStamp, inBusNumber, inNumberFrames, &bufferList);
if (status != noErr) {
printf("Error\n");
return -1;
}
SInt16 *frameBuffer = buffer.mData;
double totalAmplitude = 0;
for (int i = 0; i < inNumberFrames; i++) {
// printf("%i\n",frameBuffer[i]);
totalAmplitude += frameBuffer[i] * frameBuffer[i];
}
totalAmplitude /= inNumberFrames;
totalAmplitude = sqrt(totalAmplitude);
//Creates a negative number that goes no higher than zero
//float SPLFloat = totalAmplitude/(float)SHRT_MAX * 2;
float dBFloat = (20 * log10(totalAmplitude)) + 11;
dispatch_async(dispatch_get_main_queue(), ^{
ViewController *viewController = (__bridge ViewController*)inRefCon;
viewController.dBSPLView2.text = [NSString stringWithFormat:@"%.f", dBFloat];
});
return noErr;
}
- (IBAction)recordButtonPressed:(id)sender {
NSError *error;
[[AVAudioSession sharedInstance] setActive:YES error:&error];
if (error != nil) {
NSAssert(error == nil, @"Error");
}
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryRecord error:&error];
if (error != nil) {
NSAssert(error == nil, @"Error");
}
[[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) {
if (granted) {
OSStatus status = AudioOutputUnitStart(audioUnit);
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
} else {
NSAssert(NO, @"Error");
}
}];
}
- (IBAction)stopButtonPressed:(id)sender {
OSStatus status = AudioOutputUnitStop(audioUnit);
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
NSError *error;
[[AVAudioSession sharedInstance] setActive:NO error:&error];
if (error != nil) {
NSAssert(error == nil, @"Error");
}
}
- (void)didReceiveMemoryWarning {
[super didReceiveMemoryWarning];
// Dispose of any resources that can be recreated.
}
- (void) dealloc {
OSStatus status = AudioComponentInstanceDispose(audioUnit);
if (status != noErr) {
NSAssert(status == noErr,@"Error");
}
}
@end
感謝你!答案其實相當明顯/簡單。是的,我目前的錯誤處理方式來自我跟隨的教程視頻;我只是希望先完成程序的基本功能,好讓我能寫完那份 10,000 字的報告,之後再把它改進成一個真正可用的應用程序!再次感謝你的幫助! – ChrisBearBaker
沒問題!祝你好運! –