iOS: adding an audio effect before the processing in the render callback?

In my app I do audio processing in the render callback (input only, no output). Here is how I initialize the audio:
-(void) initAudio {
OSStatus status;
NewAUGraph(&graph);
AudioComponentDescription desc;
desc.componentType = kAudioUnitType_Output;
desc.componentSubType = kAudioUnitSubType_RemoteIO;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
AUNode ioNode;
status = AUGraphAddNode(graph, &desc, &ioNode);
checkStatus(status, "At adding node");
AUGraphOpen(graph);
AUGraphNodeInfo(graph, ioNode, NULL, &audioUnit);
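// kInputBus (1, the mic element of the RemoteIO unit) and kOutputBus (0, the
// speaker element) are defined elsewhere in this class.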
//Enable IO for recording
UInt32 enableInput = 1;
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Input,
                              kInputBus,
                              &enableInput,
                              sizeof(enableInput));
checkStatus(status, "At setting property for input");
//Disable playback
UInt32 enableOutput = 0;
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_EnableIO,
                              kAudioUnitScope_Output,
                              kOutputBus,
                              &enableOutput,
                              sizeof(enableOutput));
checkStatus(status, "At setting property for output");
// ASBD
AudioStreamBasicDescription audioFormatIn;
audioFormatIn.mSampleRate = SampleRate;
audioFormatIn.mFormatID = kAudioFormatLinearPCM;
audioFormatIn.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
audioFormatIn.mFramesPerPacket = 1;
audioFormatIn.mChannelsPerFrame = 1;
audioFormatIn.mBitsPerChannel = 16;//sizeof(AudioSampleType) * 8;
audioFormatIn.mBytesPerPacket = 2 * audioFormatIn.mChannelsPerFrame;
audioFormatIn.mBytesPerFrame = 2 * audioFormatIn.mChannelsPerFrame;
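// i.e. mono, interleaved, 16-bit signed integer linear PCM at SampleRate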
//Apply format
status = AudioUnitSetProperty(audioUnit,
                              kAudioUnitProperty_StreamFormat,
                              kAudioUnitScope_Output,
                              kInputBus,
                              &audioFormatIn,
                              sizeof(audioFormatIn));
checkStatus(status,"At setting property for AudioStreamBasicDescription for input");
//Set up input callback
AURenderCallbackStruct callbackStruct;
callbackStruct.inputProc = recordingCallback;
callbackStruct.inputProcRefCon = (__bridge void *)self;
status = AudioUnitSetProperty(audioUnit,
                              kAudioOutputUnitProperty_SetInputCallback,
                              kAudioUnitScope_Global,
                              kInputBus,
                              &callbackStruct,
                              sizeof(callbackStruct));
checkStatus(status,"At setting property for recording callback");
// Disable buffer allocation for the recorder
UInt32 flag = 0;
status = AudioUnitSetProperty(audioUnit,
                              kAudioUnitProperty_ShouldAllocateBuffer,
                              kAudioUnitScope_Output,
                              kInputBus,
                              &flag,
                              sizeof(flag));
checkStatus(status, "At set property should allocate buffer");
// Allocate own buffers
tempBuffer.mNumberChannels = 1;
tempBuffer.mDataByteSize = 1024 * 2;
tempBuffer.mData = malloc(1024 * 2);
status = AUGraphInitialize(graph);
checkStatus(status,"At AUGraph Initalize");
}
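For context (it is not shown above), recordingCallback follows the usual pattern: wrap tempBuffer in an AudioBufferList, pull the current block of mic samples with AudioUnitRender, then process them. A simplified sketch is below; MyAudioController and the accessor calls are only stand-ins for how my real class exposes audioUnit and tempBuffer to the C callback:

static OSStatus recordingCallback(void *inRefCon,
                                  AudioUnitRenderActionFlags *ioActionFlags,
                                  const AudioTimeStamp *inTimeStamp,
                                  UInt32 inBusNumber,
                                  UInt32 inNumberFrames,
                                  AudioBufferList *ioData)
{
    // inRefCon is the bridged self that was stored in the callback struct.
    MyAudioController *controller = (__bridge MyAudioController *)inRefCon;

    // Point an AudioBufferList at the preallocated tempBuffer so the
    // RemoteIO unit can fill it with this block of mic samples.
    AudioBufferList bufferList;
    bufferList.mNumberBuffers = 1;
    bufferList.mBuffers[0] = [controller tempBuffer];
    bufferList.mBuffers[0].mDataByteSize = inNumberFrames * 2; // 16-bit mono

    OSStatus status = AudioUnitRender([controller audioUnit],
                                      ioActionFlags,
                                      inTimeStamp,
                                      inBusNumber,
                                      inNumberFrames,
                                      &bufferList);
    checkStatus(status, "At AudioUnitRender in recording callback");

    // ... audio processing on bufferList.mBuffers[0].mData happens here ...
    return noErr;
}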
Now I would like to run the input audio through a high-pass or band-pass filter before it is processed in the render callback, so I think I should add something like this:
desc.componentType = kAudioUnitType_Effect;
desc.componentSubType = kAudioUnitSubType_BandPassFilter;
desc.componentFlags = 0;
desc.componentFlagsMask = 0;
desc.componentManufacturer = kAudioUnitManufacturer_Apple;
However, I haven't managed to create and connect the nodes correctly to make this work (my rough attempt is sketched below). Thanks for your help!
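Concretely, this is roughly what I have been trying inside initAudio, before the AUGraphInitialize call (just a sketch, continuing from the desc snippet above; filterNode and filterUnit are my placeholder names, and I'm not sure the connection, the element numbers, or the stream formats are right; I also suspect the Apple effect units may not accept my 16-bit integer format):

AUNode filterNode;
status = AUGraphAddNode(graph, &desc, &filterNode); // desc = the effect description above
checkStatus(status, "At adding filter node");

AudioUnit filterUnit;
status = AUGraphNodeInfo(graph, filterNode, NULL, &filterUnit);
checkStatus(status, "At getting filter unit");

// Feed the mic side of the RemoteIO unit (element kInputBus) into the
// filter's input (element 0); the connection is made before AUGraphInitialize.
status = AUGraphConnectNodeInput(graph, ioNode, kInputBus, filterNode, 0);
checkStatus(status, "At connecting ioNode -> filterNode");

// Then, in recordingCallback, render from filterUnit instead of audioUnit so
// the samples have already passed through the band-pass:
//     AudioUnitRender(filterUnit, ioActionFlags, inTimeStamp, 0, inNumberFrames, &bufferList);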
Does your render chain work without the band-pass? – dave234
@Dave I used to use the classic audio unit initialization, but I started moving to an AUGraph. Even though I'm not sure I'm doing it correctly, my code above seems to work fine. – jcr