2012-12-01 67 views
6

我正在嘗試編寫一個iOS應用程序,它將從麥克風接收到的聲音傳遞給揚聲器而不做任何更改。我讀過蘋果文檔和指南。我選擇了這個guide的第一個模式。但沒有發生 - 沉默。正如你可以看到我試圖使用AUAudioGraph(註釋) - 相同的結果(我是否需要它在這個簡單的例子?)。(標題:iOS AudioUnits 直通)

我在互聯網上看到了幾個使用回調的例子,但我不想使用任何。可能嗎?

有什麼建議嗎? 感謝您的關注。

實際代碼如下:

#import "AudioController.h" 
#import <AudioToolbox/AudioToolbox.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AudioToolbox/AudioServices.h> 
#define kInputBus 1 
#define kOutputBus 0 

// Class extension: private state for the microphone-to-speaker pass-through controller.
@interface AudioController()
{
    AudioComponentDescription desc;          // Search description used to locate the RemoteIO component.
    AudioComponent component;                // RemoteIO component returned by AudioComponentFindNext.
    AudioUnit unit;                          // The instantiated RemoteIO audio unit.
    AudioStreamBasicDescription audioFormat; // 16-bit mono linear PCM stream format applied to the unit.
    double rate;                             // Sample rate (Hz) actually granted by the audio session.
    //AUGraph graph;
}


@end

// AudioController (question version): attempts to pass microphone input straight
// through to the speaker with a single RemoteIO unit and no render callbacks.
// NOTE(review): as written this produces silence — the input element (bus 1) is
// never connected to the output element (bus 0); a kAudioUnitProperty_MakeConnection
// between the two scopes is required (see the accepted answer below).
@implementation AudioController

// Configures the shared audio session, then builds and initializes the RemoteIO unit.
- (void) setUp {
    AVAudioSession *sess = [AVAudioSession sharedInstance];
    NSError *error = nil;
    rate = 44100.0;
    [sess setPreferredSampleRate:rate error:&error];
    [sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error];
    [sess setActive:YES error:&error];
    // Use the sample rate the hardware actually granted, not the preferred one.
    rate = [sess sampleRate];
    // NOTE(review): only the last-written error is inspected here; Cocoa convention
    // is to check each method's BOOL return value rather than the error pointer.
    if (error) {
     NSLog(@"%@", error);
    }

    NSLog(@"Init...");
    [self createUnitDesc];
    [self getComponent];
    [self getAudioUnit];
    [self enableIORec];
    [self enableIOPb];
    [self createFormat];
    [self applyFormat];
    OSStatus err = AudioUnitInitialize(unit);
    if (noErr != err) {
     [self showStatus:err];
    }
    // NOTE(review): missing here — a kAudioUnitProperty_MakeConnection linking the
    // input element's output scope to the output element's input scope, which is
    // why this version plays nothing.
    /*NewAUGraph(&graph);
    AUNode node;
    AUGraphAddNode(graph, &desc, &node);
AUGraphInitialize(graph);
AUGraphOpen(graph);*/
}

// Fills in the component description for Apple's RemoteIO output unit.
- (void) createUnitDesc {
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;

}

// Locates the first audio component matching the description.
- (void) getComponent {
    component = AudioComponentFindNext(NULL, &desc);
}

// Instantiates the RemoteIO audio unit from the located component.
- (void) getAudioUnit {
    OSStatus res = AudioComponentInstanceNew(component, &unit);
    if (noErr != res) {
     [self showStatus:res];
    }
}

// Enables recording: input scope of the input element (bus 1, microphone side).
- (void) enableIORec {
    UInt32 flag = 1;
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioOutputUnitProperty_EnableIO,
            kAudioUnitScope_Input,
            kInputBus,
            &flag,
            sizeof(flag));
    if (noErr != err) {
     [self showStatus:err];
    }
}

// Enables playback: output scope of the output element (bus 0, speaker side).
- (void) enableIOPb {
    UInt32 flag = 1;
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioOutputUnitProperty_EnableIO,
            kAudioUnitScope_Output,
            kOutputBus,
            &flag,
            sizeof(flag));
    if (noErr != err) {
     [self showStatus:err];
    }
}

// Builds a 16-bit signed-integer, packed, mono linear PCM format at the session rate.
- (void) createFormat {
    // Describe format
    audioFormat.mSampleRate   = rate;//44100.00;
    audioFormat.mFormatID   = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel  = 16;
    audioFormat.mBytesPerPacket  = 2;
    audioFormat.mBytesPerFrame  = 2;
}

// Applies the PCM format to the output scope of the input element — i.e. the
// format in which captured microphone data leaves bus 1.
- (void) applyFormat {
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioUnitProperty_StreamFormat,
            kAudioUnitScope_Output,
            kInputBus,
            &audioFormat,
            sizeof(audioFormat));
    if (noErr != err) {
     [self showStatus:err];
    }
}

// Starts audio rendering on the RemoteIO unit.
- (void) start {
    NSLog(@"starting");
    OSStatus err = AudioOutputUnitStart(unit);
    //AUGraphStart(graph);
    if (noErr != err) {
     [self showStatus:err];
    }
}

// Stops audio rendering.
- (void) end {
    NSLog(@"ending");
    OSStatus err = AudioOutputUnitStop(unit);
    //AUGraphStop(graph);
    if (noErr != err) {
     [self showStatus:err];
    }
}

// Logs a human-readable name for a Core Audio status code.
// NOTE(review): OSStatus is SInt32; %li expects long — cast st to (long) to be
// format-correct on 64-bit architectures.
- (void) showStatus:(OSStatus) st{
    NSString *text = nil;
    switch (st) {
     case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break;
     case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break;
     case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break;
     case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break;
     case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break;
     case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break;
     case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break;
     case kAudioUnitErr_InvalidElement: text = @"kAudioUnitErr_InvalidElement"; break;
     case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break;
     case kAudioUnitErr_InvalidOfflineRender: text = @"kAudioUnitErr_InvalidOfflineRender"; break;
     case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break;
     case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break;
     case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break;
     case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break;
     case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break;
     case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break;
     case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break;
     case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break;
     case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break;
     case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break;
     case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break;
     default: text = @"unknown error";
    }
    NSLog(@"TRANSLATED_ERROR = %li = %@", st, text);
}

// MRC-era teardown: [super dealloc] is required under manual retain/release;
// it must be removed if this file is ever migrated to ARC.
- (void) dealloc {
    AudioUnitUninitialize(unit);

    [super dealloc];
}

@end
+2

看起來你正在做的一切都非常正確。但是,我沒有看到實際將輸入範圍的輸出元素連接到輸出範圍的輸入元素的位置。 RemoteIO單元的特殊之處在於它可以處理硬件輸入和輸出,但是在實例化單元時這些單元並不是隱式連接的。 – warrenm

+0

嗯,請給我一個提示,我可以如何實現它?你在說AUAudioGraph嗎?還是有其他方式來創建元素之間的連接?謝謝。 –

+0

謝謝,用 AudioUnitConnection conn 完成; conn.destInputNumber = 0; conn.sourceAudioUnit = unit; conn.sourceOutputNumber = 1; err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, 0, &conn, sizeof(conn)); if (noErr != err) { [self showStatus:err]; } –

回答

10

正如 warrenm 所述,在 RemoteIO 單元的兩個元件之間建立連接確實有幫助。所以,在所有初始化完成之後,加入以下代碼:

// Connect the RemoteIO unit to itself: the input element's (kInputBus) output
// scope feeds the output element's (kOutputBus) input scope, so captured
// microphone audio is rendered to the speaker without any render callback.
AudioUnitConnection conn;
conn.destInputNumber = kOutputBus;   // destination: output element (bus 0)
conn.sourceAudioUnit = unit;         // source is the same RemoteIO instance
conn.sourceOutputNumber = kInputBus; // source: input element (bus 1)
err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, kOutputBus, &conn, sizeof(conn));
if (noErr != err) { [self showStatus:err]; }

UPDATE:爲了方便他人使用該解決方案,我將在這裏發佈完整的代碼:

.h文件

#import <Foundation/Foundation.h>

/// Simple microphone-to-speaker pass-through built on a single RemoteIO audio unit.
@interface AudioController : NSObject

/// Configures the audio session and the RemoteIO unit; call once before -start.
- (void)setUp;
/// Starts the audio pass-through.
- (void)start;
/// Stops the audio pass-through.
- (void)end;
@end

.m文件

#import "AudioController.h" 
#import <AudioToolbox/AudioToolbox.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AudioToolbox/AudioServices.h> 
#define kInputBus 1 
#define kOutputBus 0 

// Class extension: private state for the pass-through controller.
@interface AudioController()
{
    AudioComponentDescription desc;          // Search description used to locate the RemoteIO component.
    AudioComponent component;                // RemoteIO component returned by AudioComponentFindNext.
    AudioUnit unit;                          // The instantiated RemoteIO audio unit.
    AudioStreamBasicDescription audioFormat; // 16-bit mono linear PCM stream format applied to the unit.
    double rate;                             // Sample rate (Hz) actually granted by the audio session.
}
@end

// AudioController (working version): passes microphone input straight through to
// the speaker using a single RemoteIO unit. The key step — absent from the
// question's version — is the kAudioUnitProperty_MakeConnection call that wires
// the input element (bus 1) to the output element (bus 0).
@implementation AudioController

/// Configures the shared audio session for simultaneous record/playback, builds
/// and initializes the RemoteIO unit, then connects its input element to its
/// output element so audio flows without a render callback.
- (void)setUp
{
    AVAudioSession *sess = [AVAudioSession sharedInstance];
    NSError *error = nil;
    rate = 44100.0;
    [sess setPreferredSampleRate:rate error:&error];
    [sess setCategory:AVAudioSessionCategoryPlayAndRecord error:&error];
    [sess setActive:YES error:&error];
    // Use the sample rate the hardware actually granted, not the preferred one.
    rate = [sess sampleRate];
    if (error) {
        NSLog(@"%@", error);
    }

    NSLog(@"Initing");
    [self createUnitDesc];
    [self getComponent];
    [self getAudioUnit];
    [self enableIORec];
    [self enableIOPb];
    [self createFormat];
    [self applyFormat];
    OSStatus err = AudioUnitInitialize(unit);
    if (noErr != err) {
        [self showStatus:err];
    }

    // Connect the unit to itself: the input element's (kInputBus) output scope
    // feeds the output element's (kOutputBus) input scope. Without this the
    // RemoteIO unit renders silence. (Uses the kInputBus/kOutputBus constants
    // for consistency with the rest of the file instead of bare 1/0.)
    AudioUnitConnection conn;
    conn.destInputNumber = kOutputBus;
    conn.sourceAudioUnit = unit;
    conn.sourceOutputNumber = kInputBus;
    err = AudioUnitSetProperty(unit, kAudioUnitProperty_MakeConnection, kAudioUnitScope_Input, kOutputBus, &conn, sizeof(conn));
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Fills in the component description for Apple's RemoteIO output unit.
- (void)createUnitDesc
{
    desc.componentType = kAudioUnitType_Output;
    desc.componentSubType = kAudioUnitSubType_RemoteIO;
    desc.componentFlags = 0;
    desc.componentFlagsMask = 0;
    desc.componentManufacturer = kAudioUnitManufacturer_Apple;
}

/// Locates the first audio component matching the description.
- (void)getComponent
{
    component = AudioComponentFindNext(NULL, &desc);
}

/// Instantiates the RemoteIO audio unit from the located component.
- (void)getAudioUnit
{
    OSStatus res = AudioComponentInstanceNew(component, &unit);
    if (noErr != res) {
        [self showStatus:res];
    }
}

/// Enables recording: input scope of the input element (bus 1, microphone side).
- (void)enableIORec
{
    UInt32 flag = 1;
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioOutputUnitProperty_EnableIO,
            kAudioUnitScope_Input,
            kInputBus,
            &flag,
            sizeof(flag));
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Enables playback: output scope of the output element (bus 0, speaker side).
- (void)enableIOPb
{
    UInt32 flag = 1;
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioOutputUnitProperty_EnableIO,
            kAudioUnitScope_Output,
            kOutputBus,
            &flag,
            sizeof(flag));
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Builds a 16-bit signed-integer, packed, mono linear PCM format at the
/// session's granted sample rate.
- (void)createFormat
{
    // Describe format
    audioFormat.mSampleRate   = rate;
    audioFormat.mFormatID   = kAudioFormatLinearPCM;
    audioFormat.mFormatFlags  = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked;
    audioFormat.mFramesPerPacket = 1;
    audioFormat.mChannelsPerFrame = 1;
    audioFormat.mBitsPerChannel  = 16;
    audioFormat.mBytesPerPacket  = 2;
    audioFormat.mBytesPerFrame  = 2;
}

/// Applies the PCM format to the output scope of the input element — the
/// format in which captured microphone data leaves bus 1.
- (void)applyFormat
{
    OSStatus err = AudioUnitSetProperty(unit,
            kAudioUnitProperty_StreamFormat,
            kAudioUnitScope_Output,
            kInputBus,
            &audioFormat,
            sizeof(audioFormat));
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Starts audio rendering on the RemoteIO unit.
- (void)start
{
    NSLog(@"starting");
    OSStatus err = AudioOutputUnitStart(unit);
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Stops audio rendering.
- (void)end
{
    NSLog(@"ending");
    OSStatus err = AudioOutputUnitStop(unit);
    if (noErr != err) {
        [self showStatus:err];
    }
}

/// Logs a human-readable name for a Core Audio status code.
- (void)showStatus:(OSStatus)st
{
    NSString *text = nil;
    switch (st) {
     case kAudioUnitErr_CannotDoInCurrentContext: text = @"kAudioUnitErr_CannotDoInCurrentContext"; break;
     case kAudioUnitErr_FailedInitialization: text = @"kAudioUnitErr_FailedInitialization"; break;
     case kAudioUnitErr_FileNotSpecified: text = @"kAudioUnitErr_FileNotSpecified"; break;
     case kAudioUnitErr_FormatNotSupported: text = @"kAudioUnitErr_FormatNotSupported"; break;
     case kAudioUnitErr_IllegalInstrument: text = @"kAudioUnitErr_IllegalInstrument"; break;
     case kAudioUnitErr_Initialized: text = @"kAudioUnitErr_Initialized"; break;
     case kAudioUnitErr_InstrumentTypeNotFound: text = @"kAudioUnitErr_InstrumentTypeNotFound"; break;
     case kAudioUnitErr_InvalidElement: text = @"kAudioUnitErr_InvalidElement"; break;
     case kAudioUnitErr_InvalidFile: text = @"kAudioUnitErr_InvalidFile"; break;
     case kAudioUnitErr_InvalidOfflineRender: text = @"kAudioUnitErr_InvalidOfflineRender"; break;
     case kAudioUnitErr_InvalidParameter: text = @"kAudioUnitErr_InvalidParameter"; break;
     case kAudioUnitErr_InvalidProperty: text = @"kAudioUnitErr_InvalidProperty"; break;
     case kAudioUnitErr_InvalidPropertyValue: text = @"kAudioUnitErr_InvalidPropertyValue"; break;
     case kAudioUnitErr_InvalidScope: text = @"kAudioUnitErr_InvalidScope"; break;
     case kAudioUnitErr_NoConnection: text = @"kAudioUnitErr_NoConnection"; break;
     case kAudioUnitErr_PropertyNotInUse: text = @"kAudioUnitErr_PropertyNotInUse"; break;
     case kAudioUnitErr_PropertyNotWritable: text = @"kAudioUnitErr_PropertyNotWritable"; break;
     case kAudioUnitErr_TooManyFramesToProcess: text = @"kAudioUnitErr_TooManyFramesToProcess"; break;
     case kAudioUnitErr_Unauthorized: text = @"kAudioUnitErr_Unauthorized"; break;
     case kAudioUnitErr_Uninitialized: text = @"kAudioUnitErr_Uninitialized"; break;
     case kAudioUnitErr_UnknownFileType: text = @"kAudioUnitErr_UnknownFileType"; break;
     default: text = @"unknown error";
    }
    // Cast to long: OSStatus is SInt32, so passing it raw to %li is a
    // format-specifier mismatch on 64-bit architectures.
    NSLog(@"TRANSLATED_ERROR = %li = %@", (long)st, text);
}

/// MRC-era teardown: [super dealloc] is required under manual retain/release;
/// remove the call if this file is migrated to ARC.
- (void)dealloc
{
    AudioUnitUninitialize(unit);

    [super dealloc];
}

@end
+0

您能否詳細說明您是如何使其工作的?我無法使它工作 – Srikanth

+2

@Srikanth,請看我發佈的代碼。首先,您需要創建一個'AudioController'對象,然後調用'setUp',然後通過調用'start'和'end'方法來控制播放。 –

+1

@Srikanth創建兩個按鈕,即啓動和停止,然後爲啓動按鈕插座,調用啓動方法和停止按鈕插座,調用結束方法。希望這可以幫助。 – madLokesh