
I am new to the AudioUnit and AUGraph services. My requirement is to play a single sound file using these services. I downloaded the MixerHost project from the Apple developer site and changed some of the code, but now the code crashes.

Here is my modified code:

#import "JKAudioPlayer.h" 
#pragma mark Mixer input bus render callback 

// 
// Declared as AURenderCallback in AudioUnit/AUComponent.h. See Audio Unit Component Services Reference. 
static OSStatus inputRenderCallback (

            void      *inRefCon, 
            AudioUnitRenderActionFlags *ioActionFlags, 
            const AudioTimeStamp  *inTimeStamp, 
            UInt32      inBusNumber, 
            UInt32      inNumberFrames, 
            AudioBufferList    *ioData 
            ) { 

    soundStructPtr soundStructPointerArray = (soundStructPtr) inRefCon; 
    UInt32   frameTotalForSound  = soundStructPointerArray[inBusNumber].frameCount; 
    BOOL    isStereo     = soundStructPointerArray[inBusNumber].isStereo; 

    AudioUnitSampleType *dataInLeft; 
    AudioUnitSampleType *dataInRight; 

    dataInLeft     = soundStructPointerArray[inBusNumber].audioDataLeft; 
    if (isStereo) dataInRight = soundStructPointerArray[inBusNumber].audioDataRight; 

    AudioUnitSampleType *outSamplesChannelLeft; 
    AudioUnitSampleType *outSamplesChannelRight; 

    outSamplesChannelLeft     = (AudioUnitSampleType *) ioData->mBuffers[0].mData; 
    if (isStereo) outSamplesChannelRight = (AudioUnitSampleType *) ioData->mBuffers[1].mData; 

    UInt32 sampleNumber = soundStructPointerArray[0].sampleNumber; 

    for (UInt32 frameNumber = 0; frameNumber < inNumberFrames; ++frameNumber) { 
     outSamplesChannelLeft[frameNumber]     = dataInLeft[sampleNumber]; 
     if (isStereo) outSamplesChannelRight[frameNumber] = dataInRight[sampleNumber]; 
     sampleNumber++; 
     if (sampleNumber >= frameTotalForSound) sampleNumber = 0; 
    } 
    soundStructPointerArray[inBusNumber].sampleNumber = sampleNumber; 
    return noErr; 
} 

#pragma mark - 
#pragma mark Audio route change listener callback 
static void audioRouteChangeListenerCallback (
               void      *inUserData, 
               AudioSessionPropertyID inPropertyID, 
               UInt32     inPropertyValueSize, 
               const void    *inPropertyValue 
              ) { 
    if (inPropertyID != kAudioSessionProperty_AudioRouteChange) return; 
    JKAudioPlayer *audioObject = (__bridge JKAudioPlayer *) inUserData; // __bridge, not __bridge_transfer: the listener must not consume the retain
    if (NO == audioObject.isPlaying) { 
     NSLog (@"Audio route change while application audio is stopped."); 
     return; 
    } else { 
     CFDictionaryRef routeChangeDictionary = inPropertyValue; 
     CFNumberRef routeChangeReasonRef = 
     CFDictionaryGetValue (
           routeChangeDictionary, 
           CFSTR (kAudioSession_AudioRouteChangeKey_Reason) 
          ); 

     SInt32 routeChangeReason; 

     CFNumberGetValue (
          routeChangeReasonRef, 
          kCFNumberSInt32Type, 
          &routeChangeReason 
         );   
     if (routeChangeReason == kAudioSessionRouteChangeReason_OldDeviceUnavailable) { 

      NSLog (@"Audio output device was removed; stopping audio playback."); 
      NSString *MixerHostAudioObjectPlaybackStateDidChangeNotification = @"MixerHostAudioObjectPlaybackStateDidChangeNotification"; 
      [[NSNotificationCenter defaultCenter] postNotificationName: MixerHostAudioObjectPlaybackStateDidChangeNotification object: audioObject]; 

     } else { 

      NSLog (@"A route change occurred that does not require stopping application audio."); 
     } 
    } 
} 

@implementation JKAudioPlayer 
@synthesize monoStreamFormat;   // mono format for use in buffer and mixer input for "beats" sound 
@synthesize graphSampleRate;   // sample rate to use throughout audio processing chain 
@synthesize mixerUnit;     // the Multichannel Mixer unit 
@synthesize playing;     // Boolean flag to indicate whether audio is playing or not 
@synthesize interruptedDuringPlayback; // Boolean flag to indicate whether audio was playing when an interruption arrived 
- (id) init { 

    self = [super init]; 

    if (!self) return nil; 
    NSURL *beatsLoop = [[NSBundle mainBundle] URLForResource: @"beatsMono" 
               withExtension: @"caf"]; 
    sourceURL = (__bridge_retained CFURLRef) beatsLoop; 
    self.interruptedDuringPlayback = NO; 
    [self setupAudioSession];  
    [self setupMonoStreamFormat]; 
    [self readAudioFilesIntoMemory]; 
    [self configureAndInitializeAudioProcessingGraph]; 
    [self enableMixerInput: 0 isOn: YES]; 
    [self setMixerOutputGain:1]; 
    [self setMixerInput: 0 gain:YES]; 
    return self; 
} 
- (void) setupAudioSession { 
    AVAudioSession *mySession = [AVAudioSession sharedInstance]; 
    [mySession setDelegate: self]; 
    NSError *audioSessionError = nil; 
    [mySession setCategory: AVAudioSessionCategoryPlayback 
        error: &audioSessionError]; 

    if (audioSessionError != nil) { 

     NSLog (@"Error setting audio session category."); 
     return; 
    } 
    self.graphSampleRate = 44100.0; // Hertz 
    [mySession setPreferredHardwareSampleRate: graphSampleRate 
             error: &audioSessionError]; 
    if (audioSessionError != nil) { 

     NSLog (@"Error setting preferred hardware sample rate."); 
     return; 
    } 
    [mySession setActive: YES 
        error: &audioSessionError]; 

    if (audioSessionError != nil) { 

     NSLog (@"Error activating audio session during initial setup."); 
     return; 
    }  
    self.graphSampleRate = [mySession currentHardwareSampleRate]; 
    AudioSessionAddPropertyListener (
            kAudioSessionProperty_AudioRouteChange, 
            audioRouteChangeListenerCallback, 
            (__bridge void*)self 
            ); 
} 
- (void) setupMonoStreamFormat { 
    size_t bytesPerSample = sizeof (AudioUnitSampleType); 
    monoStreamFormat.mFormatID   = kAudioFormatLinearPCM; 
    monoStreamFormat.mFormatFlags  = kAudioFormatFlagsAudioUnitCanonical; 
    monoStreamFormat.mBytesPerPacket = bytesPerSample; 
    monoStreamFormat.mFramesPerPacket = 1; 
    monoStreamFormat.mBytesPerFrame  = bytesPerSample; 
    monoStreamFormat.mChannelsPerFrame = 1;     // 1 indicates mono 
    monoStreamFormat.mBitsPerChannel = 8 * bytesPerSample; 
    monoStreamFormat.mSampleRate  = graphSampleRate; 
} 


- (void) readAudioFilesIntoMemory { 
    ExtAudioFileRef audioFileObject = 0; 
    OSStatus result = ExtAudioFileOpenURL (sourceURL, &audioFileObject); 
    if (noErr != result || NULL == audioFileObject) {NSLog(@"ExtAudioFileOpenURL failed: %ld", result); return;} 
    UInt64 totalFramesInFile = 0; 
    UInt32 frameLengthPropertySize = sizeof (totalFramesInFile); 
    result = ExtAudioFileGetProperty (
             audioFileObject, 
             kExtAudioFileProperty_FileLengthFrames, 
             &frameLengthPropertySize, 
             &totalFramesInFile 
             ); 

    if (noErr != result) {NSLog(@"ExtAudioFileGetProperty (audio file length in frames) %ld", result); return;} 
    soundStruct.frameCount = totalFramesInFile; 
    AudioStreamBasicDescription fileAudioFormat = {0}; 
    UInt32 formatPropertySize = sizeof (fileAudioFormat); 
    result = ExtAudioFileGetProperty (
             audioFileObject, 
             kExtAudioFileProperty_FileDataFormat, 
             &formatPropertySize, 
             &fileAudioFormat 
             ); 

    if (noErr != result) {NSLog(@"ExtAudioFileGetProperty (file audio format)%ld",result); return;} 

    UInt32 channelCount = fileAudioFormat.mChannelsPerFrame; 
    soundStruct.audioDataLeft = 
    (AudioUnitSampleType *) calloc (totalFramesInFile, sizeof (AudioUnitSampleType)); 

    AudioStreamBasicDescription importFormat = {0}; 
    if (1 == channelCount) { 

     soundStruct.isStereo = NO; 
     importFormat = monoStreamFormat; 

    } else { 

     NSLog (@"*** WARNING: File format not supported - wrong number of channels"); 
     ExtAudioFileDispose (audioFileObject); 
     return; 
    } 
    result = ExtAudioFileSetProperty (
             audioFileObject, 
             kExtAudioFileProperty_ClientDataFormat, 
             sizeof (importFormat), 
             &importFormat 
             ); 

    if (noErr != result) {NSLog(@"ExtAudioFileSetProperty (client data format) %ld", result); return;} 
    AudioBufferList *bufferList; 
    bufferList = (AudioBufferList *) malloc (
              sizeof (AudioBufferList) + sizeof (AudioBuffer) * (channelCount - 1) 
              ); 

    if (NULL == bufferList) {NSLog (@"*** malloc failure for allocating bufferList memory"); return;} 

    bufferList->mNumberBuffers = channelCount; 
    AudioBuffer emptyBuffer = {0}; 
    size_t arrayIndex; 
    for (arrayIndex = 0; arrayIndex < channelCount; arrayIndex++) { 
     bufferList->mBuffers[arrayIndex] = emptyBuffer; 
    } 
    bufferList->mBuffers[0].mNumberChannels = 1; 
    bufferList->mBuffers[0].mDataByteSize = totalFramesInFile * sizeof (AudioUnitSampleType); 
    bufferList->mBuffers[0].mData   = soundStruct.audioDataLeft; 

    if (2 == channelCount) { 
     bufferList->mBuffers[1].mNumberChannels = 1; 
     bufferList->mBuffers[1].mDataByteSize = totalFramesInFile * sizeof (AudioUnitSampleType); 
     bufferList->mBuffers[1].mData   = soundStruct.audioDataRight; 
    } 

    UInt32 numberOfPacketsToRead = (UInt32) totalFramesInFile; 

    result = ExtAudioFileRead (
           audioFileObject, 
           &numberOfPacketsToRead, 
           bufferList 
           ); 

    free (bufferList); 

    if (noErr != result) { 

     NSLog(@"ExtAudioFileRead failure - %ld " , result); 
     free (soundStruct.audioDataLeft); 
     soundStruct.audioDataLeft = 0; 

     if (2 == channelCount) { 
      free (soundStruct.audioDataRight); 
      soundStruct.audioDataRight = 0; 
     } 

     ExtAudioFileDispose (audioFileObject);    
     return; 
    } 

    NSLog (@"Finished reading file into memory"); 
    soundStruct.sampleNumber = 0; 
    ExtAudioFileDispose (audioFileObject); 
} 

- (void) configureAndInitializeAudioProcessingGraph { 

    NSLog (@"Configuring and then initializing audio processing graph"); 
    OSStatus result = noErr; 
    result = NewAUGraph (&processingGraph); 

    if (noErr != result) {[self printErrorMessage: @"NewAUGraph" withStatus: result]; return;} 

    // I/O unit 
    AudioComponentDescription iOUnitDescription; 
    iOUnitDescription.componentType   = kAudioUnitType_Output; 
    iOUnitDescription.componentSubType  = kAudioUnitSubType_RemoteIO; 
    iOUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple; 
    iOUnitDescription.componentFlags   = 0; 
    iOUnitDescription.componentFlagsMask  = 0; 

    // Multichannel mixer unit 
    AudioComponentDescription MixerUnitDescription; 
    MixerUnitDescription.componentType   = kAudioUnitType_Mixer; 
    MixerUnitDescription.componentSubType  = kAudioUnitSubType_MultiChannelMixer; 
    MixerUnitDescription.componentManufacturer = kAudioUnitManufacturer_Apple; 
    MixerUnitDescription.componentFlags   = 0; 
    MixerUnitDescription.componentFlagsMask  = 0; 


    //............................................................................ 
    // Add nodes to the audio processing graph. 
    NSLog (@"Adding nodes to audio processing graph"); 

    AUNode iONode;   // node for I/O unit 
    AUNode mixerNode;  // node for Multichannel Mixer unit 

    // Add the nodes to the audio processing graph 
    result = AUGraphAddNode (
           processingGraph, 
           &iOUnitDescription, 
           &iONode); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for I/O unit" withStatus: result]; return;} 


    result = AUGraphAddNode (
           processingGraph, 
           &MixerUnitDescription, 
           &mixerNode 
           ); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphNewNode failed for Mixer unit" withStatus: result]; return;} 
    result = AUGraphOpen (processingGraph); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphOpen" withStatus: result]; return;} 
    result = AUGraphNodeInfo (
           processingGraph, 
           mixerNode, 
           NULL, 
           &mixerUnit 
           ); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphNodeInfo" withStatus: result]; return;} 
    UInt32 busCount = 2; // bus count for mixer unit input 

    UInt32 beatsBus = 1; // mixer unit bus 1 will be mono and will take the beats sound 

    NSLog (@"Setting mixer unit input bus count to: %lu", busCount); 
    result = AudioUnitSetProperty (
            mixerUnit, 
            kAudioUnitProperty_ElementCount, 
            kAudioUnitScope_Input, 
            0, 
            &busCount, 
            sizeof (busCount) 
            ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit bus count)" withStatus: result]; return;} 
    UInt32 maximumFramesPerSlice = 4096; 

    result = AudioUnitSetProperty (
            mixerUnit, 
            kAudioUnitProperty_MaximumFramesPerSlice, 
            kAudioUnitScope_Global, 
            0, 
            &maximumFramesPerSlice, 
            sizeof (maximumFramesPerSlice) 
            ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit maximum frames per slice)" withStatus: result]; return;} 
    for (UInt16 busNumber = 0; busNumber < busCount; ++busNumber) { 

     // Set up the structure that contains the input render callback 
     AURenderCallbackStruct inputCallbackStruct; 
     inputCallbackStruct.inputProc  = &inputRenderCallback; 
     inputCallbackStruct.inputProcRefCon = &soundStruct; 

     NSLog (@"Registering the render callback with mixer unit input bus %u", busNumber); 
     // Set a callback for the specified node's specified input 
     result = AUGraphSetNodeInputCallback (
               processingGraph, 
               mixerNode, 
               busNumber, 
               &inputCallbackStruct 
              ); 

     if (noErr != result) {[self printErrorMessage: @"AUGraphSetNodeInputCallback" withStatus: result]; return;} 
    } 


    NSLog (@"Setting mono stream format for mixer unit \"beats\" input bus"); 
    result = AudioUnitSetProperty (
            mixerUnit, 
            kAudioUnitProperty_StreamFormat, 
            kAudioUnitScope_Input, 
            beatsBus, 
            &monoStreamFormat, 
            sizeof (monoStreamFormat) 
            ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit beats input bus stream format)" withStatus: result];return;} 


    NSLog (@"Setting sample rate for mixer unit output scope"); 
    result = AudioUnitSetProperty (
            mixerUnit, 
            kAudioUnitProperty_SampleRate, 
            kAudioUnitScope_Output, 
            0, 
            &graphSampleRate, 
            sizeof (graphSampleRate) 
            ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetProperty (set mixer unit output sample rate)" withStatus: result]; return;} 
    NSLog (@"Connecting the mixer output to the input of the I/O unit output element"); 

    result = AUGraphConnectNodeInput (
             processingGraph, 
             mixerNode,   // source node 
             0,     // source node output bus number 
             iONode,   // destination node 
             0     // destination node input bus number 
            ); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphConnectNodeInput" withStatus: result]; return;} 
    CAShow (processingGraph); 
    result = AUGraphInitialize (processingGraph); 

    if (noErr != result) {[self printErrorMessage: @"AUGraphInitialize" withStatus: result]; return;} 
} 

// Start playback 
- (void) startAUGraph { 
    OSStatus result = AUGraphStart (processingGraph); 
    if (noErr != result) {[self printErrorMessage: @"AUGraphStart" withStatus: result]; return;} 

    self.playing = YES; 
} 

// Stop playback 
- (void) stopAUGraph { 
    Boolean isRunning = false; 
    OSStatus result = AUGraphIsRunning (processingGraph, &isRunning); 
    if (noErr != result) {[self printErrorMessage: @"AUGraphIsRunning" withStatus: result]; return;} 

    if (isRunning) { 

     result = AUGraphStop (processingGraph); 
     if (noErr != result) {[self printErrorMessage: @"AUGraphStop" withStatus: result]; return;} 
     self.playing = NO; 
    } 
} 

- (void) enableMixerInput: (UInt32) inputBus isOn: (AudioUnitParameterValue) isOnValue { 
    OSStatus result = AudioUnitSetParameter (
              mixerUnit, 
              kMultiChannelMixerParam_Enable, 
              kAudioUnitScope_Input, 
              inputBus, 
              isOnValue, 
              0 
              ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (enable the mixer unit)" withStatus: result]; return;} 


} 

- (void) setMixerInput: (UInt32) inputBus gain: (AudioUnitParameterValue) newGain { 
    NSLog(@"mixer input %lu gain %f",inputBus,newGain); 

    OSStatus result = AudioUnitSetParameter (
              mixerUnit, 
              kMultiChannelMixerParam_Volume, 
              kAudioUnitScope_Input, 
              inputBus, 
              newGain, 
              0 
              ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit input volume)" withStatus: result]; return;} 

} 
- (void) setMixerOutputGain: (AudioUnitParameterValue) newGain { 
    NSLog(@"mixer output gain %f",newGain); 
    OSStatus result = AudioUnitSetParameter (
              mixerUnit, 
              kMultiChannelMixerParam_Volume, 
              kAudioUnitScope_Output, 
              0, 
              newGain, 
              0 
              ); 

    if (noErr != result) {[self printErrorMessage: @"AudioUnitSetParameter (set mixer unit output volume)" withStatus: result]; return;} 

} 
- (void) printErrorMessage: (NSString *) errorString withStatus: (OSStatus) result { 

    char resultString[5]; 
    UInt32 swappedResult = CFSwapInt32HostToBig (result); 
    bcopy (&swappedResult, resultString, 4); 
    resultString[4] = '\0'; 

    NSLog (@"*** %@ error: %ld %s", errorString, (long) result, resultString); 
} 

@end 

The application crashes at this point:

outSamplesChannelLeft[frameNumber] = dataInLeft[sampleNumber];
if (isStereo) outSamplesChannelRight[frameNumber] = dataInRight[sampleNumber];

What is wrong with my code? Can anyone help me?


If all you need is to play a single sound, Core Audio is overkill - and it's hard, besides. Why not try OpenAL instead - here is a nice open-source wrapper: https://github.com/kstenerud/ObjectAL-for-iPhone – 2012-04-13 00:56:42
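
For reference, playing one sound through the suggested wrapper takes only a couple of lines. A minimal sketch, assuming ObjectAL's OALSimpleAudio facade as shown in its README (method names not verified against the current release):

#import "ObjectAL.h"

// Play a short effect once; OALSimpleAudio sets up the session and OpenAL context.
[[OALSimpleAudio sharedInstance] playEffect:@"beatsMono.caf"];

// The long form also exposes pitch and pan, which AVAudioPlayer lacks:
[[OALSimpleAudio sharedInstance] playEffect:@"beatsMono.caf"
                                     volume:1.0f
                                      pitch:1.5f   // 1.0f = unchanged pitch
                                        pan:0.0f
                                       loop:NO];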


@skinnyTOD How can I measure the power of the output sound with OpenAL? I currently use CDAudioManager (the Cocos sound engine), but I cannot measure the output power. I also want to change the pitch of the playing sound, which is why I prefer the Cocos sound engine over AVAudioPlayer. Do you have any ideas for these requirements? – Johnykutty 2012-04-13 05:00:59


I don't think you can do that kind of metering with OpenAL. There is an overview of the various audio options on iOS here: http://developer.apple.com/library/ios/#documentation/AudioVideo/Conceptual/MultimediaPG/UsingAudio/UsingAudio.html – 2012-04-13 05:29:19
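
On the metering question: with the AUGraph setup from the question, the Multichannel Mixer has built-in metering that can be read off its output scope. A rough sketch, assuming the standard metering constants from the AudioUnit headers apply to the iOS mixer (worth verifying against your SDK):

// Enable metering once, after the graph is initialized.
UInt32 meteringOn = 1;
AudioUnitSetProperty (mixerUnit,
                      kAudioUnitProperty_MeteringMode,
                      kAudioUnitScope_Output,
                      0,
                      &meteringOn,
                      sizeof (meteringOn));

// Poll from a timer while audio plays; the value is an averaged level in dB.
AudioUnitParameterValue levelDecibels = 0;
AudioUnitGetParameter (mixerUnit,
                       kMultiChannelMixerParam_PostAveragePower,
                       kAudioUnitScope_Output,
                       0,
                       &levelDecibels);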

Answer


The following SO question contains working code that plays a single file with audio units (even with a mixer in between, which you may not need). Although that question says the code doesn't work, it worked for me - just add an AUGraphStart(_graph) call at the end. It uses the AudioFilePlayer unit, so there is no need to write your own render callback.
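
A minimal sketch of that approach - an AudioFilePlayer generator unit wired straight into Remote I/O, so no render callback and no in-memory sample buffers. The constants and structs are the standard AudioToolbox ones; sourceURL stands in for the question's file URL, and error checking is omitted for brevity:

#include <AudioToolbox/AudioToolbox.h>

AUGraph graph;
AUNode playerNode, ioNode;
AudioUnit playerUnit;

// Describe the two units: AudioFilePlayer (generator) and Remote I/O (output).
AudioComponentDescription playerDesc = {0};
playerDesc.componentType         = kAudioUnitType_Generator;
playerDesc.componentSubType      = kAudioUnitSubType_AudioFilePlayer;
playerDesc.componentManufacturer = kAudioUnitManufacturer_Apple;

AudioComponentDescription ioDesc = {0};
ioDesc.componentType         = kAudioUnitType_Output;
ioDesc.componentSubType      = kAudioUnitSubType_RemoteIO;
ioDesc.componentManufacturer = kAudioUnitManufacturer_Apple;

NewAUGraph (&graph);
AUGraphAddNode (graph, &playerDesc, &playerNode);
AUGraphAddNode (graph, &ioDesc, &ioNode);
AUGraphOpen (graph);
AUGraphNodeInfo (graph, playerNode, NULL, &playerUnit);
AUGraphConnectNodeInput (graph, playerNode, 0, ioNode, 0);
AUGraphInitialize (graph);

// Hand the file to the player unit (must happen after AUGraphInitialize).
AudioFileID audioFile;
AudioFileOpenURL (sourceURL, kAudioFileReadPermission, 0, &audioFile);
AudioUnitSetProperty (playerUnit, kAudioUnitProperty_ScheduledFileIDs,
                      kAudioUnitScope_Global, 0, &audioFile, sizeof (audioFile));

// Schedule the whole file, starting at frame 0.
ScheduledAudioFileRegion region = {0};
region.mTimeStamp.mFlags      = kAudioTimeStampSampleTimeValid;
region.mTimeStamp.mSampleTime = 0;
region.mAudioFile    = audioFile;
region.mLoopCount    = 0;
region.mStartFrame   = 0;
region.mFramesToPlay = (UInt32) -1;  // -1 = play to the end of the file
AudioUnitSetProperty (playerUnit, kAudioUnitProperty_ScheduledFileRegion,
                      kAudioUnitScope_Global, 0, &region, sizeof (region));

// Start "as soon as possible" (-1), then start the graph - the missing step.
AudioTimeStamp startTime = {0};
startTime.mFlags      = kAudioTimeStampSampleTimeValid;
startTime.mSampleTime = -1;
AudioUnitSetProperty (playerUnit, kAudioUnitProperty_ScheduleStartTimeStamp,
                      kAudioUnitScope_Global, 0, &startTime, sizeof (startTime));

AUGraphStart (graph);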