我不完全確定這可以單獨使用AVFoundation來實現,您可能需要使用AudioUnit框架並創建一個流。將.WAV文件的內容發送到音頻緩衝區應該相對簡單。
這就是我一直在Piti Piti Pa做的。另一個好處是你可以更好地控制音頻延遲,以同步音頻和視頻動畫(使用藍牙時更明顯)。
下面是我用來初始化音頻單元的代碼(如果將文件導出為RAW格式,處理起來會更容易):
/// Locates the RemoteIO output audio component and instantiates it into
/// the class-level `_audioUnit`.
/// @return YES if the audio unit was created successfully.
+ (BOOL)_createAudioUnitInstance
{
    // Describe the component we want: Apple's RemoteIO output unit.
    AudioComponentDescription description = {
        .componentType         = kAudioUnitType_Output,
        .componentSubType      = kAudioUnitSubType_RemoteIO,
        .componentFlags        = 0,
        .componentFlagsMask    = 0,
        .componentManufacturer = kAudioUnitManufacturer_Apple,
    };

    // Find the first matching component and create an instance of it.
    AudioComponent component = AudioComponentFindNext(NULL, &description);
    OSStatus status = AudioComponentInstanceNew(component, &_audioUnit);
    [self _logStatus:status step:@"instantiate"];
    return (status == noErr);
}
/// Enables audio output (speaker) I/O on the unit's output bus.
/// @return YES if the property was set successfully.
+ (BOOL)_setupAudioUnitOutput
{
    UInt32 enableOutput = 1;  // nonzero = enable I/O on this bus
    OSStatus status = AudioUnitSetProperty(_audioUnit,
                                           kAudioOutputUnitProperty_EnableIO,
                                           kAudioUnitScope_Output,
                                           _outputAudioBus,
                                           &enableOutput,
                                           sizeof(enableOutput));
    [self _logStatus:status step:@"set output bus"];
    return (status == noErr);
}
/// Sets the stream format we will supply to the unit for playback:
/// 44.1 kHz, 16-bit signed integer, packed, interleaved stereo LPCM.
/// The format is applied to the INPUT scope of the output bus, i.e. the
/// side of the bus our render callback feeds.
/// @return YES if the format was applied successfully.
+ (BOOL)_setupAudioUnitFormat
{
    AudioStreamBasicDescription streamFormat = {
        .mSampleRate       = 44100.00,
        .mFormatID         = kAudioFormatLinearPCM,
        .mFormatFlags      = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagIsPacked,
        .mFramesPerPacket  = 1,
        .mChannelsPerFrame = 2,   // stereo
        .mBitsPerChannel   = 16,
        .mBytesPerPacket   = 4,   // 2 channels * 2 bytes per sample
        .mBytesPerFrame    = 4,   // interleaved: one frame = one packet here
    };

    OSStatus status = AudioUnitSetProperty(_audioUnit,
                                           kAudioUnitProperty_StreamFormat,
                                           kAudioUnitScope_Input,
                                           _outputAudioBus,
                                           &streamFormat,
                                           sizeof(streamFormat));
    [self _logStatus:status step:@"set audio format"];
    return (status == noErr);
}
/// Installs `playbackCallback` as the render callback that pulls audio
/// frames for the output bus. The class object itself is passed as the
/// callback's refCon.
/// @return YES if the callback was registered successfully.
+ (BOOL)_setupAudioUnitRenderCallback
{
    AURenderCallbackStruct renderCallback = {
        .inputProc       = playbackCallback,
        .inputProcRefCon = (__bridge void *)(self),
    };
    OSStatus status = AudioUnitSetProperty(_audioUnit,
                                           kAudioUnitProperty_SetRenderCallback,
                                           kAudioUnitScope_Global,
                                           _outputAudioBus,
                                           &renderCallback,
                                           sizeof(renderCallback));
    [self _logStatus:status step:@"set render callback"];
    return (status == noErr);
}
/// Initializes the configured audio unit; must be called after all
/// properties (I/O, format, callback) are set and before starting it.
/// @return YES if initialization succeeded.
+ (BOOL)_initializeAudioUnit
{
    OSStatus result = AudioUnitInitialize(_audioUnit);
    [self _logStatus:result step:@"initialize"];
    return (result == noErr);
}
/// Public entry point: resets the queued audio feeds and starts playback.
/// NOTE(review): `clearFeeds` is defined elsewhere in this file — presumably
/// it empties any pending audio buffers; confirm before relying on it.
+(void)start
{
[self clearFeeds];
[self _startAudioUnit];
}
/// Public entry point: stops audio playback.
+(void)stop
{
[self _stopAudioUnit];
}
/// Starts the output unit; the render callback begins firing after this.
/// @return YES if the unit started successfully.
+ (BOOL)_startAudioUnit
{
    OSStatus result = AudioOutputUnitStart(_audioUnit);
    [self _logStatus:result step:@"start"];
    return (result == noErr);
}
/// Stops the output unit; the render callback stops firing after this.
/// @return YES if the unit stopped successfully.
+ (BOOL)_stopAudioUnit
{
    OSStatus result = AudioOutputUnitStop(_audioUnit);
    [self _logStatus:result step:@"stop"];
    return (result == noErr);
}
/// Logs a failure for the given setup step; silent on success (noErr).
/// @param status The OSStatus returned by an AudioToolbox call.
/// @param step   A short human-readable name of the step that ran.
+ (void)_logStatus:(OSStatus)status step:(NSString *)step
{
    if (status == noErr) {
        return;  // success — nothing to report
    }
    NSLog(@"AudioUnit failed to %@, error: %d", step, (int)status);
}
#pragma mark - Mixer
// Render callback invoked by the RemoteIO unit on the real-time audio thread
// whenever it needs `inNumberFrames` frames of output audio.
// NOTE(review): sending an Objective-C message ([SIOAudioMixer ...]) from a
// real-time render callback risks priority inversion / glitches if the mixer
// locks or allocates — confirm _generateAudioFrames:into: is lock-free.
static OSStatus playbackCallback(void *inRefCon,
AudioUnitRenderActionFlags *ioActionFlags,
const AudioTimeStamp *inTimeStamp,
UInt32 inBusNumber,
UInt32 inNumberFrames,
AudioBufferList *ioData) {
@autoreleasepool {
// Only the first buffer is filled. With the interleaved stereo format set
// by _setupAudioUnitFormat there is a single buffer, so that suffices;
// this would NOT cover a non-interleaved (multi-buffer) format.
AudioBuffer *audioBuffer = ioData->mBuffers;
// NOTE(review): _lastPushedFrame/_nextFrame are defined elsewhere in this
// file — presumably frame counters used for audio/video sync; confirm.
_lastPushedFrame = _nextFrame;
// The mixer writes inNumberFrames frames directly into the output buffer.
[SIOAudioMixer _generateAudioFrames:inNumberFrames into:audioBuffer->mData];
}
return noErr;
}
現在你只需要提取.wav文件的內容,並通過回調將其發送到緩衝區。
我希望有幫助!
在第一個NSLog處,你有看到任何錯誤嗎?我的意思是,音頻播放器是否已成功初始化?你確定'soundFile'參數不是nil嗎? – 2013-04-28 18:54:12
audioplayer inits沒有任何錯誤。是的,soundFile不是零(使用調試器來檢查這一點,並在前臺使用與應用程序完全相同的文件)。 – itsame69 2013-04-28 19:25:55
「AVAudioSession」的'error'對象的值是多少?你在沒有檢查錯誤的情況下就假設音頻會話已成功啟動。另外,你是只設置一次,還是每次播放音頻時都重新設置?應該只需要在應用程序委託中設置一次。 – iwasrobbed 2013-04-28 23:19:53