2014-09-25 71 views
3

我在服務器上記錄數據並立即將它們發送到客戶端。客戶端像這樣接收 UDP 數據包(原問題:如何在 iOS 上直接播放來自 UDP 流(NSData)的原始音頻):

// GCDAsyncUdpSocket delegate callback, invoked once per received UDP datagram.
// `data` carries the raw packet payload (here: a chunk of the audio stream).
// NOTE(review): this runs on the socket's delegate queue, not the main thread
// — confirm any playback hand-off is thread-safe.
- (void)udpSocket:(GCDAsyncUdpSocket *)sock didReceiveData:(NSData *)data fromAddress:(NSData *)address withFilterContext:(id)filterContext
{
    // Drop packets that arrive after the stream has been stopped.
    if (!isRunning) return;

    if (data)
    {
        // Raw audio bytes received — feed them to the playback pipeline here.
    }
    else
    {
        // No payload — nothing to enqueue.
    }
}

現在的原始數據是數據變量。我想立即播放它。我真的坐在這個問題上,就像2天...我只想簡單的事情像Java中的音頻軌道。我讀了很多關於音頻隊列等,但仍然不理解它。你能給我一個提示,但請以代碼的形式。在我看來,我檢查了每個網站 - .-尋找每個例子,但不明白他們。回調函數在一些緩衝區被填充後開始(在很多例子中),但我不明白我可以如何使用NSData填充它們。

回答

0

我有興趣聽到這個問題的答案。我的解決方案是在iOS中使用OpenAL製作我自己的音頻服務器,OpenAL開箱即可呈現音頻緩衝區 - 基本上只有一個線程處理消耗從服務器發送的音頻流 - 另一個線程用於運行您自己的OpenAL服務器我在這裏概述:

#import <OpenAL/al.h> 
#import <OpenAL/alc.h> 
#import <AudioToolbox/ExtendedAudioFile.h> 


// Set up the OpenAL playback pipeline: open the default device, create and
// activate a context, allocate the pool of reusable buffers and the single
// streaming source, reset the pool index, then prime the first audio buffer.
// Must run once before inner_run starts consuming audio.
-(void) init_openal {

    // Open the default audio device; bail out early on failure.
    openal_device = alcOpenDevice(NULL);
    if (openal_device == NULL) {
        NSLog(@"STR_OPENAL ERROR - failed to get audio device");
        return;
    }

    // Create a context on that device; bail out early on failure.
    openal_context = alcCreateContext(openal_device, 0);
    if (openal_context == NULL) {
        NSLog(@"STR_OPENAL ERROR - failed to create context");
        return;
    }

    // Make the freshly created context current for all subsequent AL calls.
    alcMakeContextCurrent(openal_context);

    // Allocate the fixed-size pool of AL buffers and one streaming source.
    alGenBuffers(MAX_OPENAL_QUEUE_BUFFERS, available_AL_buffer_array);
    alGenSources(1, & streaming_source);

    printf("STR_OPENAL streaming_source starts with %u\n", streaming_source);
    printf("STR_OPENAL initialization of available_AL_buffer_array_curr_index to 0\n");

    // Start with an empty queue and no previously-uploaded LPCM chunk.
    available_AL_buffer_array_curr_index = 0;
    self.previous_local_lpcm_buffer = 0;

    // Prime the pipeline with the first chunk of audio.
    [self get_next_buffer];

} // init_openal



// calling init and retrieving buffers logic left out goes here 

// One iteration of the audio render loop. Phase 1 reclaims OpenAL buffers the
// source has finished playing back into the local free pool. Phase 2 then
// either sleeps (pool exhausted: every AL buffer is queued on the source) or
// uploads the next LPCM chunk, queues it, restarts a stalled source, and
// either signals end-of-stream to the parent or requests the next chunk.
// Intended to be called repeatedly on a dedicated thread until
// flag_continue_running is cleared.
-(void) inner_run {

ALenum al_error;

// UN queue used buffers: reclaim every buffer the source reports as processed

ALint buffers_processed = 0;

alGetSourcei(streaming_source, AL_BUFFERS_PROCESSED, & buffers_processed); // get source parameter num used buffs

while (buffers_processed > 0) {  // we have a consumed buffer so we need to replenish

    NSLog(@"STR_OPENAL inner_run seeing consumed buffer");

    ALuint unqueued_buffer;

    alSourceUnqueueBuffers(streaming_source, 1, & unqueued_buffer);

    // about to decrement available_AL_buffer_array_curr_index
    // NOTE(review): assumes curr_index > 0 whenever the source reports a
    // processed buffer; if the bookkeeping ever drifts this underflows the
    // pool index — confirm the invariant holds.

    available_AL_buffer_array_curr_index--;

    printf("STR_OPENAL to NEW %d with unqueued_buffer %d\n",
      available_AL_buffer_array_curr_index,
      unqueued_buffer);

    // Return the reclaimed AL buffer id to the free pool slot just vacated.
    available_AL_buffer_array[available_AL_buffer_array_curr_index] = unqueued_buffer;

    buffers_processed--;
}

// queue UP fresh buffers

if (available_AL_buffer_array_curr_index >= MAX_OPENAL_QUEUE_BUFFERS) {

    // Pool exhausted: every AL buffer is queued on the source, so back off
    // and let playback drain some before trying again.
    printf("STR_OPENAL about to sleep since internal OpenAL queue is full\n");

    [NSThread sleepUntilDate:[NSDate dateWithTimeIntervalSinceNow: SLEEP_ON_OPENAL_QUEUE_FULL]];

} else {

    NSLog(@"STR_OPENAL YYYYYYYYY available_AL_buffer_array_curr_index %d MAX_OPENAL_QUEUE_BUFFERS %d",
      available_AL_buffer_array_curr_index,
      MAX_OPENAL_QUEUE_BUFFERS
     );

    // Take the next free AL buffer id from the pool (index is NOT advanced
    // until the upload actually happens below).
    ALuint curr_audio_buffer = available_AL_buffer_array[available_AL_buffer_array_curr_index];

    ALsizei size_buff;
    ALenum data_format;
    ALsizei sample_rate;

    // NOTE(review): a fixed MAX_SIZE_CIRCULAR_BUFFER bytes are uploaded every
    // time, regardless of how much audio the current chunk actually holds —
    // confirm local_lpcm_buffer is always at least this large.
    size_buff = MAX_SIZE_CIRCULAR_BUFFER; // works nicely with 1016064

    sample_rate = lpcm_output_sampling_frequency;
    data_format = AL_FORMAT_STEREO16;  // AL_FORMAT_STEREO16 == 4355 (0x1103) --- AL_FORMAT_MONO16

    printf("STR_OPENAL curr_audio_buffer is %u data_format %u size_buff %u\n",
      curr_audio_buffer,
      data_format,
      size_buff
      );


    // write_output_file([TS_ONLY_delete_this_var_temp_aif_fullpath
    // cStringUsingEncoding:NSUTF8StringEncoding], curr_lpcm_buffer,
    // curr_lpcm_buffer_sizeof);


    // Skip the upload when the producer has not delivered a new chunk yet
    // (same pointer as last iteration) to avoid re-queueing stale audio.
    if (self.local_lpcm_buffer == self.previous_local_lpcm_buffer) {

     printf("STR_OPENAL NOTICE - need to throttle up openal sleep duration seeing same value for local_lpcm_buffer %d - so will skip loading into alBufferData\n",
       (int) self.local_lpcm_buffer);

    } else {


     NSLog(@"STR_OPENAL about to call alBufferData curr_audio_buffer %d local_lpcm_buffer address %d local_aac_index %d",
       curr_audio_buffer,
       (int) self.local_lpcm_buffer,
       self.local_aac_index);

     // copy audio data into curr_buffer

     alBufferData(curr_audio_buffer, data_format, self.local_lpcm_buffer, size_buff, sample_rate); // curr_audio_buffer is an INT index determining which buffer to use

     // Remember this chunk so the dedup check above can detect a repeat.
     self.previous_local_lpcm_buffer = self.local_lpcm_buffer;

     alSourceQueueBuffers(streaming_source, 1, & curr_audio_buffer);

     printf("STR_OPENAL about to increment available_AL_buffer_array_curr_index from OLD %d",
      available_AL_buffer_array_curr_index);

     // Advance the pool index only now that the buffer is actually queued.
     available_AL_buffer_array_curr_index++;

     printf("STR_OPENAL available_AL_buffer_array_curr_index to NEW %d\n", available_AL_buffer_array_curr_index);
    }

    // Check for errors from the queueing step above.
    al_error = alGetError();
    if(AL_NO_ERROR != al_error)
    {
     NSLog(@"STR_OPENAL ERROR - alSourceQueueBuffers error: %s", alGetString(al_error));
     return;
    }

    ALenum current_playing_state;

    alGetSourcei(streaming_source, AL_SOURCE_STATE, & current_playing_state); // get source parameter STATE

    al_error = alGetError();
    if(AL_NO_ERROR != al_error)
    {
     NSLog(@"STR_OPENAL ERROR - alGetSourcei error: %s", alGetString(al_error));
     return;
    }

    // The source stops itself when it runs dry; restart it whenever buffers
    // are queued again (unless the user has paused the stream).
    if (AL_PLAYING != current_playing_state) {

     ALint buffers_queued = 0;

     alGetSourcei(streaming_source, AL_BUFFERS_QUEUED, & buffers_queued); // get source parameter num queued buffs

     NSLog(@"STR_OPENAL NOTICE - play is NOT AL_PLAYING: %x, buffers_queued: %d", current_playing_state, buffers_queued);

     if (buffers_queued > 0 && NO == self.streaming_paused) {

      // restart play

      NSLog(@"STR_OPENAL about to restart play");

      alSourcePlay(streaming_source);

      al_error = alGetError();
      if (AL_NO_ERROR != al_error) {

       NSLog(@"STR_OPENAL ERROR - alSourcePlay error: %s", alGetString(al_error));
      }
     }
    }


    // End-of-stream: the producer has no more chunks (indices match) and the
    // source has drained every queued buffer back into the pool.
    if (self.last_aac_index == self.local_aac_index && available_AL_buffer_array_curr_index == 0) {

     NSLog(@"STR_OPENAL reached end of event tell parent");

     [self send_running_condition_message_to_parent: rendered_last_buffer];

     flag_continue_running = false; // terminate since all rendering work is done

    } else {

     // Ask the producer for the next chunk of audio.
     [self get_next_buffer];
    }
}
}  //  inner_run
+0

謝謝你的代碼 :D 但 OpenAL 的 API 更難理解。所以你有你的 streaming_source 並把它傳遞給 unqueued_buffer?我嘗試瞭解這些代碼,但我對 OpenAL 的經驗很少(幾乎為零)。有沒有「更容易」的建議? – Tony 2014-09-25 17:15:29

0

我認爲這裏沒有現成的 iOS 解決方案。嘗試深入研究 CoreAudio 框架,或者尋找一些現成的第三方庫,例如 StreamingKit Library。