2011-03-04 44 views
3

我試圖讓音頻與iOS應用程序的視頻一起工作。視頻很好。沒有音頻記錄到文件AVAssetWritter無法使用音頻

這裏的初始化設置(我的iPhone揚聲器的工作原理。):

session = [[AVCaptureSession alloc] init]; 
    menu->session = session; 
    menu_open = NO; 
    session.sessionPreset = AVCaptureSessionPresetMedium; 
    camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
    microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 
    menu->camera = camera; 
    [session beginConfiguration]; 
    [camera lockForConfiguration:nil]; 
    if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){ 
     camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure; 
    } 
    if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){ 
     camera.focusMode = AVCaptureFocusModeContinuousAutoFocus; 
    } 
    if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){ 
     camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance; 
    } 
    if ([camera hasTorch]) { 
     if([camera isTorchModeSupported:AVCaptureTorchModeOn]){ 
      [camera setTorchMode:AVCaptureTorchModeOn]; 
     } 
    } 
    [camera unlockForConfiguration]; 
    [session commitConfiguration]; 
    AVCaptureDeviceInput * camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil]; 
    [session addInput:camera_input]; 
    microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain]; 
    AVCaptureVideoDataOutput * output = [[[AVCaptureVideoDataOutput alloc] init] autorelease]; 
    output.videoSettings = [NSDictionary dictionaryWithObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 
    [session addOutput:output]; 
    output.minFrameDuration = CMTimeMake(1,30); 
    dispatch_queue_t queue = dispatch_queue_create("MY QUEUE", NULL); 
    [output setSampleBufferDelegate:self queue:queue]; 
    dispatch_release(queue); 
    audio_output = [[[AVCaptureAudioDataOutput alloc] init] retain]; 
    queue = dispatch_queue_create("MY QUEUE", NULL); 
    AudioOutputBufferDelegate * special_delegate = [[[AudioOutputBufferDelegate alloc] init] autorelease]; 
    special_delegate->normal_delegate = self; 
    [special_delegate retain]; 
    [audio_output setSampleBufferDelegate:special_delegate queue:queue]; 
    dispatch_release(queue); 
    [session startRunning]; 

這裏是開始和結束記錄:

if (recording) { //Hence stop recording 
    [video_button setTitle:@"Video" forState: UIControlStateNormal]; 
    recording = NO; 
    [writer_input markAsFinished]; 
    [audio_writer_input markAsFinished]; 
    [video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)]; 
    [video_writer finishWriting]; 
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url,self,@selector(video:didFinishSavingWithError:contextInfo:),nil); 
    [start_time release]; 
    [temp_url release]; 
    [av_adaptor release]; 
    [microphone lockForConfiguration:nil]; 
    [session beginConfiguration]; 
    [session removeInput:microphone_input]; 
    [session removeOutput:audio_output]; 
    [session commitConfiguration]; 
    [microphone unlockForConfiguration]; 
    [menu restateConfigiration]; 
    [vid_off play]; 
}else{ //Start recording 
    [vid_on play]; 
    [microphone lockForConfiguration:nil]; 
    [session beginConfiguration]; 
    [session addInput:microphone_input]; 
    [session addOutput:audio_output]; 
    [session commitConfiguration]; 
    [microphone unlockForConfiguration]; 
    [menu restateConfigiration]; 
    [video_button setTitle:@"Stop" forState: UIControlStateNormal]; 
    recording = YES; 
    NSError *error = nil; 
    NSFileManager * file_manager = [[NSFileManager alloc] init]; 
    temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"]; 
    [file_manager removeItemAtPath: temp_url error:NULL]; 
    [file_manager release]; 
    video_writer = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:temp_url] fileType: AVFileTypeMPEG4 error: &error]; 
    NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:360], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil]; 
    writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:video_settings] retain]; 
    AudioChannelLayout acl; 
    bzero(&acl, sizeof(acl)); 
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; 
    audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings: [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,[NSNumber numberWithInt: 64000], AVEncoderBitRateKey,[NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey,nil]] retain]; 
    audio_writer_input.expectsMediaDataInRealTime = YES; 
    av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writer_input sourcePixelBufferAttributes:NULL] retain]; 
    [video_writer addInput:writer_input]; 
    [video_writer addInput: audio_writer_input]; 
    [video_writer startWriting]; 
    [video_writer startSessionAtSourceTime: CMTimeMake(0,1)]; 
    start_time = [[NSDate alloc] init]; 
} 

這裏是代表對於音頻:

@implementation AudioOutputBufferDelegate 
    -(void)captureOutput: (AVCaptureOutput *) captureOutput didOutputSampleBuffer: (CMSampleBufferRef) sampleBuffer fromConnection: (AVCaptureConnection *) conenction{ 
     if (normal_delegate->recording) { 
      CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer,CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: normal_delegate->start_time],30)); 
      [normal_delegate->audio_writer_input appendSampleBuffer: sampleBuffer]; 
     } 
    } 
@end 

視頻方法並不重要,因爲它的工作原理。 「restateConfigiration」只是整理會議配置,否則火炬熄滅等:

[session beginConfiguration]; 
    switch (quality) { 
     case Low: 
      session.sessionPreset = AVCaptureSessionPresetLow; 
      break; 
     case Medium: 
      session.sessionPreset = AVCaptureSessionPreset640x480; 
      break; 
    } 
    [session commitConfiguration]; 
    [camera lockForConfiguration:nil]; 
    if([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]){ 
     camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure; 
    } 
    if([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]){ 
     camera.focusMode = AVCaptureFocusModeContinuousAutoFocus; 
    } 
    if([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]){ 
     camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance; 
    } 
    if ([camera hasTorch]) { 
     if (torch) { 
      if([camera isTorchModeSupported:AVCaptureTorchModeOn]){ 
       [camera setTorchMode:AVCaptureTorchModeOn]; 
      } 
     }else{ 
      if([camera isTorchModeSupported:AVCaptureTorchModeOff]){ 
       [camera setTorchMode:AVCaptureTorchModeOff]; 
      } 
     } 
    } 
    [camera unlockForConfiguration]; 

謝謝你的任何幫助。

回答

8

AVAssetWriter and Audio

這可能是由於在鏈接的文章中提到了同樣的問題。嘗試註釋掉這些行

[writer_input markAsFinished]; 
[audio_writer_input markAsFinished]; 
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)]; 

編輯

我不知道,如果你設置了顯示時間標記的方法是一定是錯誤的。我處理這個問題的方法是在開始時將本地變量設置爲0。然後,當我委託收到的第一個包我做的:

if (_startTime.value == 0) { 
    _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
} 

然後

[bufferWriter->writer startWriting]; 
[bufferWriter->writer startSessionAtSourceTime:_startTime]; 

你的代碼看起來有效,你是計算每個接收數據包的時間差。但是,AVFoundation會爲您計算此值,並優化在交錯容器中放置的時間戳。我不確定的另一件事是每個用於音頻的CMSampleBufferRef包含多於1個數據緩衝區,其中每個數據緩衝區都有它自己的PTS。我不確定設置PTS是否會自動調整所有其他數據緩衝區。

我的代碼與您的代碼不同的地方在於,我爲音頻和視頻使用了單個調度隊列。在我使用的回調中(刪除了一些代碼)。

switch (bufferWriter->writer.status) { 
    case AVAssetWriterStatusUnknown: 

     if (_startTime.value == 0) { 
      _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
     } 

     [bufferWriter->writer startWriting]; 
     [bufferWriter->writer startSessionAtSourceTime:_startTime]; 

     //Break if not ready, otherwise fall through. 
     if (bufferWriter->writer.status != AVAssetWriterStatusWriting) { 
      break ; 
     } 

    case AVAssetWriterStatusWriting: 
     if(captureOutput == self.captureManager.audioOutput) { 
       if(!bufferWriter->audioIn.readyForMoreMediaData) { 
        break; 
       } 

       @try { 
        if(![bufferWriter->audioIn appendSampleBuffer:sampleBuffer]) { 
         [self delegateMessage:@"Audio Writing Error" withType:ERROR]; 
        } 
       } 
       @catch (NSException *e) { 
        NSLog(@"Audio Exception: %@", [e reason]); 
       } 
     } 
     else if(captureOutput == self.captureManager.videoOutput) { 

      if(!bufferWriter->videoIn.readyForMoreMediaData) { 
       break;; 
      } 

      @try { 
       if (!frontCamera) { 
        if(![bufferWriter->videoIn appendSampleBuffer:sampleBuffer]) { 
         [self delegateMessage:@"Video Writing Error" withType:ERROR]; 
        } 
       } 
       else { 
        CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 

        flipBuffer(sampleBuffer, pixelBuffer); 

        if(![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt]) { 
         [self delegateMessage:@"Video Writing Error" withType:ERROR]; 
        } 
       } 

      } 
      @catch (NSException *e) { 
       NSLog(@"Video Exception Exception: %@", [e reason]); 
      } 
     } 

     break; 
    case AVAssetWriterStatusCompleted: 
     return; 
    case AVAssetWriterStatusFailed: 
     [self delegateMessage:@"Critical Error Writing Queues" withType:ERROR]; 
     bufferWriter->writer_failed = YES ; 
     _broadcastError = YES; 
     [self stopCapture] ; 
     return; 
    case AVAssetWriterStatusCancelled: 
     break; 
    default: 
     break; 
} 
+1

謝謝你的回答。我刪除了這些行,它的工作方式就像以前一樣。沒有音頻仍然。 – 2011-03-05 20:12:46

+0

看起來一目瞭然。唯一不確定的是你如何處理樣本緩衝區的PTS。 – 2011-03-05 21:48:08

+0

對不起,什麼是PTS? – 2011-03-05 22:01:08

相關問題