3

我試圖使用AVAssetWriter,它也接受音頻輸入進行屏幕錄製。不過,我一直停留在這個錯誤,其中AVAssetWriter有時候會後,打了幾個電話上appendSampleBuffer:(內encodeAudioFrame:AVAssetWriter去appendSampleBuffer後的AVAssetWriterStatusFailed:

Failed: Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo=0x32b570 {NSLocalizedDescription=The operation could not be completed, NSUnderlyingError=0x70d710 "The operation couldn’t be completed. (OSStatus error -12737.)", NSLocalizedFailureReason=An unknown error occurred (-12737)} 

一些觀察AVAssetWriterStatusFailed:

  • 一旦進入這種狀態,隨後的記錄嘗試即使我使用不同的記錄器對象,也會返回AVAssetWriterStatusFailed。
  • 當我將音頻記錄塊註釋掉時,錯誤不會顯示。
  • 但是,當我註釋掉視頻記錄塊並且不修改任何傳入的CMSampleBufferRef時,錯誤依然出現。

任何援助將不勝感激。

下面是我使用的代碼,爲簡潔起見省略了幾個部分。我正在使用OSX 10.9 SDK,並關閉了ARC。

- (BOOL) startRecording 
{ 
    if (!isRecording) 
    { 
     dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ 
      [self startCapture]; 

      [self setUpWriter]; 

      startedAt = [NSDate date]; 
      isRecording = YES; 

      while (isRecording) 
      { 
       NSAutoreleasePool* pool = [NSAutoreleasePool new]; 

       NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt]; 

       CMTime tiem = CMTimeMakeWithSeconds(offset - pauseDelta, 1000); 

       [self encodeFrameAtTime:tiem]; 

       [pool drain]; 

       sleep(0.05f); 
      } 

      [self endCapture]; 

      [self completeRecordingSession]; 
     }); 
    } 

    return YES; 
} 

- (void) stopRecording { 
    isRecording = NO; 
} 

-(void) startCapture 
{ 
    AVCaptureDevice* microphone = x //Device selection code omitted 

    videoCaptureSession = [[AVCaptureSession alloc] init]; 

    videoCaptureSession.sessionPreset = AVCaptureSessionPresetHigh; 

    //------------------------------------------ 

    NSError* err = nil; 

    audioInput = [AVCaptureDeviceInput deviceInputWithDevice:microphone error:&err]; 

    [videoCaptureSession addInput:audioInput]; 

    //------------------------------------------ 

    audioOutput = [[AVCaptureAudioDataOutput alloc] init]; 

    queue = dispatch_queue_create("videoQueue", NULL); 

    [audioOutput setSampleBufferDelegate:self queue:queue]; 

    [videoCaptureSession addOutput:audioOutput]; 

    audioDelta = -1; 
    [videoCaptureSession startRunning]; 
} 


-(void) endCapture 
{ 
    [videoCaptureSession stopRunning]; 

    [videoCaptureSession removeInput:audioInput]; 
    [videoCaptureSession removeOutput:audioOutput]; 

    [audioOutput release]; 
    audioOutput = nil; 

    audioInput = nil; 

    [videoCaptureSession release]; 
    videoCaptureSession = nil; 

    dispatch_release(queue); 
} 

-(BOOL) setUpWriter 
{ 
    //delete the file. 
    { 
     NSFileManager* fileManager = [NSFileManager defaultManager]; 
     if ([fileManager fileExistsAtPath:self.moviePath]) { 
      NSError* error; 
      if ([fileManager removeItemAtPath:self.moviePath error:&error] == NO) { 
       NSLog(@"Could not delete old recording file at path: %@", self.moviePath); 
      } 
     } 
    } 

    mCaptureRect = NSRectToCGRect([screen frame]); 

    int FWidth = mCaptureRect.size.width; 
    int FHeight = mCaptureRect.size.height; 

    int bitRate = FWidth * FHeight * 8; 

    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:self.moviePath] fileType:AVFileTypeMPEG4 error:nil]; 
    NSParameterAssert(videoWriter); 

    //Configure video 
    NSDictionary *codecSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
    [NSNumber numberWithInt:bitRate], AVVideoAverageBitRateKey, 
    nil]; 

    NSDictionary* videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
            AVVideoCodecH264, AVVideoCodecKey, 
            codecSettings,AVVideoCompressionPropertiesKey, 
            [NSNumber numberWithInt:FWidth], AVVideoWidthKey, 
            [NSNumber numberWithInt:FHeight], AVVideoHeightKey, 
            nil]; 

    videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; 

    NSParameterAssert(videoWriterInput); 
    videoWriterInput.expectsMediaDataInRealTime = YES; 
    NSDictionary* bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys: 
             [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, 
             [NSNumber numberWithInt:FWidth], kCVPixelBufferWidthKey, 
             [NSNumber numberWithInt:FHeight], kCVPixelBufferHeightKey, 
             nil]; 

    avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput sourcePixelBufferAttributes:bufferAttributes]; 

    //* 
    //Configure Audio 
    AudioChannelLayout acl; 
    bzero(&acl, sizeof(acl)); 
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono; 

    NSDictionary* audioSettings = [ NSDictionary dictionaryWithObjectsAndKeys: 
            [ NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey, 
            [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey, 
            [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey, 
            [ NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey, 
            [NSNumber numberWithInt:64000], AVEncoderBitRateKey, 
            nil ]; 

    audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioSettings]; 
    audioWriterInput.expectsMediaDataInRealTime = YES; 

    //add input 
    [videoWriter addInput:videoWriterInput]; 
    [videoWriter addInput:audioWriterInput]; 

    return YES; 
} 

- (void) cleanupWriter { 
    [videoWriter release]; 
    videoWriter = nil; 
    avAdaptor = nil; 
    videoWriterInput = nil; 
    startedAt = nil; 
    audioWriterInput = nil; 
} 

- (void) encodeFrameAtTime:(CMTime)timestamp 
{ 
    if(!isRecording) return; 

    if(videoWriter == nil) return; 

    if(videoWriter.status == AVAssetWriterStatusFailed) 
    { 
     return; 
    } 

    if(videoWriter.status != AVAssetWriterStatusWriting) 
    { 
     if(videoWriter.status != AVAssetWriterStatusUnknown) 
      return; 

     [videoWriter startWriting]; 
     [videoWriter startSessionAtSourceTime:timestamp]; 

     startTime = CMTimeGetSeconds(timestamp); 
    } 

    timestamp = CMTimeMakeWithSeconds(startTime + CMTimeGetSeconds(timestamp), 1000); 

    [self writeVideoFrameAtTime:timestamp]; 
} 

-(void) writeVideoFrameAtTime:(CMTime)time { 
    if (![videoWriterInput isReadyForMoreMediaData]) 
    { 
    } 
    else 
    { 
     /* 
     CVPixelBufferRef manipulation omitted... 
     */ 

     { 
      BOOL success = [avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time]; 

      if(videoWriter.status == AVAssetWriterStatusFailed) NSLog(@"Failed: %@", videoWriter.error); 
      if (!success) NSLog(@"Warning: Unable to write buffer to video"); 
     } 

     CVPixelBufferRelease(pixelBuffer); 

     CGImageRelease(cgImage); 
    } 
} 

-(void) encodeAudioFrame:(CMSampleBufferRef)buffer 
{ 
    if(!isRecording) return; 

    CMTime timestamp = CMSampleBufferGetPresentationTimeStamp(buffer); 

    if(videoWriter.status != AVAssetWriterStatusWriting) 
    { 
     //Wait for video thread to start the writer 
     return; 
    } 

    if(![audioWriterInput isReadyForMoreMediaData]) 
     return; 

    //* 
    NSTimeInterval offset = [[NSDate date] timeIntervalSinceDate:startedAt]; 

    if(audioDelta == -1) 
    { 
     audioDelta = offset - CMTimeGetSeconds(timestamp); 
    } 

    //Adjusts CMSampleBufferRef's timestamp to match the video stream's zero-based timestamp 
    CMItemCount count; 
    CMTime newTimestamp = CMTimeMakeWithSeconds(CMTimeGetSeconds(timestamp) + audioDelta - pauseDelta, 1000); 

    CMSampleBufferGetSampleTimingInfoArray(buffer, 0, nil, &count); 
    CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count); 
    CMSampleBufferGetSampleTimingInfoArray(buffer, count, pInfo, &count); 

    for(CMItemCount i = 0; i < count; i++) 
    { 
     pInfo[i].decodeTimeStamp = newTimestamp; 
     pInfo[i].presentationTimeStamp = newTimestamp; 
    } 

    CMSampleBufferRef newBuffer; 
    CMSampleBufferCreateCopyWithNewTiming(kCFAllocatorDefault, buffer, count, pInfo, &newBuffer); 
    free(pInfo); 

    timestamp = CMSampleBufferGetPresentationTimeStamp(newBuffer); 

    BOOL res = [audioWriterInput appendSampleBuffer:newBuffer];  
} 

- (void) completeRecordingSession { 
    @autoreleasepool { 
     if(videoWriter.status != AVAssetWriterStatusWriting) 
     { 
      while (videoWriter.status == AVAssetWriterStatusUnknown) 
      { 
       [NSThread sleepForTimeInterval:0.5f]; 
      } 

      int status = videoWriter.status; 

      while (status == AVAssetWriterStatusUnknown) 
      { 
       NSLog(@"Waiting..."); 
       [NSThread sleepForTimeInterval:0.5f]; 
       status = videoWriter.status; 
      } 
     } 

     @synchronized(self) 
     { 
      [videoWriter finishWriting]; 
      [self cleanupWriter]; 
     } 
    } 
} 

-(void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    if(!CMSampleBufferDataIsReady(sampleBuffer)) 
     return; 

    @autoreleasepool { 
     if(captureOutput == audioOutput) 
     { 
      if(isRecording && !isPaused) 
      { 
       [self encodeAudioFrame:sampleBuffer]; 
      } 
     } 
    } 
} 
+0

您的控制檯是否向您發送了垃圾郵件,說明您收到錯誤-12737,也稱爲kCMSampleBufferError_ArrayTooSmall?如果是這樣,我有相同的錯誤:(將報告回來,如果我得到任何東西... – nevyn

+1

你能解決這個問題嗎?我有同樣的問題。 – omarojo

+0

同樣的問題在這裏以及。@omarojo *你*能夠解決這個問題? – user1244109

回答

0

我和我的swift代碼有完全相同的問題。我發現我的電腦只是用完了內存。所以仔細檢查你是否有足夠的免費公羊。