2012-11-11 24 views
1

我試圖結合 asset writer 編碼(encoding)示例與 pixelBufferFromCGImage 示例,在導出的 AVAsset 上覆蓋一張 UIImage。問題:AVAssetWriter 與 AVAssetWriterInputPixelBufferAdaptor 組合使用時,appendPixelBuffer 調用失敗。

的問題是,儘管這個調用

[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; 

導出 AVAsset 的實際結果被破壞,產生(yielding)意想不到的文件大小,後續訪問時報錯「該媒體可能已損壞」。

導出本身,如果我避免調用 appendPixelBuffer,則如預期一樣成功。但無論是在派遣隊列(dispatch queue)之前還是在其內部調用 appendPixelBuffer,都會遇到同樣的故障。

希望不會在這裏重複發佈,但其他樣本在堆棧溢出似乎並未解決此特定組合故障。謝謝,下面

導出代碼:

// ---- Export pipeline: re-encode sourceAsset's video track, pass audio ----
// ---- through, and prepend one still image via a pixel-buffer adaptor. ----
// NOTE(review): relies on outputUrl, GroupDiskManager, ImageToMovieManager
// and VideoManager declared elsewhere in the project.
AVAsset *sourceAsset = [AVAsset assetWithURL:outputUrl]; 

NSError *error = nil; 

// Build a unique destination movie path inside the app's shared folder.
NSString *fileName = [NSString stringWithFormat:@"non_transform_%f.mov", [[NSDate date] timeIntervalSince1970]]; 
NSString *combinedPath = [NSString stringWithFormat:@"file://localhost%@/%@", [[GroupDiskManager sharedManager] getFolderPath], fileName]; 

NSURL *outputURL = [NSURL URLWithString:combinedPath]; 
NSLog(@"combined path: %@", combinedPath); 

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:outputURL fileType:AVFileTypeQuickTimeMovie error:&error]; 


// NOTE(review): objectAtIndex:0 will throw if the asset has no video/audio
// track — worth guarding in production code.
AVAssetTrack *videoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
AVAssetTrack *audioTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 


// H.264 re-encode at a fixed 1280x720 output size.
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
           AVVideoCodecH264, AVVideoCodecKey, 
           [NSNumber numberWithInt:1280], AVVideoWidthKey, 
           [NSNumber numberWithInt:720], AVVideoHeightKey, 
           nil]; 

// NOTE(review): MRC-style retain — this code predates ARC.
AVAssetWriterInput* videoWriterInput = [[AVAssetWriterInput 
             assetWriterInputWithMediaType:AVMediaTypeVideo 
             outputSettings:videoSettings] retain]; 




// Pixel-buffer pool attributes for the adaptor.
// NOTE(review): this declares a bi-planar YUV format, but the buffer appended
// below is produced by drawing with CoreGraphics (an RGB operation). Per the
// accepted answer, kCVPixelFormatType_32BGRA should be used here instead —
// the format mismatch is the likely cause of the corrupted export.
NSMutableDictionary *attributes = [[NSMutableDictionary alloc] init]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:1280] forKey:(NSString*)kCVPixelBufferWidthKey]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:720] forKey:(NSString*)kCVPixelBufferHeightKey]; 


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
               assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput 
               sourcePixelBufferAttributes:attributes]; 


NSParameterAssert(videoWriterInput); 
NSParameterAssert([videoWriter canAddInput:videoWriterInput]); 
// Offline export: the writer input pulls data as fast as it can.
videoWriterInput.expectsMediaDataInRealTime = NO; 
[videoWriter addInput:videoWriterInput]; 

NSError *aerror = nil; 

// Reader decodes the source video track to YUV frames for re-encoding.
NSDictionary *videoOptions = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]; 
AVAssetReaderTrackOutput *asset_reader_output = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions]; 

AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:sourceAsset error:&aerror]; 
[reader addOutput:asset_reader_output]; 



// Audio is passed through untouched (nil outputSettings on both ends).
AVAssetWriterInput* audioWriterInput = [[AVAssetWriterInput 
             assetWriterInputWithMediaType:AVMediaTypeAudio 
             outputSettings:nil] retain]; 
AVAssetReader *audioReader = [[AVAssetReader assetReaderWithAsset:sourceAsset error:&error] retain]; 




AVAssetReaderOutput *audioReaderOutput = [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil]; 
[audioReader addOutput:audioReaderOutput]; 
NSParameterAssert(audioWriterInput); 
NSParameterAssert([videoWriter canAddInput:audioWriterInput]); 
audioWriterInput.expectsMediaDataInRealTime = YES; 
[videoWriter addInput:audioWriterInput]; 
// Session starts at t=0; the still image below is stamped at the same time.
[videoWriter startWriting]; 
[videoWriter startSessionAtSourceTime:kCMTimeZero]; 
[reader startReading]; 


// Append one still-image frame at t=0 ahead of the decoded video frames.
// NOTE(review): the helper draws with CoreGraphics, so the buffer it returns
// must be an RGB format (32BGRA) — see pixelBufferFromCGImage below.
CVPixelBufferRef buffer = [ImageToMovieManager pixelBufferFromCGImage:[UIImage imageNamed:@"234_1280x720_3.jpg"].CGImage size:CGSizeMake(1280, 720)]; 
BOOL theResult = [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; 

if (theResult == NO) // fails on 3GS, but works on iPhone 4 
    NSLog(@"failed to append buffer"); 

if(buffer) { 
    CVBufferRelease(buffer); 
} 




// Pump decoded video frames into the writer on a background serial queue.
dispatch_queue_t _processingQueue = dispatch_queue_create("_processingQueue", NULL); 
[videoWriterInput requestMediaDataWhenReadyOnQueue:_processingQueue usingBlock: 
^{ 
    NSLog(@"requestMediaDataWhenReadyOnQueue"); 

    // NOTE(review): MRC-era self-retain to keep the owner alive for the
    // duration of the async export; must be balanced by a release elsewhere.
    [self retain]; 

    while ([videoWriterInput isReadyForMoreMediaData]) { 



     CMSampleBufferRef sampleBuffer; 
     if ([reader status] == AVAssetReaderStatusReading && 
      (sampleBuffer = [asset_reader_output copyNextSampleBuffer])) { 

      BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer]; 
      CFRelease(sampleBuffer); 

      if (!result) { 
       NSLog(@" result == nil Cancel!"); 
       NSLog(@"videoWriter.error: %@", videoWriter.error); 
       [reader cancelReading]; 
       break; 

      } 
     } else { 
      // No more video samples (or reader stopped): finish the video input
      // and decide what to do based on the reader's terminal status.
      NSLog(@"[videoWriterInput markAsFinished]"); 

      [videoWriterInput markAsFinished]; 

      switch ([reader status]) { 
       case AVAssetReaderStatusReading: 
        NSLog(@"reading"); 
        // the reader has more for other tracks, even if this one is done 
        break; 

       case AVAssetReaderStatusCompleted: 
        NSLog(@"AVAssetReaderStatusCompleted"); 

        // Video done — now stream the audio track the same way.
        [audioReader startReading]; 
        dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL); 
        [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^ 
         { 

          while (audioWriterInput.readyForMoreMediaData) { 
           CMSampleBufferRef nextBuffer; 
           if ([audioReader status] == AVAssetReaderStatusReading && 
            (nextBuffer = [audioReaderOutput copyNextSampleBuffer])) { 
            if (nextBuffer) { 
             [audioWriterInput appendSampleBuffer:nextBuffer]; 
             // NOTE(review): nextBuffer is a copy* return and should be
             // CFRelease'd here — as written it leaks each audio sample.
            } 
           }else{ 
            [audioWriterInput markAsFinished]; 
            switch ([audioReader status]) { 
             case AVAssetReaderStatusCompleted: 
              NSLog(@"AVAssetReaderStatusCompleted!!"); 
              // Both tracks finished: close the file and notify.
              [videoWriter finishWriting]; 
              [VideoManager videoSavedWithURL:outputURL withError:(NSError *)error]; 
              break; 
            } 
           } 
          } 

         } 
         ]; 
        break; 

       case AVAssetReaderStatusFailed: 
        NSLog(@"AVAssetReaderStatusFailed"); 
        [videoWriter cancelWriting]; 
        break; 
      } 
      break; 
     } 
    } 
} 

]; 

pixelBufferFromCGImage 代碼:

// Builds a CVPixelBuffer from a CGImage by drawing the image into a
// CGBitmapContext that is backed directly by the pixel buffer's memory.
// Returns a +1-retained buffer; the caller must CVBufferRelease it.
//
// FIX: the buffer must be created as kCVPixelFormatType_32BGRA, NOT
// kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange. CoreGraphics can only
// render into a single-plane packed RGB surface; a bi-planar YUV buffer has
// two planes and no RGB base address, so drawing into its base address
// produces garbage — which is exactly the corrupted export / "media may be
// damaged" failure described above.
CGSize frameSize = CGSizeMake(CGImageGetWidth(image), CGImageGetHeight(image)); 
// Mark the buffer as CoreGraphics-compatible so CG may draw into it directly.
NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 
         [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 
         nil]; 
CVPixelBufferRef pxbuffer = NULL; 
CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, frameSize.width, 
             frameSize.height, kCVPixelFormatType_32BGRA, (CFDictionaryRef) options, 
             &pxbuffer); 
NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

CVPixelBufferLockBaseAddress(pxbuffer, 0); 
void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 


CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
// Use the buffer's actual bytes-per-row (rows may be padded for alignment)
// instead of assuming 4*width, and a BGRA little-endian / skip-first-alpha
// layout to match kCVPixelFormatType_32BGRA.
CGContextRef context = CGBitmapContextCreate(pxdata, frameSize.width, 
              frameSize.height, 8, CVPixelBufferGetBytesPerRow(pxbuffer), rgbColorSpace, 
              kCGBitmapByteOrder32Little | kCGImageAlphaNoneSkipFirst); 
NSParameterAssert(context != NULL); 

CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), 
             CGImageGetHeight(image)), image); 
CGColorSpaceRelease(rgbColorSpace); 
CGContextRelease(context); 

CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

return pxbuffer; 

回答

0

最起碼的像素格式應被指定爲kCVPixelFormatType_32BGRA不kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange。

相關問題