
I always get "CGImageCreate: invalid image size: 0 x 0" when converting a CMSampleBufferRef to a UIImage:

ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos
                       usingBlock:^(ALAssetsGroup *group, BOOL *stop) {

    // Within the group enumeration block, filter to enumerate just videos.
    [group setAssetsFilter:[ALAssetsFilter allVideos]];

    // For this example, we're only interested in the first item.
    [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0]
                            options:0
                         usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {

        // The end of the enumeration is signaled by asset == nil.
        if (alAsset) {
            ALAssetRepresentation *representation = [[alAsset defaultRepresentation] retain];
            NSURL *url = [representation url];
            AVURLAsset *avAsset = [[AVURLAsset URLAssetWithURL:url options:nil] retain];
            AVAssetReader *assetReader = [[AVAssetReader assetReaderWithAsset:avAsset error:nil] retain];
            NSArray *tracks = [avAsset tracksWithMediaType:AVMediaTypeVideo];
            AVAssetTrack *videoTrack = [tracks objectAtIndex:0];
            AVAssetReaderTrackOutput *assetReaderOutput = [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack outputSettings:nil] retain];
            if (![assetReader canAddOutput:assetReaderOutput]) { printf("could not read reader output\n"); }
            [assetReader addOutput:assetReaderOutput];
            [assetReader startReading];
            CMSampleBufferRef nextBuffer = [assetReaderOutput copyNextSampleBuffer];
            UIImage *image = imageFromSampleBuffer(nextBuffer);
        }
    }];
}
failureBlock:^(NSError *error) { NSLog(@"No groups"); }];

The imageFromSampleBuffer function is taken directly from Apple's sample code:

UIImage* imageFromSampleBuffer(CMSampleBufferRef nextBuffer) { 

CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(nextBuffer); 
printf("total size:%u\n",CMSampleBufferGetTotalSampleSize(nextBuffer)); 
// Lock the base address of the pixel buffer (unlocked again below).
CVPixelBufferLockBaseAddress(imageBuffer, 0);

// Get the number of bytes per row for the pixel buffer. 
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
// Get the pixel buffer width and height. 
size_t width = CVPixelBufferGetWidth(imageBuffer); 
size_t height = CVPixelBufferGetHeight(imageBuffer); 
printf("b:%d w:%d h:%d\n",bytesPerRow,width,height); 

// Create a device-dependent RGB color space. 
static CGColorSpaceRef colorSpace = NULL; 
if (colorSpace == NULL) { 
    colorSpace = CGColorSpaceCreateDeviceRGB(); 
    if (colorSpace == NULL) { 
    // Handle the error appropriately. 
    return nil; 
    } 
} 

// Get the base address of the pixel buffer. 
void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
// Get the data size for contiguous planes of the pixel buffer. 
size_t bufferSize = CVPixelBufferGetDataSize(imageBuffer); 

// Create a Quartz direct-access data provider that uses data we supply. 
CGDataProviderRef dataProvider = 
CGDataProviderCreateWithData(NULL, baseAddress, bufferSize, NULL); 
// Create a bitmap image from data supplied by the data provider. 
CGImageRef cgImage = 
CGImageCreate(width, height, 8, 32, bytesPerRow, 
     colorSpace, kCGImageAlphaNoneSkipFirst | kCGBitmapByteOrder32Little, 
     dataProvider, NULL, true, kCGRenderingIntentDefault); 
CGDataProviderRelease(dataProvider); 

// Create and return an image object to represent the Quartz image. 
UIImage *image = [UIImage imageWithCGImage:cgImage]; 
CGImageRelease(cgImage); 

CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 

return image; 
} 

I tried printing the width and height; it does print out the total size of the sample buffer, so I know the buffer itself exists, but with this AVAssetReaderTrackOutput *assetReaderOutput I never get a UIImage.


Note that I'm not doing any kind of AVAssetWriter (output) here, because I don't need to go that far. – mike 2011-01-09 03:19:05

Answers


Pass explicit output settings to the AVAssetReaderTrackOutput so the reader decodes each sample into a BGRA pixel buffer. With outputSettings:nil the samples are vended in their compressed form, CMSampleBufferGetImageBuffer returns NULL, and that is where the 0 x 0 comes from:

NSMutableDictionary *outputSettings = [NSMutableDictionary dictionary]; 
[outputSettings setObject: [NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; 
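Plugged into the question's setup (a minimal sketch; videoTrack and the surrounding enumeration are as in the original snippet), the track output creation then becomes roughly:

// Pass the settings instead of nil so the track output vends decoded
// BGRA pixel buffers that CMSampleBufferGetImageBuffer can return.
AVAssetReaderTrackOutput *assetReaderOutput =
    [[AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:videoTrack
                                                outputSettings:outputSettings] retain];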

As I understand it, you want to read the first frame from each of your local videos? You can do all of that with a simpler approach.

ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];

// Enumerate just the photos and videos group by using ALAssetsGroupSavedPhotos.
[library enumerateGroupsWithTypes:ALAssetsGroupSavedPhotos
                       usingBlock:^(ALAssetsGroup *group, BOOL *stop) {

    // Within the group enumeration block, filter to enumerate just videos.
    [group setAssetsFilter:[ALAssetsFilter allVideos]];

    // For this example, we're only interested in the first item.
    [group enumerateAssetsAtIndexes:[NSIndexSet indexSetWithIndex:0]
                            options:0
                         usingBlock:^(ALAsset *alAsset, NSUInteger index, BOOL *innerStop) {

        // The end of the enumeration is signaled by asset == nil.
        if (alAsset) {
            ALAssetRepresentation *representation = [[alAsset defaultRepresentation] retain];
            NSURL *url = [representation url];
            AVURLAsset *avAsset = [[AVURLAsset URLAssetWithURL:url options:nil] retain];
            AVAssetImageGenerator *imageGenerator = [[AVAssetImageGenerator alloc] initWithAsset:avAsset];
            CMTime thumbTime = CMTimeMakeWithSeconds(1, 30);

            NSError *error;
            CMTime actualTime;
            // MAXSIZE is assumed to be a CGSize defined elsewhere (the largest thumbnail you want).
            [imageGenerator setMaximumSize:MAXSIZE];
            CGImageRef imageRef = [imageGenerator copyCGImageAtTime:thumbTime actualTime:&actualTime error:&error];
        }
    }];
}
failureBlock:^(NSError *error) { NSLog(@"No groups"); }];
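To actually end up with a UIImage, and to avoid leaking the CGImageRef that copyCGImageAtTime:actualTime:error: returns (it follows the Core Foundation Create/Copy ownership rule), you would then add something like this inside the inner block, right after the copyCGImageAtTime: call:

if (imageRef) {
    // Wrap the generated frame in a UIImage, then release the CGImageRef we own.
    UIImage *thumbnail = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    // ... use thumbnail, e.g. assign it to an image view ...
} else {
    NSLog(@"Could not generate thumbnail: %@", error);
}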