App runs out of memory and crashes when converting video to frames on iOS 5.1 (with ARC)

I captured video with AVFoundation, and when I try to convert the captured video into frames my app crashes because it runs out of memory. The project targets iOS 5.1 with ARC enabled.

Here is my code:
-(void)makeVideoCall
{
    // Attach a video data output to the existing capture session (captureSession is an ivar)
    videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [captureSession addOutput:videoOutput];

    // Ask for BGRA pixel buffers so each frame can later be wrapped in a bitmap context
    videoOutput.videoSettings =
        [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                    forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    videoOutput.alwaysDiscardsLateVideoFrames = YES;

    // Deliver sample buffers on a dedicated serial queue
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [videoOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue); // the output retains the queue, so releasing the local reference is fine

    [captureSession startRunning];
}
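For context, captureSession is an AVCaptureSession ivar that is created before makeVideoCall runs. A minimal sketch of that setup is below; the setupCaptureSession name, the Medium preset, and the default-camera choice are just placeholders standing in for my actual configuration:

-(void)setupCaptureSession
{
    // Minimal session setup that makeVideoCall builds on (error handling kept short)
    captureSession = [[AVCaptureSession alloc] init];
    captureSession.sessionPreset = AVCaptureSessionPresetMedium;

    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    if (input && [captureSession canAddInput:input]) {
        [captureSession addInput:input];
    } else {
        NSLog(@"Could not add camera input: %@", error);
    }
}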
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"\n Inside CaptureOutput....");

    // Convert the sample buffer into a CGImage, then wrap it in a UIImage (image is an ivar)
    CGImageRef tempImage = [self imageRefFromSampleBuffer:sampleBuffer];
    image = [[UIImage alloc] initWithCGImage:tempImage scale:0.2 orientation:UIImageOrientationLeftMirrored];
}
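Since the delegate fires on MyQueue rather than the main thread, anything that touches UIKit with image has to hop back to the main queue. This is roughly how I consume the frame afterwards; imageView is just a placeholder name, not necessarily what my project uses:

// Placeholder sketch of consuming the captured frame on the main thread
dispatch_async(dispatch_get_main_queue(), ^{
    self.imageView.image = image;   // imageView is a hypothetical UIImageView outlet
});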
-(CGImageRef)imageRefFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Lock the pixel buffer so its base address can be read directly
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Wrap the BGRA pixel data in a bitmap context and snapshot it as a CGImage
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(context);

    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // newImage follows the Core Foundation Create rule, so the caller owns it
    return newImage;
}
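From what I understand of the Create rule, the CGImageRef returned above is not managed by ARC, so the caller would have to release it. A minimal sketch of what a balanced version of my delegate callback might look like is below, although I am not sure this alone explains the crash:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CGImageRef tempImage = [self imageRefFromSampleBuffer:sampleBuffer];

    // UIImage retains the CGImage it wraps, so the +1 reference from
    // imageRefFromSampleBuffer can be released right after wrapping it
    image = [[UIImage alloc] initWithCGImage:tempImage scale:0.2 orientation:UIImageOrientationLeftMirrored];
    CGImageRelease(tempImage);
}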
Could anyone please point me in the right direction?
Regards, Monish.