
This line of code, which uses CGBitmapContextCreateImage from the Quartz framework, creates a major memory leak:

CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext); 

Is there any other method or code that I can use in its place?
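For what it's worth, my understanding is that CGBitmapContextCreateImage follows the Core Foundation Create Rule, and the full method below does balance the create with CFRelease; here is a minimal, self-contained sketch of that pattern (the helper name is only illustrative, not part of my project):

#import <CoreGraphics/CoreGraphics.h>

// Illustrative only: create an image from a bitmap context, use it, and
// balance the Create with a matching release.
static void UseImageFromContext(CGContextRef bitmapContext)
{
    CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext); // caller owns (+1)
    if (imgRef)
    {
        // ... draw, save, or wrap imgRef in a UIImage here ...
        CGImageRelease(imgRef); // CFRelease(imgRef) is equivalent
    }
}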

The whole method is as follows:

- (void)captureImageWithCompletionHander:(void (^)(NSString *imageFilePath))completionHandler
{
    dispatch_suspend(_captureQueue);
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) break;
    }
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (error)
        {
            dispatch_resume(_captureQueue);
            return;
        }
        __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
        @autoreleasepool
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            CIImage *ciImage = [[CIImage alloc] initWithData:imageData options:@{kCIImageColorSpace: [NSNull null]}];
            enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
            if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
            {
                CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                if (rectangleFeature)
                {
                    [self correctPerspectiveForImageWithFeatures:rectangleFeature];
                }
            }
            CIFilter *transform = [CIFilter filterWithName:@"CIAffineTransform"];
            [transform setValue:enhancedImage forKey:kCIInputImageKey];
            NSValue *rotation = [NSValue valueWithCGAffineTransform:CGAffineTransformMakeRotation(-90 * (M_PI / 180))];
            [transform setValue:rotation forKey:@"inputTransform"];
            enhancedImage = [transform outputImage];
            if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
            static CIContext *ctx = nil;
            if (!ctx)
            {
                ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: [NSNull null]}];
            }
            CGSize bounds = enhancedImage.extent.size;
            bounds = CGSizeMake(floorf(bounds.width / 4) * 4, floorf(bounds.height / 4) * 4);
            CGRect extent = CGRectMake(enhancedImage.extent.origin.x, enhancedImage.extent.origin.y, bounds.width, bounds.height);
            static int bytesPerPixel = 8;
            uint rowBytes = bytesPerPixel * bounds.width;
            uint totalBytes = rowBytes * bounds.height;
            uint8_t *byteBuffer = (uint8_t *)malloc(totalBytes);
            CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
            [ctx render:enhancedImage toBitmap:byteBuffer rowBytes:rowBytes bounds:extent format:kCIFormatRGBA8 colorSpace:colorSpace];
            CGContextRef bitmapContext = CGBitmapContextCreate(byteBuffer, bounds.width, bounds.height, bytesPerPixel, rowBytes, colorSpace, kCGImageAlphaNoneSkipLast);
            free(byteBuffer);

            CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);

            CGContextRelease(bitmapContext);
            CGColorSpaceRelease(colorSpace);
            saveCGImageAsJPEGToFilePath(imgRef, filePath);
            CFRelease(imgRef);
            dispatch_async(dispatch_get_main_queue(), ^
            {
                completionHandler(filePath);
                dispatch_resume(_captureQueue);
            });
            _imageDedectionConfidence = 0.0f;
        }
    }];
}


Can you post more than just the single line of code? Maybe the whole method would be useful. –

Answer


I fixed the problem by making the code much leaner and removing some unnecessary parts, including the leaking line above.

- (void)captureImageWithCompletionHander:(void (^)(NSString *imageFilePath))completionHandler
{
    dispatch_suspend(_captureQueue);
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in self.stillImageOutput.connections)
    {
        for (AVCaptureInputPort *port in [connection inputPorts])
        {
            if ([[port mediaType] isEqual:AVMediaTypeVideo])
            {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) break;
    }
    __weak typeof(self) weakSelf = self;
    [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
    {
        if (error)
        {
            dispatch_resume(_captureQueue);
            return;
        }
        __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
        @autoreleasepool
        {
            NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
            UIImage *img = [UIImage imageWithData:imageData];
            imageData = nil;
            CGSize newSize = CGSizeMake(img.size.width, img.size.height);
            UIGraphicsBeginImageContext(newSize);
            [img drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
            UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
            CIImage *ciImage = [[CIImage alloc] initWithImage:newImage];
            enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
            if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
            {
                CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                if (rectangleFeature)
                {
                    enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
                }
            }
            if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
            CGRect rect = [enhancedImage extent];
            static CIContext *ctx = nil;
            if (!ctx)
            {
                ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: [NSNull null]}];
            }
            CGImageRef imgRef = [ctx createCGImage:enhancedImage fromRect:rect];
            saveCGImageAsJPEGToFilePath(imgRef, filePath);
            CFRelease(imgRef);
            dispatch_async(dispatch_get_main_queue(), ^
            {
                completionHandler(filePath);
                dispatch_resume(_captureQueue);
            });
            _imageDedectionConfidence = 0.0f;
        }
    }];
}
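
Stripped of the capture and filtering code, the change that removed the leak for me boils down to letting the CIContext create the CGImage directly and releasing it once the file has been written. A minimal sketch of just that path (the function name is only illustrative):

#import <CoreImage/CoreImage.h>

// Illustrative only: render a CIImage straight to a CGImage with no malloc'd
// buffer and no hand-built CGBitmapContext, then release the created image.
static void RenderAndRelease(CIContext *ctx, CIImage *enhancedImage)
{
    CGImageRef imgRef = [ctx createCGImage:enhancedImage fromRect:enhancedImage.extent];
    if (imgRef)
    {
        // ... write imgRef to disk here (the method above uses saveCGImageAsJPEGToFilePath) ...
        CGImageRelease(imgRef); // the created image is owned by the caller
    }
}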

You use CGBitmapContextCreateImage() to create a CGImage, but you have not released that CGImage.

Release the CGImageRef once you have obtained the UIImage from it; that will help:

CGImageRef imgRef = CGBitmapContextCreateImage(bmContext); 
image = [UIImage imageWithCGImage:imgRef]; 
CGImageRelease(imgRef);
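
As far as I know, [UIImage imageWithCGImage:] keeps its own reference to the underlying image, so it is safe to release the CGImageRef immediately after the conversion. A self-contained version of the same pattern (the helper name is only illustrative):

#import <UIKit/UIKit.h>

// Illustrative only: wrap the bitmap context's contents in a UIImage and
// release the intermediate CGImageRef right away.
static UIImage *ImageFromBitmapContext(CGContextRef bmContext)
{
    CGImageRef imgRef = CGBitmapContextCreateImage(bmContext); // caller owns (+1)
    UIImage *image = [UIImage imageWithCGImage:imgRef];
    CGImageRelease(imgRef); // balances the Create; UIImage holds its own reference
    return image;
}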