
This is an Objective-C port of the Swift version (source: Swift Version.) of real-time face detection through the iPhone camera.

Here is my code. Can anybody help me figure out why it doesn't work?

#import "ViewController.h" 
#import <AVFoundation/AVFoundation.h> 

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate> {
    AVCaptureSession *captureSession;
    AVCaptureDevice *captureDevice;
    AVCaptureVideoPreviewLayer *previewLayer;
    UIImage *resultImage;
    BOOL isStart;
}

@end

@implementation ViewController 

- (void)viewDidLoad {
    [super viewDidLoad];
    isStart = NO;
    [self isStartTrue];
    captureSession = [[AVCaptureSession alloc] init];
    captureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    captureSession.sessionPreset = AVCaptureSessionPresetLow;
    NSArray *devices = [AVCaptureDevice devices];
    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront) {
                captureDevice = device;
                if (captureDevice != nil) {
                    NSLog(@"Capture Device found");
                    [self beginSession];
                }
            }
        }
    }
    // Do any additional setup after loading the view, typically from a nib.
}


- (void)isStartTrue {
    isStart = YES;
}

- (void)beginSession {
    AVCaptureDeviceInput *captureDeviceInput = [[AVCaptureDeviceInput alloc] initWithDevice:captureDevice error:nil];
    [captureSession addInput:captureDeviceInput];
    AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    dispatch_queue_t cameraQueue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:cameraQueue];
    NSDictionary *videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA)};
    output.videoSettings = videoSettings;
    [captureSession addOutput:output];
    previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    previewLayer.videoGravity = AVLayerVideoGravityResizeAspect;
    previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:previewLayer];
    [captureSession startRunning];
}


- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (isStart) {
        resultImage = [self sampleBufferToImage:sampleBuffer];

        CIContext *context = [CIContext contextWithOptions:@{kCIContextUseSoftwareRenderer : @YES}];
        CIDetector *detector = [CIDetector detectorOfType:CIDetectorTypeFace context:context options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];
        CIImage *ciImage = [CIImage imageWithCGImage:resultImage.CGImage];
        dispatch_async(dispatch_get_main_queue(), ^{
            // previewIV is a UIImageView wired up elsewhere in this project.
            previewIV.image = resultImage;
        });
        // EXIF orientation 6: the 0th row is on the visual right-hand side of the image.
        NSArray *results = [detector featuresInImage:ciImage options:@{CIDetectorImageOrientation : @6}];
        for (CIFaceFeature *face in results) {
            // Note: createCGImage: returns a +1 CGImageRef that is never released here.
            UIImage *faceImage = [UIImage imageWithCGImage:[context createCGImage:ciImage fromRect:face.bounds] scale:1.0 orientation:UIImageOrientationRight];
            NSLog(@"  ====%@", NSStringFromCGRect(face.bounds));
        }
    }
}


- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

    void *baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    int bitsPerComponent = 8;
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, bitsPerComponent, bytesPerRow, colorSpace, (kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst));
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    UIImage *result = [[UIImage alloc] initWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
    return result;
}

- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    [captureSession stopRunning];
    // Dispose of any resources that can be recreated.
}

@end 

This is the entire code of the detection view controller.

You can copy this code and import AVFoundation.framework and CoreMedia.framework.
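Note that if camera access has been denied, the capture session silently delivers no frames and the preview stays black. A minimal pre-flight check could look like this (this call is not part of the code above):

- (void)checkCameraAccess {
    [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                             completionHandler:^(BOOL granted) {
        if (!granted) {
            NSLog(@"Camera access denied; the session will deliver no frames.");
        }
    }];
}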


In what way does it not work? Does it crash? Do you get an error? You need to be more specific instead of just asking people to make your code "work". – Russell


OK, as Mr. Russell said, I have attached some more detailed information about the situation. I don't get any errors; it just doesn't detect my face correctly, and the NSLog never prints the information. – Hashira

Answer


If you look at your previewIV, you will find that it is an empty image.

So I changed your sampleBufferToImage method as below, and it works.

- (UIImage *)sampleBufferToImage:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Let Core Image wrap the pixel buffer directly; no manual locking needed.
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:imageBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext
                             createCGImage:ciImage
                             fromRect:CGRectMake(0, 0,
                                                 CVPixelBufferGetWidth(imageBuffer),
                                                 CVPixelBufferGetHeight(imageBuffer))];
    UIImage *result = [[UIImage alloc] initWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return result;
}
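For reference, the reason your original sampleBufferToImage produced an empty image: it never locks the pixel buffer, and it asks for plane 0 of a non-planar BGRA buffer, so CVPixelBufferGetBaseAddressOfPlane returns NULL and CGBitmapContextCreate silently allocates its own zero-filled backing store. It also leaks the CGContext and CGImage it creates. If you prefer to keep the CGBitmapContext approach, a sketch of that route (the method name sampleBufferToImageLocked is mine) would be:

- (UIImage *)sampleBufferToImageLocked:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock before reading, and use the non-planar base address for 32BGRA.
    CVPixelBufferLockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef imageRef = CGBitmapContextCreateImage(context);
    UIImage *result = [UIImage imageWithCGImage:imageRef scale:1.0 orientation:UIImageOrientationRight];
    // Release everything created here; the original leaked these.
    CGImageRelease(imageRef);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, kCVPixelBufferLock_ReadOnly);
    return result;
}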

Thank you for your answer. After editing my code as you said, it captures the camera image. I have accepted your answer. Thanks again! – Hashira