
Well... in the end I did what Nadav told me in his blog post - discover the DAL devices and capture their output with an AVCaptureSession.
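For context, the code below seems to assume a class roughly like the following sketch (the class name CaptureManager and the protocol conformance are my guesses; the instance variable names are taken from the methods themselves; you also need to link the AVFoundation, CoreMedia and CoreMediaIO frameworks):

#import <Cocoa/Cocoa.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMediaIO/CMIOHardware.h>

// Hypothetical wrapper class; the ivars are the ones the code below uses.
@interface CaptureManager : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
    AVCaptureSession *session;
    AVCaptureDeviceInput *newVideoDeviceInput;
    AVCaptureVideoDataOutput *videoDataOutput;
    NSArray *observers;
}
@end

The code itself: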

-(id) init { 

    // Allow discovery of DAL screen-capture devices (e.g. attached iOS devices) 
    CMIOObjectPropertyAddress prop = 
    { kCMIOHardwarePropertyAllowScreenCaptureDevices, 
     kCMIOObjectPropertyScopeGlobal, 
     kCMIOObjectPropertyElementMaster }; 
    UInt32 allow = 1; 
    CMIOObjectSetPropertyData(kCMIOObjectSystemObject, 
           &prop, 0, NULL, 
           sizeof(allow), &allow); 

    // Get devices 
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed]; 
    BOOL deviceAttached = NO; 
    for (AVCaptureDevice *device in devices) { 
        if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) { 
            deviceAttached = YES; 
            [self startSession:device]; 
            break; 
        } 
    } 

    NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; 

    // Device not attached - subscribe to onConnect notifications 
    if (!deviceAttached) { 
        id deviceWasConnectedObserver = 
            [notificationCenter addObserverForName:AVCaptureDeviceWasConnectedNotification 
                                            object:nil 
                                             queue:[NSOperationQueue mainQueue] 
                                        usingBlock:^(NSNotification *note) { 
                                            AVCaptureDevice *device = note.object; 
                                            [self deviceConnected:device]; 
                                        }]; 

        observers = [[NSArray alloc] initWithObjects:deviceWasConnectedObserver, nil]; 
    } 

    return self; 
} 

- (void) deviceConnected:(AVCaptureDevice *)device { 
    if ([[device uniqueID] isEqualToString:/*deviceUDID*/]) { 
        [self startSession:device]; 
    } 
} 

- (void) startSession:(AVCaptureDevice *)device { 

    // Init capturing session 
    session = [[AVCaptureSession alloc] init]; 

    // Start session configuration 
    [session beginConfiguration]; 

    // Add session input 
    NSError *error = nil; 
    newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; 
    if (newVideoDeviceInput == nil) { 
        dispatch_async(dispatch_get_main_queue(), ^(void) { 
            NSLog(@"%@", error); 
        }); 
    } else { 
        [session addInput:newVideoDeviceInput]; 
    } 

    // Add session output 
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
    videoDataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; 

    dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL); 

    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue]; 
    [session addOutput:videoDataOutput]; 

    // Finish session configuration 
    [session commitConfiguration]; 

    // Start the session 
    [session startRunning]; 
} 

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate 

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { 
    NSImage *resultNSImage = [self imageFromSampleBuffer:sampleBuffer]; 

    /* 
    * Here you can do whatever you need with the frame (e.g. convert to JPG) 
    */ 
}
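
The imageFromSampleBuffer: helper is not shown in the answer. A minimal sketch of one way to implement it, going through Core Image (this is an assumption on my part, not necessarily what the original code did):

// Requires #import <QuartzCore/QuartzCore.h> for CIImage.
- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer { 
    // Wrap the frame's pixel buffer in a CIImage 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer]; 

    // Build an NSImage backed by a Core Image representation 
    NSCIImageRep *rep = [NSCIImageRep imageRepWithCIImage:ciImage]; 
    NSImage *image = [[NSImage alloc] initWithSize:rep.size]; 
    [image addRepresentation:rep]; 
    return image; 
} 

From there, converting a frame to JPEG (as the comment in the delegate method suggests) can be done with an NSBitmapImageRep, e.g. [[[NSBitmapImageRep alloc] initWithCIImage:ciImage] representationUsingType:NSBitmapImageFileTypeJPEG properties:@{NSImageCompressionFactor: @0.8}].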