
When I create a second AVCaptureSession, autofocus stops working on the first AVCaptureSession. Whichever session is created second is the one in which autofocus works; the session created first never autofocuses. How can I get autofocus to work in both AVCaptureSessions without re-creating the sessions?

I would expect either session to be able to autofocus once the other has been stopped, just as auto white balance and auto exposure do. If you watch the log window while running the sample code below, you can see the key-value observing messages coming through; however, the focus messages never appear while the top session is running.

Side note: unfortunately, a bug in a third-party library I use prevents me from fully re-creating the sessions as I switch between them (it leaks its AVCaptureSessions, which eventually gets the app killed). The full story is that this library creates a capture session for me and exposes a public API for starting and stopping that session, and I want to create another session of my own. The code below demonstrates the problem without using the third-party library.

I created a test app with the code listed below and a XIB file containing two views, one above the other, plus a button wired to the switchSessions method, which demonstrates the problem.

It may be related to the issue described here, Focus (Autofocus) not working in camera (AVFoundation AVCaptureSession), although that question makes no mention of two capture sessions.
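For context (this is not part of the original question): the focus mode lives on the AVCaptureDevice, which both sessions here share, not on the sessions themselves. Below is a minimal sketch of how continuous autofocus can be requested on that shared device; note that the commented-out log in the sample code further down already confirms that focusMode reports the expected value, so this alone is unlikely to be the whole story.

// Hedged sketch (assumes the default video device): request continuous
// autofocus on the shared AVCaptureDevice. The device must be locked for
// configuration before its focus mode can be changed.
AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

if ([device isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
    NSError *error = nil;
    if ([device lockForConfiguration:&error]) {
        [device setFocusMode:AVCaptureFocusModeContinuousAutoFocus];
        [device unlockForConfiguration];
    }
    else {
        NSLog(@"Could not lock the device for configuration: %@", [error localizedDescription]);
    }
}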

Header file:

#import <UIKit/UIKit.h> 

@class AVCaptureSession; 
@class AVCaptureStillImageOutput; 
@class AVCaptureVideoPreviewLayer; 
@class AVCaptureDevice; 
@class AVCaptureDeviceInput; 

@interface AVCaptureSessionFocusBugViewController : UIViewController { 

    IBOutlet UIView *_topView; 
    IBOutlet UIView *_bottomView; 

    AVCaptureDevice *_device; 

    AVCaptureSession *_topSession; 

    AVCaptureStillImageOutput *_outputTopSession; 
    AVCaptureVideoPreviewLayer *_previewLayerTopSession; 
    AVCaptureDeviceInput *_inputTopSession; 

    AVCaptureSession *_bottomSession; 

    AVCaptureStillImageOutput *_outputBottomSession; 
    AVCaptureVideoPreviewLayer *_previewLayerBottomSession; 
    AVCaptureDeviceInput *_inputBottomSession; 
} 

- (IBAction)switchSessions:(id)sender; 

@end 

Implementation file:

#import "AVCaptureSessionFocusBugViewController.h" 
#import <AVFoundation/AVFoundation.h> 

@interface AVCaptureSessionFocusBugViewController() 

- (void)setupCaptureSession:(AVCaptureSession **)session 
        output:(AVCaptureStillImageOutput **)output 
       previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer 
         input:(AVCaptureDeviceInput **)input 
         view:(UIView *)view; 

- (void)tearDownSession:(AVCaptureSession **)session 
       output:(AVCaptureStillImageOutput **)output 
      previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer 
        input:(AVCaptureDeviceInput **)input 
        view:(UIView *)view; 

@end 

@implementation AVCaptureSessionFocusBugViewController 

- (IBAction)switchSessions:(id)sender 
{ 
    if ([_topSession isRunning]) { 
     [_topSession stopRunning]; 
     [_bottomSession startRunning]; 
     NSLog(@"Bottom session now running."); 
    } 
    else { 
     [_bottomSession stopRunning]; 
     [_topSession startRunning]; 
     NSLog(@"Top session now running."); 
    } 
} 

- (void)observeValueForKeyPath:(NSString *)keyPath 
         ofObject:(id)object 
         change:(NSDictionary *)change 
         context:(void *)context 
{ 
    NSLog(@"Observed value for key at key path %@.", keyPath); 
    // Enable to confirm that the focusMode is set correctly. 
    //NSLog(@"Autofocus for the device is set to %d.", [_device focusMode]); 
} 

- (void)viewDidLoad { 
    [super viewDidLoad]; 

    _device = [[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo] retain]; 

    [self setupCaptureSession:&_topSession 
         output:&_outputTopSession 
       previewLayer:&_previewLayerTopSession 
         input:&_inputTopSession 
         view:_topView]; 

    [self setupCaptureSession:&_bottomSession 
         output:&_outputBottomSession 
       previewLayer:&_previewLayerBottomSession 
         input:&_inputBottomSession 
         view:_bottomView]; 

    // NB: We only need to observe one device, since the top and bottom sessions use the same device. 
    [_device addObserver:self forKeyPath:@"adjustingFocus" options:NSKeyValueObservingOptionNew context:nil]; 
    [_device addObserver:self forKeyPath:@"adjustingExposure" options:NSKeyValueObservingOptionNew context:nil]; 
    [_device addObserver:self forKeyPath:@"adjustingWhiteBalance" options:NSKeyValueObservingOptionNew context:nil]; 

    [_topSession startRunning]; 
    NSLog(@"Starting top session."); 
} 


- (void)setupCaptureSession:(AVCaptureSession **)session 
        output:(AVCaptureStillImageOutput **)output 
       previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer 
         input:(AVCaptureDeviceInput **)input 
         view:(UIView *)view 
{  
    *session = [[AVCaptureSession alloc] init]; 

    // Create the preview layer. 
    *previewLayer = [[AVCaptureVideoPreviewLayer layerWithSession:*session] retain]; 

    [*previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

    [*previewLayer setFrame:[view bounds]]; 

    [[view layer] addSublayer:*previewLayer]; 

    // Configure the inputs and outputs. 
    [*session setSessionPreset:AVCaptureSessionPresetMedium]; 

    NSError *error = nil; 

    *input = [[AVCaptureDeviceInput deviceInputWithDevice:_device error:&error] retain]; 

    if (!*input) { 
     NSLog(@"Error creating input device:%@", [error localizedDescription]); 
     return; 
    } 

    [*session addInput:*input]; 

    *output = [[AVCaptureStillImageOutput alloc] init]; 

    [*session addOutput:*output]; 

    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]; 

    [*output setOutputSettings:outputSettings]; 

    [outputSettings release]; 
} 

- (void)viewDidUnload { 
    // Tear down the sessions and stop observing the device before releasing
    // the objects they reference.
    [self tearDownSession:&_topSession 
        output:&_outputTopSession 
      previewLayer:&_previewLayerTopSession 
        input:&_inputTopSession 
        view:_topView]; 

    [self tearDownSession:&_bottomSession 
         output:&_outputBottomSession 
       previewLayer:&_previewLayerBottomSession 
         input:&_inputBottomSession 
         view:_bottomView]; 

    [_device removeObserver:self forKeyPath:@"adjustingFocus"]; 
    [_device removeObserver:self forKeyPath:@"adjustingExposure"]; 
    [_device removeObserver:self forKeyPath:@"adjustingWhiteBalance"]; 

    [_device release]; 
    _device = nil; 

    [_topView release]; 
    _topView = nil; 

    [_bottomView release]; 
    _bottomView = nil; 

    [super viewDidUnload]; 
} 

- (void)tearDownSession:(AVCaptureSession **)session 
       output:(AVCaptureStillImageOutput **)output 
      previewLayer:(AVCaptureVideoPreviewLayer **)previewLayer 
        input:(AVCaptureDeviceInput **)input 
        view:(UIView *)view 
{ 
    if ([*session isRunning]) { 
     [*session stopRunning]; 
    } 

    [*session removeOutput:*output]; 

    [*output release]; 
    *output = nil; 

    [*session removeInput:*input]; 

    [*input release]; 
    *input = nil; 

    [*previewLayer removeFromSuperlayer]; 

    [*previewLayer release]; 
    *previewLayer = nil; 

    [*session release]; 
    *session = nil; 
} 

@end 

Answer


Apple Technical Support has confirmed that creating two simultaneous capture sessions is not supported. You have to tear one down and then create the other.
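A minimal sketch of what that implies for the sample above (a hypothetical rewrite reusing the question's own setupCaptureSession:output:previewLayer:input:view: and tearDownSession:output:previewLayer:input:view: helpers, not code supplied by Apple): keep only one session alive at a time, tearing the current one down and rebuilding the other on each switch. With this approach, viewDidLoad would set up only the top session; the question's side note about the leaking third-party library still applies, since the torn-down sessions must actually be released.

// Hedged sketch: only one AVCaptureSession exists at any time. The live
// session is torn down and the other one is rebuilt whenever the user switches.
- (IBAction)switchSessions:(id)sender 
{ 
    if (_topSession) { 
        [self tearDownSession:&_topSession 
            output:&_outputTopSession 
          previewLayer:&_previewLayerTopSession 
            input:&_inputTopSession 
            view:_topView]; 

        [self setupCaptureSession:&_bottomSession 
             output:&_outputBottomSession 
           previewLayer:&_previewLayerBottomSession 
             input:&_inputBottomSession 
             view:_bottomView]; 

        [_bottomSession startRunning]; 
        NSLog(@"Bottom session rebuilt and running."); 
    } 
    else { 
        [self tearDownSession:&_bottomSession 
            output:&_outputBottomSession 
          previewLayer:&_previewLayerBottomSession 
            input:&_inputBottomSession 
            view:_bottomView]; 

        [self setupCaptureSession:&_topSession 
             output:&_outputTopSession 
           previewLayer:&_previewLayerTopSession 
             input:&_inputTopSession 
             view:_topView]; 

        [_topSession startRunning]; 
        NSLog(@"Top session rebuilt and running."); 
    } 
} 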


Can you provide a source or a quote for that? – 2013-06-12 17:29:03
