
I am using the AVFoundation framework here to record video, and I am trying to use the front and back cameras at the same time. But when the front camera is running, the back camera freezes on its first frame, and vice versa. Can a capture session (as in AVCam) work with more than one camera at once? Does anyone have code that could help me?

- (void)viewDidLoad 
{ 

[super viewDidLoad]; 
self.view.frame = CGRectMake(0, 0, 320, 568); 
self.view.backgroundColor = [UIColor clearColor]; 
ActiveIndicatorView = [[UIView alloc]initWithFrame:CGRectMake(0, 0, 320, 250)]; 
ActiveIndicatorView.backgroundColor = [UIColor clearColor]; 
[self.view addSubview:ActiveIndicatorView]; 
ActiveIndicatorViewBack = [[UIView alloc]initWithFrame:CGRectMake(0, 290, 320, 250)]; 
ActiveIndicatorViewBack.backgroundColor = [UIColor greenColor]; 
[self.view addSubview:ActiveIndicatorViewBack]; 
tracksArray = [[NSMutableArray alloc] 
       initWithCapacity:2]; 
[self startRecording]; 

} 

-(void)startRecording{ 

[self StartCameraTwo];//front camera 
[self StartCamera];//back camera 

} 
-(void)StartCamera{ 

// NSLog(@"Setting up capture session"); 
CaptureSessionBack = [[AVCaptureSession alloc] init]; 
//ADD VIDEO INPUT 

AVCaptureDevice *VideoDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
// VideoDeviceBack = [self frontCamera]; 

if (VideoDeviceBack)
{
    NSError *error;
    VideoInputDeviceBack = [AVCaptureDeviceInput deviceInputWithDevice:VideoDeviceBack error:&error];
    if (!error)
    {
        if ([CaptureSessionBack canAddInput:VideoInputDeviceBack])
            [CaptureSessionBack addInput:VideoInputDeviceBack];
        else
            NSLog(@"Couldn't add back video input");
    }
    else
    {
        NSLog(@"Couldn't create back video input");
    }
}
else
{
    NSLog(@"Couldn't find a back camera");
}
AVCaptureDevice *audioCaptureDeviceBack = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInputBack = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDeviceBack error:&error];
if (audioInputBack && [CaptureSessionBack canAddInput:audioInputBack])
{
    [CaptureSessionBack addInput:audioInputBack];
}
previewBack = [self videoPreviewWithFrameBack:ActiveIndicatorViewBack.bounds];
previewBack.backgroundColor = [UIColor grayColor]; 
[ActiveIndicatorViewBack addSubview:previewBack]; 
    // [preview addSubview:vie]; 
MovieFileOutputBack = [[AVCaptureMovieFileOutput alloc] init]; 
MovieFileOutputBack.minFreeDiskSpaceLimit = 1024 * 1024;      //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

if ([CaptureSessionBack canAddOutput:MovieFileOutputBack]) 
    [CaptureSessionBack addOutput:MovieFileOutputBack]; 

//SET THE CONNECTION PROPERTIES (output properties) 
[self CameraSetOutputPropertiesBack];   //(We call a method as it also has to be done after changing camera) 

//NSLog(@"Setting image quality"); 
if ([CaptureSessionBack canSetSessionPreset:AVCaptureSessionPreset640x480]) { // check size-based presets are supported before setting them
    [CaptureSessionBack setSessionPreset:AVCaptureSessionPreset640x480];
}

if ([VideoDeviceBack isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [VideoDeviceBack lockForConfiguration:&error]){ 
    [VideoDeviceBack setFocusMode:AVCaptureFocusModeAutoFocus]; 
    if ([VideoDeviceBack isFocusPointOfInterestSupported]) 
     [VideoDeviceBack setFocusPointOfInterest:CGPointMake(0.5f,0.5f)]; 
    [VideoDeviceBack unlockForConfiguration]; 
} 

[CaptureSessionBack startRunning]; 

} 
-(void)StartCameraTwo{ 
// NSLog(@"Setting up capture session"); 
CaptureSession = [[AVCaptureSession alloc] init]; 
//ADD VIDEO INPUT 

AVCaptureDevice *VideoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
VideoDevice = [self frontCamera]; 

if (VideoDevice)
{
    NSError *error;
    VideoInputDevice = [AVCaptureDeviceInput deviceInputWithDevice:VideoDevice error:&error];
    if (!error)
    {
        if ([CaptureSession canAddInput:VideoInputDevice])
            [CaptureSession addInput:VideoInputDevice];
        else
            NSLog(@"Couldn't add front video input");
    }
    else
    {
        NSLog(@"Couldn't create front video input");
    }
}
else
{
    NSLog(@"Couldn't find a front camera");
}

AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
NSError *error = nil;
AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
if (audioInput && [CaptureSession canAddInput:audioInput])
{
    [CaptureSession addInput:audioInput];
}

preview = [self videoPreviewWithFrame:ActiveIndicatorView.bounds]; 
ActiveIndicatorView.backgroundColor = [UIColor redColor]; 
[ActiveIndicatorView addSubview:preview]; 
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;      //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

if ([CaptureSession canAddOutput:MovieFileOutput]) 
    [CaptureSession addOutput:MovieFileOutput]; 

//SET THE CONNECTION PROPERTIES (output properties) 
[self CameraSetOutputProperties];   //(We call a method as it also has to be done after changing camera) 

//NSLog(@"Setting image quality"); 
if ([CaptureSession canSetSessionPreset:AVCaptureSessionPreset640x480]) { // check size-based presets are supported before setting them
    [CaptureSession setSessionPreset:AVCaptureSessionPreset640x480];
}

if ([VideoDevice isFocusModeSupported:AVCaptureFocusModeAutoFocus] && [VideoDevice lockForConfiguration:&error]){ 
    [VideoDevice setFocusMode:AVCaptureFocusModeAutoFocus]; 
    if ([VideoDevice isFocusPointOfInterestSupported]) 
     [VideoDevice setFocusPointOfInterest:CGPointMake(0.5f,0.5f)]; 
    [VideoDevice unlockForConfiguration]; 
} 

[CaptureSession startRunning]; 
} 

- (void) CameraSetOutputProperties 
{ 
//SET THE CONNECTION PROPERTIES (output properties) 
CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 

//Set landscape (if required) 
if ([CaptureConnection isVideoOrientationSupported]) 
{ 
    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;  //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE 
    [CaptureConnection setVideoOrientation:orientation]; 

} 

//Set frame rate (if requried) 
CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 

if (CaptureConnection.supportsVideoMinFrameDuration) 
    CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 
if (CaptureConnection.supportsVideoMaxFrameDuration) 
    CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 

CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 
} 

- (void) CameraSetOutputPropertiesBack
{
//SET THE CONNECTION PROPERTIES (output properties) 
CaptureConnectionBack = [MovieFileOutputBack connectionWithMediaType:AVMediaTypeVideo]; 

//Set landscape (if required) 
if ([CaptureConnectionBack isVideoOrientationSupported]) 
{ 
    AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft;  //<<<<<SET VIDEO ORIENTATION IF LANDSCAPE 
    [CaptureConnectionBack setVideoOrientation:orientation]; 

} 

//Set frame rate (if requried) 
CMTimeShow(CaptureConnectionBack.videoMinFrameDuration); 
CMTimeShow(CaptureConnectionBack.videoMaxFrameDuration); 

if (CaptureConnectionBack.supportsVideoMinFrameDuration) 
    CaptureConnectionBack.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 
if (CaptureConnectionBack.supportsVideoMaxFrameDuration) 
    CaptureConnectionBack.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 

CMTimeShow(CaptureConnectionBack.videoMinFrameDuration); 
CMTimeShow(CaptureConnectionBack.videoMaxFrameDuration); 
} 
- (AVCaptureDevice *)frontCamera { 
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
for (AVCaptureDevice *device in devices) { 
    if ([device position] == AVCaptureDevicePositionFront) { 
     return device; 
    } 
} 
return nil; 
}

Answer


It is not currently possible to use the front and back cameras at the same time. Only one capture session can drive the camera hardware at any moment, so whichever session starts running last takes the camera over and the other session's preview freezes on its last frame, which is exactly the behaviour described in the question.
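
If the goal is simply to record from both cameras within one capture flow (rather than truly simultaneously), the common fallback at the time was a single session whose camera input is swapped while it runs. The sketch below is not from the original answer and is untested; it reuses the question's CaptureSession and VideoInputDevice instance variables, and the method name switchToCamera: is purely illustrative.

// Illustrative only: swap the video input on one running session instead of
// running two sessions at once. Assumes the question's CaptureSession and
// VideoInputDevice instance variables.
- (void)switchToCamera:(AVCaptureDevicePosition)position
{
    AVCaptureDevice *newDevice = nil;
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if ([device position] == position) {
            newDevice = device;
            break;
        }
    }
    if (!newDevice) return;

    NSError *error = nil;
    AVCaptureDeviceInput *newInput = [AVCaptureDeviceInput deviceInputWithDevice:newDevice error:&error];
    if (!newInput) return;

    [CaptureSession beginConfiguration];
    [CaptureSession removeInput:VideoInputDevice];      // detach the current camera
    if ([CaptureSession canAddInput:newInput]) {
        [CaptureSession addInput:newInput];             // attach the requested camera
        VideoInputDevice = newInput;
    } else {
        [CaptureSession addInput:VideoInputDevice];     // roll back if the swap is refused
    }
    [CaptureSession commitConfiguration];
}

Note that this only approximates the goal: there is a brief interruption while the input is swapped, and only one camera's frames are captured at any given moment.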