
Pause and resume video capture in iOS with AVCaptureMovieFileOutput and AVCaptureVideoDataOutput

I have to implement the ability to repeatedly pause and resume video capture within a single session, appending each new segment (the footage captured after each pause) to the same video file, using AVFoundation. Currently, every time I press "stop" and then "record" again, it simply saves a new video file to my iPhone's Documents directory and starts capturing into a new file. I need the "record/stop" button to capture video only while recording is active, and then, when the "Done" button is pressed, to end up with a single AV file containing all of the segments joined together. All of this has to happen within the same capture session / preview session. I am not using AVAssetWriterInput.

The only approach I can think of is to merge each of the individual output files into a single file when the "Done" button is pressed (the mergeMultipleVideo method below).

This code works on iOS 5 but not on iOS 6. Specifically, on iOS 6, the first time I pause recording (stop recording), the AVCaptureFileOutputRecordingDelegate method (captureOutput: didFinishRecordingToOutputFileAtURL: fromConnections: error:) is called. After that, however, when I start recording again the delegate method is called again right away, but it is no longer called when I stop recording.

I need a solution to this problem. Please help.

//View LifeCycle 
- (void)viewDidLoad 
{ 
[super viewDidLoad]; 

self.finalRecordedVideoName = [self stringWithNewUUID]; 

arrVideoName = [[NSMutableArray alloc]initWithCapacity:0]; 
arrOutputUrl = [[NSMutableArray alloc] initWithCapacity:0]; 

CaptureSession = [[AVCaptureSession alloc] init]; 


captureDevices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
if ([captureDevices count] > 0) 
{ 
    NSError *error; 
    VideoInputDevice = [[AVCaptureDeviceInput alloc] initWithDevice:[self backFacingCamera] error:&error]; 
    if (!error) 
    { 
     if ([CaptureSession canAddInput:VideoInputDevice]) 
      [CaptureSession addInput:VideoInputDevice]; 
     else 
      NSLog(@"Couldn't add video input"); 
    } 
    else 
    { 
     NSLog(@"Couldn't create video input"); 
    } 
} 
else 
{ 
    NSLog(@"Couldn't create video capture device"); 
} 



//ADD VIDEO PREVIEW LAYER 
NSLog(@"Adding video preview layer"); 
AVCaptureVideoPreviewLayer *layer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:CaptureSession]; 

[self setPreviewLayer:layer]; 


UIDeviceOrientation currentOrientation = [UIDevice currentDevice].orientation; 

NSLog(@"%d",currentOrientation); 

if (currentOrientation == UIDeviceOrientationPortrait) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationPortrait; 
} 
else if (currentOrientation == UIDeviceOrientationPortraitUpsideDown) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationPortraitUpsideDown; 
} 
else if (currentOrientation == UIDeviceOrientationLandscapeRight) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight; 
} 
else if (currentOrientation == UIDeviceOrientationLandscapeLeft) 
{ 
    PreviewLayer.orientation = AVCaptureVideoOrientationLandscapeLeft; 
} 

[[self PreviewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

//ADD MOVIE FILE OUTPUT 
NSLog(@"Adding movie file output"); 
MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init]; 
VideoDataOutput = [[AVCaptureVideoDataOutput alloc] init]; 
[VideoDataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()]; //Note: a dedicated serial queue is usually preferable to the main queue for sample buffer delivery 

NSString* key = (NSString*)kCVPixelBufferPixelFormatTypeKey; //Note: the original used kCVPixelBufferBytesPerRowAlignmentKey here, which is the wrong key for a pixel format value 
NSNumber* value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]; 
NSDictionary* videoSettings = [NSDictionary dictionaryWithObject:value forKey:key]; 

[VideoDataOutput setVideoSettings:videoSettings]; 

Float64 TotalSeconds = 60;   //Total seconds 
int32_t preferredTimeScale = 30; //Frames per second 
CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);//<<SET MAX DURATION 
MovieFileOutput.maxRecordedDuration = maxDuration; 
MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024; //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

//SET THE CONNECTION PROPERTIES (output properties) 
[self CameraSetOutputProperties];   //(We call a method as it also has to be done after changing camera) 
AVCaptureConnection *videoConnection = nil; 

for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
{ 
    NSLog(@"%@", connection); 
    for (AVCaptureInputPort *port in [connection inputPorts]) 
    { 
     NSLog(@"%@", port); 
     if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
     { 
      videoConnection = connection; 
     } 
    } 
} 

if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false** 
{ 
    //Note: UIDeviceOrientation and AVCaptureVideoOrientation are distinct enums whose landscape values do not line up one-to-one; the cast silences the compiler but a proper mapping is safer 
    [videoConnection setVideoOrientation:(AVCaptureVideoOrientation)[[UIDevice currentDevice] orientation]]; 
} 

NSLog(@"Setting image quality"); 
[CaptureSession setSessionPreset:AVCaptureSessionPresetLow]; 

//----- DISPLAY THE PREVIEW LAYER ----- 

CGRect layerRect = CGRectMake(5, 5, 299, ([[UIScreen mainScreen] bounds].size.height == 568)?438:348); 

[self.PreviewLayer setBounds:layerRect]; 
[self.PreviewLayer setPosition:CGPointMake(CGRectGetMidX(layerRect),CGRectGetMidY(layerRect))]; 

if ([CaptureSession canAddOutput:MovieFileOutput]) 
    [CaptureSession addOutput:MovieFileOutput]; 
//Note: the original added VideoDataOutput unconditionally outside the if; it should be guarded the same way 
if ([CaptureSession canAddOutput:VideoDataOutput]) 
    [CaptureSession addOutput:VideoDataOutput]; 
//We use this instead so it goes on a layer behind our UI controls (avoids us having to manually bring each control to the front): 
CameraView = [[UIView alloc] init]; 
[videoPreviewLayer addSubview:CameraView]; 
[videoPreviewLayer sendSubviewToBack:CameraView]; 
[[CameraView layer] addSublayer:PreviewLayer]; 

//----- START THE CAPTURE SESSION RUNNING ----- 
[CaptureSession startRunning]; 
} 

#pragma mark - IBACtion Methods 
-(IBAction)btnStartAndStopPressed:(id)sender 
{ 
UIButton *StartAndStopButton = (UIButton*)sender; 
if ([StartAndStopButton isSelected] == NO) 
{ 
    [StartAndStopButton setSelected:YES]; 
    [btnPauseAndResume setEnabled:YES]; 
    [btnBack setEnabled:NO]; 
    [btnSwitchCameraInput setHidden:YES]; 

    NSDate *date = [NSDate date]; 
    NSLog(@" date %@",date); 

    NSArray *paths     = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
    NSString *recordedFileName = nil; 
    recordedFileName = [NSString stringWithFormat:@"output%@.mov",date]; 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    self.outputPath     = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]]; 
    NSLog(@"%@",self.outputPath); 

    [arrVideoName addObject:recordedFileName]; 

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) 
    { 
     NSError *error; 
     if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO) 
     { 
      //Error - handle if required 
     } 
    } 
    //Start recording 
    [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
    recordingTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 target:self selector:@selector(VideoRecording) userInfo:nil repeats:YES]; 

} 
else 
{ 
    [StartAndStopButton setSelected:NO]; 
    [btnPauseAndResume setEnabled:NO]; 
    [btnBack setEnabled:YES]; 
    [btnSwitchCameraInput setHidden:NO]; 

    NSLog(@"STOP RECORDING"); 
    WeAreRecording = NO; 

    [MovieFileOutput stopRecording]; 
    [((ActOutAppDelegate *)ActOut_AppDelegate) showLoadingViewOnView:self.view withLabel:@"Please wait...."]; 

    if ([recordingTimer isValid]) 
    { 
     [recordingTimer invalidate]; 
     recordingTimer = nil; 
     recordingTime = 30; 
    } 

    stopRecording = YES; 
} 
} 

- (IBAction)btnPauseAndResumePressed:(id)sender 
{ 
UIButton *PauseAndResumeButton = (UIButton*)sender; 
if (PauseAndResumeButton.selected == NO) 
{ 
    PauseAndResumeButton.selected = YES; 
    NSLog(@"recording paused"); 
    WeAreRecording = NO; 

    [MovieFileOutput stopRecording]; 
    [self pauseTimer:recordingTimer]; 

    [btnStartAndStop setEnabled:NO]; 
    [btnBack setEnabled:YES]; 
    [btnSwitchCameraInput setHidden:NO]; 
} 
else 
{ 
    PauseAndResumeButton.selected = NO; 
    NSLog(@"recording resumed"); 

    [btnStartAndStop setEnabled:YES]; 
    [btnBack setEnabled:NO]; 
    [btnSwitchCameraInput setHidden:YES]; 

    WeAreRecording = YES; 

    NSDate *date = [NSDate date]; 
    NSLog(@" date %@",date); 

    NSArray *paths     = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask, YES); 
    NSString *recordedFileName = nil; 
    recordedFileName = [NSString stringWithFormat:@"output%@.mov",date]; 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    self.outputPath     = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@",recordedFileName]]; 
    NSLog(@"%@",self.outputPath); 

    [arrVideoName addObject:recordedFileName]; 

    NSURL *outputURL = [[NSURL alloc] initFileURLWithPath:self.outputPath]; 
    if ([[NSFileManager defaultManager] fileExistsAtPath:self.outputPath]) 
    { 
     NSError *error; 
     if ([[NSFileManager defaultManager] removeItemAtPath:self.outputPath error:&error] == NO) 
     { 
      //Error - handle if required 
     } 
    } 
    [self resumeTimer:recordingTimer]; 
    //Start recording 
    [MovieFileOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
} 
} 

- (void) CameraSetOutputProperties 
{ 
//SET THE CONNECTION PROPERTIES (output properties) 
AVCaptureConnection *CaptureConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 

[CaptureConnection setVideoOrientation:AVCaptureVideoOrientationPortrait]; 
//Set frame rate (if requried) 
CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 

if (CaptureConnection.supportsVideoMinFrameDuration) 
    CaptureConnection.videoMinFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 
if (CaptureConnection.supportsVideoMaxFrameDuration) 
    CaptureConnection.videoMaxFrameDuration = CMTimeMake(1, CAPTURE_FRAMES_PER_SECOND); 

CMTimeShow(CaptureConnection.videoMinFrameDuration); 
CMTimeShow(CaptureConnection.videoMaxFrameDuration); 
} 

- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position 
{ 
NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
for (AVCaptureDevice *Device in Devices) 
{ 
    if ([Device position] == Position) 
    { 
     NSLog(@"%d",Position); 
     return Device; 
    } 
} 
return nil; 
} 

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate Method 

-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 

if (videoWriterInput.readyForMoreMediaData && WeAreRecording) 
    [videoWriterInput appendSampleBuffer:sampleBuffer]; 

for(AVCaptureConnection *captureConnection in [captureOutput connections]) 
{ 
    if ([captureConnection isVideoOrientationSupported]) 
    { 
     AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationLandscapeLeft; 
     [captureConnection setVideoOrientation:orientation]; 
    } 
}  

UIDeviceOrientation curOr = [[UIDevice currentDevice] orientation]; 

CGAffineTransform t; //Note: t is computed below but never actually applied to anything 

if (curOr == UIDeviceOrientationPortrait) 
{ 
    t = CGAffineTransformMakeRotation(-M_PI/2); 
} 
else if (curOr == UIDeviceOrientationPortraitUpsideDown) 
{ 
    t = CGAffineTransformMakeRotation(M_PI/2); 
} 
else if (curOr == UIDeviceOrientationLandscapeRight) 
{ 
    t = CGAffineTransformMakeRotation(M_PI); 
} 
else 
{ 
    t = CGAffineTransformMakeRotation(0); 
} 
} 

#pragma mark - AVCaptureFileOutputRecordingDelegate Method 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error 
{ 
NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 
NSLog(@"output file url : %@", [outputFileURL absoluteString]); 

BOOL RecordedSuccessfully = YES; 
if ([error code] != noErr) 
{ 
    // A problem occurred: Find out if the recording was successful. 
    id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
    if (value) 
    { 
     RecordedSuccessfully = [value boolValue]; 
    } 
} 
AVCaptureConnection *videoConnection=nil; 
for (AVCaptureConnection *connection in [MovieFileOutput connections]) 
{ 
    NSLog(@"%@", connection); 
    for (AVCaptureInputPort *port in [connection inputPorts]) 
    { 
     NSLog(@"%@", port); 
     if ([[port mediaType] isEqual:AVMediaTypeVideo]) 
     { 
      videoConnection = connection; 
     } 
    } 
} 

if([videoConnection isVideoOrientationSupported]) // **Here it is, it's always false** 
{ 
    [videoConnection setVideoOrientation:(AVCaptureVideoOrientation)[[UIDevice currentDevice] orientation]]; 
} 

NSData *videoData = [NSData dataWithContentsOfURL:outputFileURL]; 
[videoData writeToFile:self.outputPath atomically:NO]; 

[arrOutputUrl addObject:outputFileURL]; 

if (stopRecording) 
{ 
    [self mergeMultipleVideo]; 
} 
} 

//Method to merge the recorded video segments 
-(void)mergeMultipleVideo 
{ 
mixComposition = [AVMutableComposition composition]; 

AVMutableCompositionTrack *compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

CMTime nextClipStartTime = kCMTimeZero; 
NSLog(@"Array of output file url : %@", arrOutputUrl); 
if (arrOutputUrl.count > 0) 
{ 
    for(int i = 0 ;i < [arrOutputUrl count];i++) 
    { 
     AVURLAsset* VideoAsset = [[AVURLAsset alloc]initWithURL:[arrOutputUrl objectAtIndex:i] options:nil]; 

     CMTimeRange timeRangeInAsset; 
     timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [VideoAsset duration]); 

     [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, VideoAsset.duration) ofTrack:[[VideoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 
     nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration); 
    } 
} 

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mov",self.finalRecordedVideoName]]; 
NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality]; 
exportSession.outputURL=url; 
exportSession.outputFileType = AVFileTypeQuickTimeMovie; 
exportSession.shouldOptimizeForNetworkUse = YES; 
[exportSession exportAsynchronouslyWithCompletionHandler:^{ 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     [self exportDidFinish:exportSession path:myPathDocs]; 
    }); 
}]; 
} 

-(void)exportDidFinish:(AVAssetExportSession*)session path:(NSString*)outputVideoPath 
{ 
NSLog(@"session.status : %d",session.status); 
if (session.status == AVAssetExportSessionStatusCompleted) 
{ 
    NSURL *outputURL = session.outputURL; 

    NSData *videoData = [NSData dataWithContentsOfURL:outputURL]; 
    [videoData writeToFile:outputVideoPath atomically:NO]; 

    if ([arrVideoName count] > 0) 
    { 
     for (int i = 0; i < [arrVideoName count]; i++) 
     { 
      NSArray* documentPaths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
      NSString* fullFilePath = [[documentPaths objectAtIndex:0] stringByAppendingPathComponent: [NSString stringWithFormat:@"%@",[arrVideoName objectAtIndex:i]]]; 

      NSLog(@"Full path of file to be deleted: %@",fullFilePath); 

      NSFileManager *fileManager = [NSFileManager defaultManager]; 
      NSError *error; 

      if ([fileManager fileExistsAtPath:fullFilePath]) 
      { 
       [fileManager removeItemAtPath:fullFilePath error:&error]; 
      } 
     } 
     [arrVideoName removeAllObjects]; 
    } 
    if (arrOutputUrl.count > 0) 
    { 
     [arrOutputUrl removeAllObjects]; 
    } 
    [((ActOutAppDelegate *)ActOut_AppDelegate) removeLoadingViewfromView:self.view]; 
    [self.view addSubview:afterRecordingPopupView]; 
} 
} 

Did you ever get a solution to this? –

Answer


Look at the enabled property of AVCaptureConnection. For the output connection, set enabled to NO instead of stopping the session.
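
A minimal sketch of what this suggests, reusing the MovieFileOutput variable from the question's code; whether this actually avoids finalizing the file is exactly what the comment below disputes:

//Sketch only: pause/resume by toggling the output connection rather than 
//stopping the session. MovieFileOutput is the question's AVCaptureMovieFileOutput 
//and recording is assumed to have already been started. 
AVCaptureConnection *fileConnection = [MovieFileOutput connectionWithMediaType:AVMediaTypeVideo]; 

//Pause: stop feeding frames to the file output; the session keeps running. 
fileConnection.enabled = NO; 

//Resume: re-enable the same connection. 
fileConnection.enabled = YES; 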


I tried that. But when I set enabled to NO on my AVCaptureConnection, - (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error still gets called, which is not what I want. –
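
For reference, the other route, hinted at by the videoWriterInput line in the question's didOutputSampleBuffer: callback, is to write the file yourself with an AVAssetWriter and simply skip appending frames while paused. A rough sketch under that assumption (videoWriterInput and WeAreRecording configured elsewhere, as in the question); note that a real implementation must also shift each buffer's presentation timestamp by the accumulated pause time, or the paused gaps will show up in the output:

//Sketch only: gate appends on the recording flag instead of stopping the output. 
//videoWriterInput (AVAssetWriterInput) and WeAreRecording are assumed to be 
//set up elsewhere, as in the question's code. 
- (void)captureOutput:(AVCaptureOutput *)captureOutput 
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer 
       fromConnection:(AVCaptureConnection *)connection 
{ 
    if (!WeAreRecording) 
        return; //paused: drop the frame, but keep the capture session running 

    if (videoWriterInput.readyForMoreMediaData) 
        [videoWriterInput appendSampleBuffer:sampleBuffer]; 
} 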