
I am trying to write a timecode, in the form of a CATextLayer, onto video that is being recorded to disk through an AVCaptureSession. Below is the code I have so far; I have not been able to find any documentation, or anything through Google, that shows how this might be done. In short: how do I write a CATextLayer overlay into the AVCaptureSession output before it is written to disk?

I originally accomplished this with GPUImage, but that code was unstable and crashed. The library's author has confirmed that GPUImage currently cannot be used reliably for this purpose.

CaptureSessionManager.h

#import <CoreMedia/CoreMedia.h> 
#import <AVFoundation/AVFoundation.h> 


@interface CaptureSessionManager:NSObject 

@property (nonatomic,strong) AVCaptureVideoPreviewLayer *previewLayer; 
@property (nonatomic,strong) AVCaptureSession *captureSession; 
@property (nonatomic,strong) AVCaptureMovieFileOutput *captureOutput; 
@property (nonatomic,strong) AVCaptureDeviceInput *videoIn; 

- (void)addVideoPreviewLayer; 
- (void)addVideoInput; 
- (void)addVideoOutput; 
- (void)toggleDeviceCamera; 

- (void)toggleRecording; 

@end 

CaptureSessionManager.m

#import "CaptureSessionManager.h" 
#import <CoreMedia/CoreMedia.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AssetsLibrary/AssetsLibrary.h> 
#import <QuartzCore/QuartzCore.h> 
#import <CoreVideo/CoreVideo.h> 

#define CAPTURE_FRAMES_PER_SECOND  20 

@interface CaptureSessionManager() <AVCaptureFileOutputRecordingDelegate, 
            AVCaptureVideoDataOutputSampleBufferDelegate> { 
    BOOL isRecording; 
} 
@end 

@implementation CaptureSessionManager 
@synthesize captureSession; 
@synthesize previewLayer; 
@synthesize captureOutput; 
@synthesize videoIn; 

#pragma mark Capture Session Configuration 

- (id)init { 
    if ((self = [super init])) { 
     [self setCaptureSession:[[[AVCaptureSession alloc] init] autorelease]];  //autorelease so the retaining setter doesn't leak (MRC)
    } 
    return self; 
} 

- (void)addVideoPreviewLayer { 
    [self setPreviewLayer:[[[AVCaptureVideoPreviewLayer alloc] initWithSession:[self captureSession]] autorelease]]; 
    [[self previewLayer] setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

} 

- (void)addVideoInput { 
    AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 
    if (videoDevice) { 
     NSError *error; 
     if ([videoDevice isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus] && 
      [videoDevice lockForConfiguration:&error]) { 
      [videoDevice setFocusMode:AVCaptureFocusModeContinuousAutoFocus]; 
      [videoDevice unlockForConfiguration]; 
     } 
      //Use the property setter so the autoreleased input is retained (MRC)
      [self setVideoIn:[AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]];
      if (videoIn) {
      if ([[self captureSession] canAddInput:videoIn]) { 
       [[self captureSession] addInput:videoIn]; 

      } else 
       NSLog(@"Couldn't add video input"); 
     } 
     else 
      NSLog(@"Couldn't create video input"); 
    } 
    else 
     NSLog(@"Couldn't create video capture device"); 
} 

- (void)addVideoOutput { 
    //ADD MOVIE FILE OUTPUT 
    NSLog(@"Adding movie file output"); 
    captureOutput = [[AVCaptureMovieFileOutput alloc] init]; 

    Float64 TotalSeconds = 60;   //Total seconds 
    int32_t preferredTimeScale = 30; //Frames per second 
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale); //<<SET MAX DURATION 
    captureOutput.maxRecordedDuration = maxDuration; 
    captureOutput.minFreeDiskSpaceLimit = 1024 * 1024;      //<<SET MIN FREE SPACE IN BYTES FOR RECORDING TO CONTINUE ON A VOLUME 

    if ([self.captureSession canAddOutput:captureOutput]) 
     [self.captureSession addOutput:captureOutput]; 

    //SET THE CONNECTION PROPERTIES (output properties) 
    [self CameraSetOutputProperties];   //(We call a method as it also has to be done after changing camera) 

    [self.captureSession setSessionPreset:AVCaptureSessionPresetMedium]; 
} 

- (void) CameraSetOutputProperties
{
    //SET THE CONNECTION PROPERTIES (output properties)
    //Grab the video connection; orientation, stabilization, etc. would be configured on it here
    AVCaptureConnection *CaptureConnection = [captureOutput connectionWithMediaType:AVMediaTypeVideo];
    (void)CaptureConnection;  //Nothing is configured on the connection yet
}

- (void)toggleDeviceCamera 
{ 
    if ([[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] count] > 1)  //Only do if device has multiple cameras 
    { 
     NSLog(@"Toggle camera"); 
     NSError *error; 
     //AVCaptureDeviceInput *videoInput = [self videoInput]; 
     AVCaptureDeviceInput *NewVideoInput; 
     AVCaptureDevicePosition position = [[videoIn device] position]; 
     if (position == AVCaptureDevicePositionBack) 
     { 
      NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionFront] error:&error]; 
     } 
     else if (position == AVCaptureDevicePositionFront) 
     { 
      NewVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:[self CameraWithPosition:AVCaptureDevicePositionBack] error:&error]; 
     } 

     if (NewVideoInput != nil) 
     { 
      [self.captureSession beginConfiguration];  //We can now change the inputs and output configuration. Use commitConfiguration to end 
      [self.captureSession removeInput:videoIn]; 
      if ([self.captureSession canAddInput:NewVideoInput]) 
      { 
       [self.captureSession addInput:NewVideoInput]; 
       [self setVideoIn:NewVideoInput];  //retain via the property so the release below is balanced (MRC)
      } 
      else 
      { 
       [self.captureSession addInput:videoIn]; 
      } 

      //Set the connection properties again 
      [self CameraSetOutputProperties]; 


      [self.captureSession commitConfiguration]; 
      [NewVideoInput release]; 
     } 
    } 
} 

//********** START STOP RECORDING BUTTON ********** 
- (void)toggleRecording { 

    if (!isRecording) 
    { 
     //----- START RECORDING ----- 
     NSLog(@"START RECORDING"); 
     isRecording = YES; 

     //Create temporary URL to record to 
     NSString *outputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"output.mov"];
     NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
     NSFileManager *fileManager = [NSFileManager defaultManager]; 
     if ([fileManager fileExistsAtPath:outputPath]) 
     { 
      NSError *error; 
      if ([fileManager removeItemAtPath:outputPath error:&error] == NO) 
      { 
       //Error - handle if required
      } 
     } 
     //Start recording 
     [captureOutput startRecordingToOutputFileURL:outputURL recordingDelegate:self]; 
    } 
    else 
    { 
     //----- STOP RECORDING ----- 
     NSLog(@"STOP RECORDING"); 
     isRecording = NO; 

     [captureOutput stopRecording]; 
    } 
} 

- (AVCaptureDevice *) CameraWithPosition:(AVCaptureDevicePosition) Position 
{ 
    NSArray *Devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; 
    for (AVCaptureDevice *Device in Devices) 
    { 
     if ([Device position] == Position) 
     { 
      return Device; 
     } 
    } 
    return nil; 
} 

//Note: this AVCaptureVideoDataOutputSampleBufferDelegate callback never fires here,
//because only an AVCaptureMovieFileOutput is added to the session (no AVCaptureVideoDataOutput)
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"a");
}

//********** DID FINISH RECORDING TO OUTPUT FILE AT URL ********** 
- (void)captureOutput:(AVCaptureFileOutput *)captureOutput 
didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL 
     fromConnections:(NSArray *)connections 
       error:(NSError *)error 
{ 

    NSLog(@"didFinishRecordingToOutputFileAtURL - enter"); 

    BOOL RecordedSuccessfully = YES; 
    if ([error code] != noErr) 
    { 
     // A problem occurred: Find out if the recording was successful. 
     id value = [[error userInfo] objectForKey:AVErrorRecordingSuccessfullyFinishedKey]; 
     if (value) 
     { 
      RecordedSuccessfully = [value boolValue]; 
     } 
    } 
    if (RecordedSuccessfully) 
    { 
     //----- RECORDED SUCCESSFULLY -----
     NSLog(@"didFinishRecordingToOutputFileAtURL - success"); 
     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputFileURL])
     {
      [library writeVideoAtPathToSavedPhotosAlbum:outputFileURL
             completionBlock:^(NSURL *assetURL, NSError *error)
      {
       if (error)
       {
        NSLog(@"Error saving video to the Saved Photos album: %@", error);
       }
       [library release];  //Balance the alloc once the asynchronous write completes (MRC)
      }];
     }
     else
     {
      [library release];
     }
    } 
} 

- (void)dealloc {
    [captureSession release];
    [previewLayer release];
    [captureOutput release];
    [videoIn release];
    [super dealloc];
}

@end 

Answer

Well, I was able to arrive at my own solution, and I sincerely hope it helps someone else. To get a basic AVFoundation setup that records video and audio and writes to disk, download RosyWriter from Apple: https://developer.apple.com/library/prerelease/ios/samplecode/RosyWriter/RosyWriter.zip
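
For orientation, here is a minimal sketch of the kind of capture-to-writer pipeline that RosyWriter sets up: an AVCaptureVideoDataOutput hands you every frame as a CMSampleBufferRef, and an AVAssetWriter writes the (possibly modified) frames to disk. This is only an illustration of the shape of the approach, not code taken from the sample; the class name FrameWriter is made up, and the sketch assumes ARC.

#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>

//On the session side you would add an AVCaptureVideoDataOutput and call
//-setSampleBufferDelegate:queue: with an instance of this class.
@interface FrameWriter : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
@property (nonatomic, strong) AVAssetWriter *assetWriter;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@end

@implementation FrameWriter

- (void)setUpWriterWithURL:(NSURL *)outputURL size:(CGSize)size {
    NSError *error = nil;
    self.assetWriter = [[AVAssetWriter alloc] initWithURL:outputURL
                                                 fileType:AVFileTypeQuickTimeMovie
                                                    error:&error];
    NSDictionary *settings = @{ AVVideoCodecKey  : AVVideoCodecH264,
                                AVVideoWidthKey  : @(size.width),
                                AVVideoHeightKey : @(size.height) };
    self.videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                         outputSettings:settings];
    self.videoInput.expectsMediaDataInRealTime = YES;
    [self.assetWriter addInput:self.videoInput];
}

//Called once per captured frame; this is the point where an overlay can be
//drawn into the pixel buffer before the frame is appended to the writer.
- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    if (self.assetWriter.status == AVAssetWriterStatusUnknown) {
        [self.assetWriter startWriting];
        [self.assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    if (self.videoInput.isReadyForMoreMediaData) {
        //...modify the buffer here (e.g. draw a timecode), then append it
        [self.videoInput appendSampleBuffer:sampleBuffer];
    }
}

//When recording stops, call [self.videoInput markAsFinished] and then
//[self.assetWriter finishWritingWithCompletionHandler:...] to close the file.

@end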

Once you have that sample running, just hook into one of the renderers it provides. I strongly recommend the OpenGL renderer. You will need to brush up on your UIView drawing, but this gets you 90% of the way there!
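
If the OpenGL renderer turns out to be more than you need, a simpler CPU-bound alternative (my own sketch, not anything from RosyWriter) is to lock each pixel buffer in the sample buffer callback and burn the timecode string into it with Core Graphics before the frame is appended to the writer. The sketch below assumes the video data output is configured for kCVPixelFormatType_32BGRA pixel buffers; a CATextLayer could equally be rendered into the same context with -renderInContext: instead of drawing a plain string.

#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>

//Draws a timecode string into the top-left corner of a 32BGRA pixel buffer.
static void DrawTimecodeOnPixelBuffer(CVPixelBufferRef pixelBuffer, NSString *timecode) {
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                                 CVPixelBufferGetWidth(pixelBuffer),
                                                 CVPixelBufferGetHeight(pixelBuffer),
                                                 8,
                                                 CVPixelBufferGetBytesPerRow(pixelBuffer),
                                                 colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    if (context) {
        //Flip to UIKit's top-left origin so the string drawing comes out the right way up
        CGContextTranslateCTM(context, 0.0, (CGFloat)CVPixelBufferGetHeight(pixelBuffer));
        CGContextScaleCTM(context, 1.0, -1.0);

        UIGraphicsPushContext(context);
        [timecode drawAtPoint:CGPointMake(20.0, 20.0)
               withAttributes:@{ NSFontAttributeName            : [UIFont boldSystemFontOfSize:24.0],
                                 NSForegroundColorAttributeName : [UIColor whiteColor] }];
        UIGraphicsPopContext();
        CGContextRelease(context);
    }
    CGColorSpaceRelease(colorSpace);

    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
}

In the didOutputSampleBuffer: callback you would call DrawTimecodeOnPixelBuffer(CMSampleBufferGetImageBuffer(sampleBuffer), @"00:00:01:15") before appending the frame; the timecode string itself can be derived from CMSampleBufferGetPresentationTimeStamp.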

Comment: Thanks very much for the find. I wonder whether this has changed with Apple's more recent APIs, for example whether OpenGL is still needed now that Metal has been introduced.