0

我是一位新編程和Objective-C(〜6周),現在我第一次使用AVFoundation。我的目標是適合我的水平,但對於熟悉框架的人來說不應該太難。我無法將AVFoundation視頻保存到本地URL

我的目標是創建一個'Snapchat'風格的自定義相機界面,當您點擊按鈕時捕捉靜止圖像,並在您按住時錄製視頻。

我已經能夠拼湊和粉碎通過大部分的代碼(視頻預覽,拍攝靜止圖像,編程按鈕等),但我不能成功地保存在本地視頻(將在本週晚些時候將其添加到在Parse之上構建的項目)。

ViewController.h (參考)

#import <UIKit/UIKit.h> 
#import <AVFoundation/AVFoundation.h> 

@interface ViewController : UIViewController 

@property UIButton *button; 
@property UIButton *saveButton; 
@property UIImageView *previewView; 

#define VIDEO_FILE @"test.mov" 

@end 

ViewController.m

我已經構造我的代碼是我在初始化第一組方法的會議的方式,然後將圖像和視頻捕獲分解成各自的部分。輸入設備是AVMediaTypeVideo,它分別輸出到AVCaptureStillImageOutput和AVCaptureMovieFileOutput。

#import "ViewController.h" 

@interface ViewController() <AVCaptureFileOutputRecordingDelegate> 

@end 

@implementation ViewController 
AVCaptureSession *session; 
AVCaptureStillImageOutput *imageOutput; 
AVCaptureMovieFileOutput *movieOutput; 
AVCaptureConnection *videoConnection; 

- (void)viewDidLoad { 
    [super viewDidLoad]; 
    [self testDevices]; 
    self.view.backgroundColor = [UIColor blackColor]; 

    //Image preview 
    self.previewView = [[UIImageView alloc]initWithFrame:self.view.frame]; 
    self.previewView.backgroundColor = [UIColor whiteColor]; 
    self.previewView.contentMode = UIViewContentModeScaleAspectFill; 
    self.previewView.hidden = YES; 
    [self.view addSubview:self.previewView]; 

    //Buttons 
    self.button = [self createButtonWithTitle:@"REC" chooseColor:[UIColor redColor]]; 
    UILongPressGestureRecognizer *longPressRecognizer = [[UILongPressGestureRecognizer alloc]initWithTarget:self action:@selector(handleLongPressGesture:)]; 
    [self.button addGestureRecognizer:longPressRecognizer]; 
    [self.button addTarget:self action:@selector(captureImage) forControlEvents:UIControlEventTouchUpInside]; 

    self.saveButton = [self createSaveButton]; 
    [self.saveButton addTarget:self action:@selector(saveActions) forControlEvents:UIControlEventTouchUpInside]; 
} 

- (void)viewWillAppear:(BOOL)animated { 
    //Tests 
    [self initializeAVItems]; 
    NSLog(@"%@", videoConnection); 
    NSLog(@"%@", imageOutput.connections); 
    NSLog(@"%@", imageOutput.description.debugDescription); 
} 

#pragma mark - AV initialization 

- (void)initializeAVItems { 
    //Start session, input 
    session = [[AVCaptureSession alloc]init]; 
    [session setSessionPreset:AVCaptureSessionPresetPhoto]; 

    AVCaptureDevice *inputDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 

    NSError *error; 
    AVCaptureDeviceInput *deviceInput = [AVCaptureDeviceInput deviceInputWithDevice:inputDevice error:&error]; 
    if ([session canAddInput:deviceInput]) { 
     [session addInput:deviceInput]; 
    } else { 
     NSLog(@"%@", error); 
    } 

    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc]initWithSession:session]; 
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 

    //Layer preview 
    CALayer *viewLayer = [[self view] layer]; 
    [viewLayer setMasksToBounds:YES]; 

    CGRect frame = self.view.frame; 
    [previewLayer setFrame:frame]; 
    [viewLayer insertSublayer:previewLayer atIndex:0]; 

    //Image Output 
    imageOutput = [[AVCaptureStillImageOutput alloc] init]; 
    NSDictionary *imageOutputSettings = [[NSDictionary alloc]initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil]; 
    imageOutput.outputSettings = imageOutputSettings; 

    //Video Output 
    movieOutput = [[AVCaptureMovieFileOutput alloc] init]; 


    [session addOutput:movieOutput]; 
    [session addOutput:imageOutput]; 
    [session startRunning]; 
} 

- (void)testDevices { 
    NSArray *devices = [AVCaptureDevice devices]; 
    for (AVCaptureDevice *device in devices) { 
     NSLog(@"Device name: %@", [device localizedName]); 
     if ([device hasMediaType:AVMediaTypeVideo]) { 
      if ([device position] == AVCaptureDevicePositionBack) { 
       NSLog(@"Device position : back"); 
      } 
      else { 
       NSLog(@"Device position : front"); 
      } 
     } 
    } 
} 

#pragma mark - Image capture 

- (void)captureImage { 
    AVCaptureConnection *videoConnection = nil; 
    for (AVCaptureConnection *connection in imageOutput.connections) { 
     for (AVCaptureInputPort *port in [connection inputPorts]) { 
      if ([[port mediaType] isEqual:AVMediaTypeVideo]) { 
       videoConnection = connection; 
       break; 
      } 
     } 
     if (videoConnection) { 
      break; 
     } 
    } 
    NSLog(@"Requesting capture from: %@", imageOutput); 
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 
     if (imageDataSampleBuffer != NULL) { 
      NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer]; 
      UIImage *image = [UIImage imageWithData:imageData]; 
      self.previewView.image = image; 
      self.previewView.hidden = NO; 
     } 
    }]; 
    [self saveButtonFlyIn:self.saveButton]; 
} 

#pragma mark - Video capture 

- (void)captureVideo { 
    NSLog(@"%@", movieOutput.connections); 
    [[NSFileManager defaultManager] removeItemAtURL:[self outputURL] error:nil]; 

    videoConnection = [self connectionWithMediaType:AVMediaTypeVideo fromConnections:movieOutput.connections]; 

    /* This is where the code is breaking */ 
    [movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self]; 

- (AVCaptureConnection *)connectionWithMediaType:(NSString *)mediaType fromConnections:(NSArray *)connections { 
    for (AVCaptureConnection *connection in connections) { 
     for (AVCaptureInputPort *port in [connection inputPorts]) { 
      if ([[port mediaType] isEqual:mediaType]) { 
       return connection; 
      } 
     } 
    } 
    return nil; 
} 

#pragma mark - AVCaptureFileOutputRecordingDelegate 

- (void)captureOutput:(AVCaptureFileOutput *)captureOutput didFinishRecordingToOutputFileAtURL:(NSURL *)outputFileURL fromConnections:(NSArray *)connections error:(NSError *)error { 
    if (!error) { 
     //Do something 
    } else { 
     NSLog(@"Error: %@", [error localizedDescription]); 
    } 
} 

#pragma mark - Recoding Destination URL 

- (NSURL *)outputURL { 
    NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]; 
    NSString *filePath = [documentsDirectory stringByAppendingPathComponent:VIDEO_FILE]; 
    return [NSURL fileURLWithPath:filePath]; 
} 

#pragma mark - Buttons 

- (void)handleLongPressGesture:(UILongPressGestureRecognizer *)recognizer { 
    if (recognizer.state == UIGestureRecognizerStateBegan) { 
     NSLog(@"Press"); 
     self.button.backgroundColor = [UIColor greenColor]; 
     [self captureVideo]; 
    } 
    if (recognizer.state == UIGestureRecognizerStateEnded) { 
     NSLog(@"Unpress"); 
     self.button.backgroundColor = [UIColor redColor]; 
    } 
} 

- (UIButton *)createButtonWithTitle:(NSString *)title chooseColor:(UIColor *)color { 
    UIButton *button = [[UIButton alloc] initWithFrame:CGRectMake(self.view.frame.size.width - 100, self.view.frame.size.height - 100, 85, 85)]; 
    button.layer.cornerRadius = button.bounds.size.width/2; 
    button.backgroundColor = color; 
    button.tintColor = [UIColor whiteColor]; 
    [self.view addSubview:button]; 
    return button; 
} 

- (UIButton *)createSaveButton { 
    UIButton *button = [[UIButton alloc]initWithFrame:CGRectMake(self.view.frame.size.width, 15, 85, 85)]; 
    button.layer.cornerRadius = button.bounds.size.width/2; 
    button.backgroundColor = [UIColor greenColor]; 
    button.tintColor = [UIColor whiteColor]; 
    button.userInteractionEnabled = YES; 
    [button setTitle:@"save" forState:UIControlStateNormal]; 
    [self.view addSubview:button]; 
    return button; 
} 

- (void)saveButtonFlyIn:(UIButton *)button { 
    CGRect movement = button.frame; 
    movement.origin.x = self.view.frame.size.width - 100; 

    [UIView animateWithDuration:0.2 animations:^{ 
     button.frame = movement; 
    }]; 
} 

- (void)saveButtonFlyOut:(UIButton *)button { 
    CGRect movement = button.frame; 
    movement.origin.x = self.view.frame.size.width; 

    [UIView animateWithDuration:0.2 animations:^{ 
     button.frame = movement; 
    }]; 
} 

#pragma mark - Save actions 

- (void)saveActions { 
    [self saveButtonFlyOut:self.saveButton]; 
    self.previewView.image = nil; 
    self.previewView.hidden = YES; 
} 

@end 

該行的代碼遊:

[movieOutput startRecordingToOutputFileURL:[self outputURL] recordingDelegate:self]; 

關閉我的頭頂,我想,這可能是一兩件事情:

  1. 是數據即使在那裏(記錄它,但無法驗證)?
  2. 我是否正確初始化目標網址?
  3. 數據是否與目標兼容?那是一件事嗎?

想知道如何檢查,測試或調試它,您會喜歡您的觀點/全新的眼睛/想法。

乾杯, Ĵ

回答

1

問題就出在你的實現-initializeAVItems

- (void)initializeAVItems { 
    //Start session, input 
    session = [[AVCaptureSession alloc]init]; 
    [session setSessionPreset:AVCaptureSessionPresetPhoto]; 
    ... 
} 

如果你想使用AVCaptureMovieFileOutput錄製視頻,你不能設置AVCaptureSessionsessionPresetAVCaptureSessionPresetPhoto,這是僅適用於靜止圖像。對於高質量的視頻輸出,我建議使用AVCaptureSessionPresetHigh

而且最好打電話canSetSessionPreset:之前確實設置:

session = [AVCaptureSession new]; 
if ([session canSetSessionPreset:AVCaptureSessionPresetHigh]) { 
    session.sessionPreset = AVCaptureSessionPresetHigh; 
} 
+0

完美,謝謝!這解決了這個問題。感謝您幫助我更好地瞭解AVFoundation。 –

相關問題