I am trying to make an app that displays a camera preview, then, under certain conditions, starts recording it with voice input, and finally plays back the recorded movie. Why do AVCaptureSession, AVAssetWriter and AVPlayer stutter when recording and playing back the video?
I have written one class each for preview, recording and playback, plus a controller that coordinates them.
These functions seem to work fine when called independently, but I cannot get them to work together: when the video is played back, the sound runs, but the image takes about five seconds to appear and then playback ends.
Here is my code:
Preview:
- (void) createSession
{
    _session = [[AVCaptureSession alloc] init];

    // Video input: front camera if available, otherwise the default camera.
    AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:FRONT_CAMERA_ID];
    if (!device) device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];

    NSError *error = nil;
    _cVideoInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cVideoInput];

    // Audio input: default microphone.
    device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    error = nil;
    _cAudioInput = [[AVCaptureDeviceInput deviceInputWithDevice:device error:&error] retain];
    if (!error) [_session addInput:_cAudioInput];

    // Preview layer.
    _cameraLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:_session];
    _cameraLayer.frame = self.bounds;
    [self.layer addSublayer:_cameraLayer];

    // Video and audio data outputs share one serial dispatch queue.
    _videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];
    [_videoOutput setVideoSettings:[NSDictionary dictionaryWithContentsOfFile:VIDEO_SETTINGS]];

    _audioOutput = [[AVCaptureAudioDataOutput alloc] init];

    dispatch_queue_t queue = dispatch_queue_create(OUTPUT_QUEUE_NAME, NULL);
    [_videoOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_videoOutput];
    [_audioOutput setSampleBufferDelegate:self queue:queue];
    [_session addOutput:_audioOutput];

    dispatch_set_context(queue, self);
    dispatch_set_finalizer_f(queue, queue_finalizer);
    dispatch_release(queue);

    [_session startRunning];
}
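
(VIDEO_SETTINGS points to a plist whose contents are not shown here. As an assumption, a video data output is typically configured with just a pixel format; the equivalent in code would look like this:)

NSDictionary *videoSettings =
    [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA]
                                forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
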
- (void) deleteSession
{
    [_session stopRunning];

    [(AVCaptureVideoPreviewLayer *)_cameraLayer setSession:nil];
    [_cameraLayer removeFromSuperlayer];
    [_cameraLayer release];
    _cameraLayer = nil;

    // Detach the delegates before releasing the outputs.
    [_audioOutput setSampleBufferDelegate:nil queue:NULL];
    [_videoOutput setSampleBufferDelegate:nil queue:NULL];
    [_audioOutput release];
    _audioOutput = nil;
    [_videoOutput release];
    _videoOutput = nil;

    [_cAudioInput release];
    _cAudioInput = nil;
    [_cVideoInput release];
    _cVideoInput = nil;

    // The session still retains its inputs and outputs; remove them all.
    NSArray *inputs = [_session inputs];
    for (AVCaptureInput *input in inputs)
        [_session removeInput:input];
    NSArray *outputs = [_session outputs];
    for (AVCaptureOutput *output in outputs)
        [_session removeOutput:output];

    [_session release];
    _session = nil;
}
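
Both outputs deliver their buffers to this class through the same delegate callback, which forwards them to the recording methods shown under Recording below. A minimal sketch of the assumed forwarding (_recorder is a stand-in for however the controller wires the two classes together):

- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    // Route by output: the same serial queue serves both audio and video.
    if (captureOutput == _videoOutput)
        [_recorder RecordingVideoWithBuffer:sampleBuffer];
    else if (captureOutput == _audioOutput)
        [_recorder RecordingAudioWithBuffer:sampleBuffer];
}
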
Recording:
- (void) createWriter
{
    // Remove any previous recording at the target path.
    NSString *file = [self file];
    if ([[NSFileManager defaultManager] fileExistsAtPath:file])
        [[NSFileManager defaultManager] removeItemAtPath:file error:NULL];

    NSError *error = nil;
    _writer = [[AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:file] fileType:AVFileTypeQuickTimeMovie error:&error] retain];
    if (error)
    {
        [_writer release];
        _writer = nil;
        NSLog(@"%@", error);
        return;
    }

    // Audio: mono AAC at 44.1 kHz, 64 kbps.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
                              [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                              [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                              [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                              [NSNumber numberWithInt:64000], AVEncoderBitRateKey,
                              [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                              nil];
    _wAudioInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:settings] retain];
    [_writer addInput:_wAudioInput];

    // Video: H.264 at 640x480, matching the session preset.
    settings = [NSDictionary dictionaryWithObjectsAndKeys:
                AVVideoCodecH264, AVVideoCodecKey,
                [NSNumber numberWithInt:640], AVVideoWidthKey,
                [NSNumber numberWithInt:480], AVVideoHeightKey,
                nil];
    _wVideoInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings] retain];
    [_writer addInput:_wVideoInput];
}
- (void) deleteWriter
{
    [_wVideoInput release];
    _wVideoInput = nil;
    [_wAudioInput release];
    _wAudioInput = nil;
    [_writer release];
    _writer = nil;
}
- (void) RecordingAudioWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wAudioInput isReadyForMoreMediaData])
        [_wAudioInput appendSampleBuffer:sampleBuffer];
}

- (void) RecordingVideoWithBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (![self canRecordBuffer:sampleBuffer])
        return;
    if ([_wVideoInput isReadyForMoreMediaData])
        [_wVideoInput appendSampleBuffer:sampleBuffer];
}
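
canRecordBuffer: is not shown above. Whatever else it checks, the writer has to have been started and its session anchored at the first buffer's timestamp before appendSampleBuffer: can succeed; a simplified sketch of such a gate (the _recording flag is hypothetical):

- (BOOL) canRecordBuffer:(CMSampleBufferRef)sampleBuffer
{
    if (!_recording) return NO;  // hypothetical flag set by the controller

    // Start the writer once, anchored at the first buffer's timestamp,
    // so appended samples are timed relative to the start of recording.
    if (_writer.status == AVAssetWriterStatusUnknown)
    {
        [_writer startWriting];
        [_writer startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
    }
    return _writer.status == AVAssetWriterStatusWriting;
}
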
Playback:
- (void) observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context
{
    // The status observer fires once; remove it, then start playback if ready.
    AVPlayerItem *item = (AVPlayerItem *)object;
    [item removeObserver:self forKeyPath:@"status"];
    switch (item.status)
    {
        case AVPlayerItemStatusReadyToPlay:
            // Start from the beginning and watch for the end of playback.
            [_player seekToTime:kCMTimeZero];
            [_player play];
            [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(finishPlaying:) name:AVPlayerItemDidPlayToEndTimeNotification object:item];
            break;
        case AVPlayerItemStatusUnknown:
        case AVPlayerItemStatusFailed:
            break;
        default:
            break;
    }
}
- (void) finishPlaying:(NSNotification *)notification
{
    [_player pause];
    [_playerLayer removeFromSuperlayer];
    [_playerLayer release];
    _playerLayer = nil;
    [_player release];
    _player = nil;
    [[NSNotificationCenter defaultCenter] removeObserver:self name:AVPlayerItemDidPlayToEndTimeNotification object:nil];
}
- (void) play:(NSString *)path
{
    _player = [[AVPlayer playerWithURL:[NSURL fileURLWithPath:path]] retain];
    _playerLayer = [[AVPlayerLayer playerLayerWithPlayer:_player] retain];
    // Rotate 90 degrees and flip vertically so the movie matches the capture orientation.
    _playerLayer.transform = CATransform3DScale(CATransform3DMakeRotation(M_PI_2, 0, 0, 1), 1, -1, 1);
    _playerLayer.frame = self.bounds;
    [self.layer addSublayer:_playerLayer];
    [_player.currentItem addObserver:self forKeyPath:@"status" options:0 context:NULL];
}
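
The controller that coordinates these classes is not reproduced here; the sequence it drives is assumed to be roughly the following (all names are illustrative):

// Hypothetical coordination sequence, simplified:
[previewView createSession];        // live camera preview starts
[recorder createWriter];            // writer prepared before recording begins
// ...voice input starts recording; buffers arrive via the capture delegate...
// ...recording stops and the writer finishes, completing the movie file...
[previewView deleteSession];
[playerView play:[recorder file]];  // replay the recorded movie
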