Performance issues when using AVCaptureVideoDataOutput and AVCaptureAudioDataOutput

I have latency problems when recording audio + video using AVCaptureVideoDataOutput and AVCaptureAudioDataOutput. Sometimes the video blocks for a few milliseconds, and sometimes the audio is out of sync with the video.
I added some logging and observed that I first get a lot of video buffers in the captureOutput callback, and only after a while do I start getting the audio buffers (sometimes I don't receive the audio buffers at all, and there is no sound). If I comment out the code that handles the video buffers, I get the audio buffers without any problem.
Here is the code I use:
- (void)initMovieOutput:(AVCaptureSession *)captureSessionLocal
{
    // Video output: deliver raw frames as bi-planar 4:2:0 pixel buffers.
    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    self._videoOutput = dataOutput;
    [dataOutput release];
    self._videoOutput.alwaysDiscardsLateVideoFrames = NO;
    self._videoOutput.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
                                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];

    AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
    self._audioOutput = audioOutput;
    [audioOutput release];

    [captureSessionLocal addOutput:self._videoOutput];
    [captureSessionLocal addOutput:self._audioOutput];

    // Set up the queue: both outputs deliver their sample buffers
    // on the same serial dispatch queue.
    dispatch_queue_t queue = dispatch_queue_create("MyQueue", NULL);
    [self._videoOutput setSampleBufferDelegate:self queue:queue];
    [self._audioOutput setSampleBufferDelegate:self queue:queue];
    dispatch_release(queue);
}
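The setup above delivers both video and audio callbacks on one serial queue, so long-running video processing can delay (or starve) delivery of the audio buffers. A variant discussed in the comments at the end of this page gives each output its own serial queue; a minimal sketch, with illustrative queue names:

    // One serial queue per output, so slow video handling does not block
    // delivery of audio sample buffers. The writer calls in the callback
    // must then be synchronized, since the two queues run concurrently
    // with respect to each other (see the sketch after the comments below).
    dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);
    dispatch_queue_t audioQueue = dispatch_queue_create("audioQueue", NULL);
    [self._videoOutput setSampleBufferDelegate:self queue:videoQueue];
    [self._audioOutput setSampleBufferDelegate:self queue:audioQueue];
    dispatch_release(videoQueue);
    dispatch_release(audioQueue);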
Here is where I set up the writer:
- (BOOL)setupWriter:(NSURL *)videoURL session:(AVCaptureSession *)captureSessionLocal
{
    NSError *error = nil;
    self._videoWriter = [[AVAssetWriter alloc] initWithURL:videoURL
                                                  fileType:AVFileTypeQuickTimeMovie
                                                     error:&error];
    NSParameterAssert(self._videoWriter);

    // Add the video input.
    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:640], AVVideoWidthKey,
                                   [NSNumber numberWithInt:480], AVVideoHeightKey,
                                   nil];
    self._videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                outputSettings:videoSettings];
    NSParameterAssert(self._videoWriterInput);
    self._videoWriterInput.expectsMediaDataInRealTime = YES;
    self._videoWriterInput.transform = [self returnOrientation];

    // Add the audio input.
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    // Both kinds of audio settings I tried cause the output video file to be corrupted.
    // Apple Lossless should work on any device, but requires more space.
    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                                         [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                                         [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                         [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                         [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                         nil];
    self._audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                                outputSettings:audioOutputSettings];
    self._audioWriterInput.expectsMediaDataInRealTime = YES;

    // Add the inputs to the writer.
    [self._videoWriter addInput:_videoWriterInput];
    [self._videoWriter addInput:_audioWriterInput];

    return YES;
}
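For comparison, the other configuration commonly used for an audio AVAssetWriterInput is AAC, which produces far smaller files than Apple Lossless; a minimal sketch, where the bit rate value is purely illustrative and acl is the same channel layout as above:

    // Hypothetical AAC alternative to the Apple Lossless settings above.
    NSDictionary *aacSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                 [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                 [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                 [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                 [NSNumber numberWithInt:64000], AVEncoderBitRateKey, // illustrative bit rate
                                 [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                 nil];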
And here is the callback:
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer))
    {
        NSLog(@"sample buffer is not ready. Skipping sample");
        return;
    }

    if (_videoWriter.status != AVAssetWriterStatusCompleted)
    {
        // Start the writer session at the timestamp of the first sample we see.
        if (_videoWriter.status != AVAssetWriterStatusWriting)
        {
            CMTime lastSampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_videoWriter startWriting];
            [_videoWriter startSessionAtSourceTime:lastSampleTime];
        }

        if (captureOutput == _videoOutput)
        {
            if ([self._videoWriterInput isReadyForMoreMediaData])
            {
                [self newVideoSample:sampleBuffer];
            }
        }
        else if (captureOutput == _audioOutput)
        {
            if ([self._audioWriterInput isReadyForMoreMediaData])
            {
                [self newAudioSample:sampleBuffer];
            }
        }
    }
}
- (void)newAudioSample:(CMSampleBufferRef)sampleBuffer
{
    if (_videoWriter.status > AVAssetWriterStatusWriting)
    {
        [self NSLogPrint:[NSString stringWithFormat:@"Audio:Warning: writer status is %d", _videoWriter.status]];
        if (_videoWriter.status == AVAssetWriterStatusFailed)
            [self NSLogPrint:[NSString stringWithFormat:@"Audio:Error: %@", _videoWriter.error]];
        return;
    }
    if (![_audioWriterInput appendSampleBuffer:sampleBuffer])
        [self NSLogPrint:@"Unable to write to audio input"];
}

- (void)newVideoSample:(CMSampleBufferRef)sampleBuffer
{
    if (_videoWriter.status > AVAssetWriterStatusWriting)
    {
        [self NSLogPrint:[NSString stringWithFormat:@"Video:Warning: writer status is %d", _videoWriter.status]];
        if (_videoWriter.status == AVAssetWriterStatusFailed)
            [self NSLogPrint:[NSString stringWithFormat:@"Video:Error: %@", _videoWriter.error]];
        return;
    }
    if (![_videoWriterInput appendSampleBuffer:sampleBuffer])
        [self NSLogPrint:@"Unable to write to video input"];
}
Is there something wrong with my code? Why does the video lag? (I'm testing it on an iPhone 4 with iOS 4.2.1.)
Thanks. That was a big help. – Liron
I know this answer is old, but can you give an example of how to do this? I tried separate (new) serial queues, and that didn't work; I also tried setting up a queue with DISPATCH_QUEUE_CONCURRENT, and that didn't help either. –
To elaborate on my last comment: when I use two separate queues, my writer fails. –
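A possible explanation for that failure: with one queue per output, the captureOutput: callback runs concurrently for audio and video, so the AVAssetWriter startWriting/append calls can race unless all writer access is funneled through a single serial queue. A minimal sketch under that assumption, where _writerQueue is a hypothetical ivar holding a serial queue created once (e.g. in init):

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    if (!CMSampleBufferDataIsReady(sampleBuffer))
        return;

    CFRetain(sampleBuffer); // keep the buffer alive across the async hop
    dispatch_async(_writerQueue, ^{
        // All writer state is touched on _writerQueue only, so the
        // start-once logic and the appends can no longer race.
        if (_videoWriter.status != AVAssetWriterStatusWriting &&
            _videoWriter.status != AVAssetWriterStatusCompleted)
        {
            [_videoWriter startWriting];
            [_videoWriter startSessionAtSourceTime:
                CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
        }
        if (captureOutput == _videoOutput &&
            [_videoWriterInput isReadyForMoreMediaData])
        {
            [_videoWriterInput appendSampleBuffer:sampleBuffer];
        }
        else if (captureOutput == _audioOutput &&
                 [_audioWriterInput isReadyForMoreMediaData])
        {
            [_audioWriterInput appendSampleBuffer:sampleBuffer];
        }
        CFRelease(sampleBuffer);
    });
}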