Xcode - How to pause video capture with OpenGL

I originally wrote some code to pause video capture based on the sample CapturePause project (base sample code on GitHub: https://github.com/cokecoffe/ios-demo/tree/master/capturepause/CapturePause). Obviously pausing the capture itself isn't the tricky part; the code below watches for the pause flag and then adjusts the timestamps of the incoming samples so that the recorded video has no gap. That worked very well, and here is the method that handles it:
- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
BOOL bVideo = YES;
@synchronized(self)
{
if (!self.isCapturing || self.isPaused)
{
return;
}
if (connection != _videoConnection)
{
bVideo = NO;
}
if ((_encoder == nil) && !bVideo)
{
CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer);
[self setAudioFormat:fmt];
NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
// optionally override the encoding dimensions for different quality settings
if (globalheightvalue != 0)
{
    _cy = globalheightvalue;
    _cx = globalwidthvalue;
}
_encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate];
}
if (_discont)
{
if (bVideo)
{
return;
}
_discont = NO;
// calc adjustment
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime last = bVideo ? _lastVideo : _lastAudio;
if (last.flags & kCMTimeFlags_Valid)
{
if (_timeOffset.flags & kCMTimeFlags_Valid)
{
pts = CMTimeSubtract(pts, _timeOffset);
}
CMTime offset = CMTimeSubtract(pts, last);
NSLog(@"Setting offset from %s", bVideo?"video": "audio");
NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale));
// this stops us having to set a scale for _timeOffset before we see the first video time
if (_timeOffset.value == 0)
{
_timeOffset = offset;
}
else
{
_timeOffset = CMTimeAdd(_timeOffset, offset);
}
}
_lastVideo.flags = 0;
_lastAudio.flags = 0;
}
// retain so that we can release either this or modified one
CFRetain(sampleBuffer);
if (_timeOffset.value > 0)
{
CFRelease(sampleBuffer);
sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset];
}
// record most recent time so we know the length of the pause
CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
CMTime dur = CMSampleBufferGetDuration(sampleBuffer);
if (dur.value > 0)
{
pts = CMTimeAdd(pts, dur);
}
if (bVideo)
{
_lastVideo = pts;
}
else
{
_lastAudio = pts;
}
}
// pass frame to encoder
[_encoder encodeFrame:sampleBuffer isVideo:bVideo];
CFRelease(sampleBuffer);
}
- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset
{
CMItemCount count;
CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count);
CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count);
CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count);
for (CMItemCount i = 0; i < count; i++)
{
pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset);
pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset);
}
CMSampleBufferRef sout;
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout);
free(pInfo);
return sout;
}
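For completeness, the pause/resume methods that drive the isPaused and _discont flags used above are essentially the ones from the CapturePause sample; I have paraphrased them here from memory, so the details may not match the repo exactly:

- (void) pauseCapture
{
    @synchronized(self)
    {
        if (self.isCapturing)
        {
            self.isPaused = YES;
            _discont = YES;   // makes the first frame after resume recalculate _timeOffset above
        }
    }
}

- (void) resumeCapture
{
    @synchronized(self)
    {
        if (self.isPaused)
        {
            self.isPaused = NO;
        }
    }
}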
I have now switched to the OpenGL-based GPUImage code for various reasons and am trying to get the same pause behaviour there (base code from https://github.com/BradLarson/GPUImage). I think I need to do something similar in the area below, but I'm not entirely sure, because the complexity of the OpenGL code goes way over my head! This is the relevant code from the file GPUImageVideoCamera.m:
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
if (capturePaused)
{
    return;
}
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent();
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer);
int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame);
int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame);
CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL);
if (colorAttachments != NULL)
{
if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo)
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
else
{
_preferredConversion = kColorConversion709;
}
}
else
{
if (isFullYUVRange)
{
_preferredConversion = kColorConversion601FullRange;
}
else
{
_preferredConversion = kColorConversion601;
}
}
CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
[GPUImageContext useImageProcessingContext];
if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV)
{
CVOpenGLESTextureRef luminanceTextureRef = NULL;
CVOpenGLESTextureRef chrominanceTextureRef = NULL;
// if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion
{
CVPixelBufferLockBaseAddress(cameraFrame, 0);
if ((imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight))
{
imageBufferWidth = bufferWidth;
imageBufferHeight = bufferHeight;
}
CVReturn err;
// Y-plane
glActiveTexture(GL_TEXTURE4);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef);
glBindTexture(GL_TEXTURE_2D, luminanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// UV-plane
glActiveTexture(GL_TEXTURE5);
if ([GPUImageContext deviceSupportsRedTextures])
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
else
{
err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef);
}
if (err)
{
NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err);
}
chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef);
glBindTexture(GL_TEXTURE_2D, chrominanceTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// if (!allTargetsWantMonochromeData)
// {
[self convertYUVToRGBOutput];
// }
int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight;
if (GPUImageRotationSwapsWidthAndHeight(internalRotation))
{
rotatedImageBufferWidth = bufferHeight;
rotatedImageBufferHeight = bufferWidth;
}
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
CFRelease(luminanceTextureRef);
CFRelease(chrominanceTextureRef);
}
else
{
}
if (_runBenchmark)
{
numberOfFramesCaptured++;
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
}
else
{
CVPixelBufferLockBaseAddress(cameraFrame, 0);
int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame);
outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow/4, bufferHeight) onlyTexture:YES];
[outputFramebuffer activateFramebuffer];
glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow/4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame));
[self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow/4 height:bufferHeight time:currentTime];
CVPixelBufferUnlockBaseAddress(cameraFrame, 0);
if (_runBenchmark)
{
numberOfFramesCaptured++;
if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK)
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
}
}
}
}
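For what it's worth, my rough (untested) idea is to keep the same kind of running offset in this class and hand an adjusted time to updateTargetsForVideoCameraUsingCacheTextureAtWidth:height:time: instead of currentTime, though I don't know whether this is the right layer to do it at. The ivars here (_pausedTimeOffset, _lastFrameTime, _justResumed) are ones I would have to add myself, they are not part of GPUImage:

// Sketch only - all three ivars are my own additions:
//   CMTime _pausedTimeOffset;   // total paused time so far, starts at kCMTimeZero
//   CMTime _lastFrameTime;      // PTS of the last frame processed, starts at kCMTimeInvalid
//   BOOL   _justResumed;        // set to YES by whatever resumes capture

// Returns the frame time with all accumulated pause time removed.
- (CMTime)adjustedFrameTime:(CMTime)currentTime
{
    if (_justResumed && CMTIME_IS_VALID(_lastFrameTime))
    {
        // Grow the offset by the length of the gap, as in the AVCaptureSession version above.
        CMTime gap = CMTimeSubtract(currentTime, _lastFrameTime);
        _pausedTimeOffset = CMTimeAdd(_pausedTimeOffset, gap);
        _justResumed = NO;
    }
    _lastFrameTime = currentTime;
    return CMTimeSubtract(currentTime, _pausedTimeOffset);
}

In other words, in processVideoSampleBuffer I would call [self adjustedFrameTime:currentTime] and pass the result to updateTargetsForVideoCameraUsingCacheTextureAtWidth:height:time:. If someone who knows this code can tell me whether the offset belongs here or further downstream (e.g. in the movie writer), that is exactly the kind of pointer I am after.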
FYI, the sample I'm working from in GPUImage is SimpleVideoFilter. Any guidance from someone with experience of this OpenGL code on how to go about adding the equivalent pause functionality would be very much appreciated. Thank you - Chaz