Xcode - how to pause video capture when using OpenGL

I originally wrote some code to pause the video capture based on the sample CapturePause code (base example code on GitHub here - https://github.com/cokecoffe/ios-demo/tree/master/capturepause/CapturePause). Pausing the capture itself is obviously not tricky, but the code below watches for the pause flag and then adjusts the timestamps in the video so that there is no gap, and this has worked very well. Here is the method that handles it:

- (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
BOOL bVideo = YES; 
@synchronized(self) 
{ 
    if (!self.isCapturing || self.isPaused) 
    { 
     return; 
    } 
    if (connection != _videoConnection) 
    { 
     bVideo = NO; 
    } 
    if ((_encoder == nil) && !bVideo) 
    { 
     CMFormatDescriptionRef fmt = CMSampleBufferGetFormatDescription(sampleBuffer); 
     [self setAudioFormat:fmt]; 
     NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile]; 
     NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename]; 

     // additional quality/resolution override (only applied when a global size has been set)
     if (globalheightvalue != 0)
     {
      _cy = globalheightvalue;
      _cx = globalwidthvalue;
     }

     _encoder = [VideoEncoder encoderForPath:path Height:_cy width:_cx channels:_channels samples:_samplerate]; 
    } 
    if (_discont) 
    { 
     if (bVideo) 
     { 
      return; 
     } 
     _discont = NO; 
     // calc adjustment 
     CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
     CMTime last = bVideo ? _lastVideo : _lastAudio; 
     if (last.flags & kCMTimeFlags_Valid) 
     { 
      if (_timeOffset.flags & kCMTimeFlags_Valid) 
      { 
       pts = CMTimeSubtract(pts, _timeOffset); 
      } 
      CMTime offset = CMTimeSubtract(pts, last); 
      NSLog(@"Setting offset from %s", bVideo?"video": "audio"); 
      NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale)); 

      // this stops us having to set a scale for _timeOffset before we see the first video time 
      if (_timeOffset.value == 0) 
      { 
       _timeOffset = offset; 
      } 
      else 
      { 
       _timeOffset = CMTimeAdd(_timeOffset, offset); 
      } 
     } 
     _lastVideo.flags = 0; 
     _lastAudio.flags = 0; 
    } 

    // retain so that we can release either this or modified one 
    CFRetain(sampleBuffer); 

    if (_timeOffset.value > 0) 
    { 
     CFRelease(sampleBuffer); 
     sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset]; 
    } 

    // record most recent time so we know the length of the pause 
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
    CMTime dur = CMSampleBufferGetDuration(sampleBuffer); 
    if (dur.value > 0) 
    { 
     pts = CMTimeAdd(pts, dur); 
    } 
    if (bVideo) 
    { 
     _lastVideo = pts; 
    } 
    else 
    { 
     _lastAudio = pts; 
    } 
} 

// pass frame to encoder 
[_encoder encodeFrame:sampleBuffer isVideo:bVideo]; 
CFRelease(sampleBuffer); 
} 


- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset 
{ 
 CMItemCount count; 
 // first call gets the number of timing entries, second call fills the allocated array 
 CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); 
 CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count); 
 CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); 
 for (CMItemCount i = 0; i < count; i++) 
 { 
     // shift both timestamps back by the accumulated pause offset 
     pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); 
     pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); 
 } 
} 
CMSampleBufferRef sout; 
CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); 
free(pInfo); 
return sout; 
} 
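
For context, the pause and resume calls in the CapturePause sample simply flip the flags that the callback above checks; a minimal sketch of what that looks like (written from memory of the sample, so the exact method bodies here are an assumption):

- (void) pauseCapture 
{ 
 @synchronized(self) 
 { 
  if (self.isCapturing) 
  { 
   // stop consuming frames and remember that the next frame will be discontinuous 
   self.isPaused = YES; 
   _discont = YES; 
  } 
 } 
} 

- (void) resumeCapture 
{ 
 @synchronized(self) 
 { 
  if (self.isPaused) 
  { 
   // frames start flowing again; captureOutput then computes the pause offset 
   self.isPaused = NO; 
  } 
 } 
} 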

I have now switched to OpenGL-based code for various reasons (base code from here - https://github.com/BradLarson/GPUImage) and am trying to get the same functionality. I think I need to add something similar in this area, but I am not entirely sure, because the complexity of the OpenGL code is rather over my head. Here is the relevant code from the file GPUImageVideoCamera.m:

- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; 
{ 
 if (capturePaused) 
 { 
     return; 
 } 
CFAbsoluteTime startTime = CFAbsoluteTimeGetCurrent(); 
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(sampleBuffer); 
int bufferWidth = (int) CVPixelBufferGetWidth(cameraFrame); 
int bufferHeight = (int) CVPixelBufferGetHeight(cameraFrame); 
CFTypeRef colorAttachments = CVBufferGetAttachment(cameraFrame, kCVImageBufferYCbCrMatrixKey, NULL); 
if (colorAttachments != NULL) 
{ 
    if(CFStringCompare(colorAttachments, kCVImageBufferYCbCrMatrix_ITU_R_601_4, 0) == kCFCompareEqualTo) 
    { 
     if (isFullYUVRange) 
     { 
      _preferredConversion = kColorConversion601FullRange; 
     } 
     else 
     { 
      _preferredConversion = kColorConversion601; 
     } 
    } 
    else 
    { 
     _preferredConversion = kColorConversion709; 
    } 
} 
else 
{ 
    if (isFullYUVRange) 
    { 
     _preferredConversion = kColorConversion601FullRange; 
    } 
    else 
    { 
     _preferredConversion = kColorConversion601; 
    } 
} 

CMTime currentTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 

[GPUImageContext useImageProcessingContext]; 

if ([GPUImageContext supportsFastTextureUpload] && captureAsYUV) 
{ 
    CVOpenGLESTextureRef luminanceTextureRef = NULL; 
    CVOpenGLESTextureRef chrominanceTextureRef = NULL; 

//  if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures]) 
    if (CVPixelBufferGetPlaneCount(cameraFrame) > 0) // Check for YUV planar inputs to do RGB conversion 
    { 
     CVPixelBufferLockBaseAddress(cameraFrame, 0); 

     if ((imageBufferWidth != bufferWidth) && (imageBufferHeight != bufferHeight)) 
     { 
      imageBufferWidth = bufferWidth; 
      imageBufferHeight = bufferHeight; 
     } 

     CVReturn err; 
     // Y-plane 
     glActiveTexture(GL_TEXTURE4); 
     if ([GPUImageContext deviceSupportsRedTextures]) 
     { 
      err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); 
     } 
     else 
     { 
      err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE, bufferWidth, bufferHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &luminanceTextureRef); 
     } 
     if (err) 
     { 
      NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 
     } 

     luminanceTexture = CVOpenGLESTextureGetName(luminanceTextureRef); 
     glBindTexture(GL_TEXTURE_2D, luminanceTexture); 
     glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
     glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 

     // UV-plane 
     glActiveTexture(GL_TEXTURE5); 
     if ([GPUImageContext deviceSupportsRedTextures]) 
     { 
      err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef); 
     } 
     else 
     { 
      err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, [[GPUImageContext sharedImageProcessingContext] coreVideoTextureCache], cameraFrame, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, bufferWidth/2, bufferHeight/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &chrominanceTextureRef); 
     } 
     if (err) 
     { 
      NSLog(@"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err); 
     } 

     chrominanceTexture = CVOpenGLESTextureGetName(chrominanceTextureRef); 
     glBindTexture(GL_TEXTURE_2D, chrominanceTexture); 
     glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 
     glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 

//   if (!allTargetsWantMonochromeData) 
//   { 
      [self convertYUVToRGBOutput]; 
//   } 

     int rotatedImageBufferWidth = bufferWidth, rotatedImageBufferHeight = bufferHeight; 

     if (GPUImageRotationSwapsWidthAndHeight(internalRotation)) 
     { 
      rotatedImageBufferWidth = bufferHeight; 
      rotatedImageBufferHeight = bufferWidth; 
     } 

     [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:rotatedImageBufferWidth height:rotatedImageBufferHeight time:currentTime]; 

     CVPixelBufferUnlockBaseAddress(cameraFrame, 0); 
     CFRelease(luminanceTextureRef); 
     CFRelease(chrominanceTextureRef); 
    } 
    else 
    { 
    } 


    if (_runBenchmark) 
    { 
     numberOfFramesCaptured++; 
     if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK) 
     { 
      CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); 
      totalFrameTimeDuringCapture += currentFrameTime; 
      NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]); 
      NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime); 
     } 
    } 
} 
else 
{ 
    CVPixelBufferLockBaseAddress(cameraFrame, 0); 

    int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(cameraFrame); 
    outputFramebuffer = [[GPUImageContext sharedFramebufferCache] fetchFramebufferForSize:CGSizeMake(bytesPerRow/4, bufferHeight) onlyTexture:YES]; 
    [outputFramebuffer activateFramebuffer]; 

    glBindTexture(GL_TEXTURE_2D, [outputFramebuffer texture]); 

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bytesPerRow/4, bufferHeight, 0, GL_BGRA, GL_UNSIGNED_BYTE, CVPixelBufferGetBaseAddress(cameraFrame)); 

    [self updateTargetsForVideoCameraUsingCacheTextureAtWidth:bytesPerRow/4 height:bufferHeight time:currentTime]; 

    CVPixelBufferUnlockBaseAddress(cameraFrame, 0); 

    if (_runBenchmark) 
    { 
     numberOfFramesCaptured++; 
     if (numberOfFramesCaptured > INITIALFRAMESTOIGNOREFORBENCHMARK) 
     { 
      CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime); 
      totalFrameTimeDuringCapture += currentFrameTime; 
     } 
    } 
} 
} 

FYI, the example project I am working from in the OpenGL framework is SimpleVideoFilter. Any help from someone with experience of this OpenGL code on how to add proper pause functionality would be very much appreciated - Chaz

Answer


After a lot of trial and error, merging the two code samples above seems to work very robustly. So, for anyone using Brad Larson's OpenGL framework: put this property into GPUImageVideoCamera.h:

@property(readwrite, nonatomic) BOOL discont; 
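
The snippet below also relies on a few CMTime bookkeeping ivars and a timestamp-adjustment helper that are not part of stock GPUImageVideoCamera; a minimal sketch of the extra instance variables to declare alongside the property (an assumption about the wiring, mirroring the first example):

// assumed additions to GPUImageVideoCamera's instance variables: 
CMTime _timeOffset;   // total length of all pauses so far 
CMTime _lastVideo;    // presentation time (+ duration) of the last video frame seen 
CMTime _lastAudio;    // presentation time (+ duration) of the last audio buffer seen 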

and put this code in place of the captureOutput function in the GPUImageVideoCamera.m file:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
if (!self.captureSession.isRunning) 
{ 
    return; 
} 
if (capturePaused) 
{ 
    return; 
} 
else if (captureOutput == audioOutput) 
{ 
    [self processAudioSampleBuffer:sampleBuffer]; 
} 
else 
{ 
    if (dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) 
    { 
     return; 
    } 

    if (_discont) 
    { 
        _discont = NO; 
        // calc adjustment 
        CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
        CMTime last = _lastVideo; 
        if (last.flags & kCMTimeFlags_Valid) 
        { 
            if (_timeOffset.flags & kCMTimeFlags_Valid) 
            { 
                pts = CMTimeSubtract(pts, _timeOffset); 
            } 
            CMTime offset = CMTimeSubtract(pts, last); 
            NSLog(@"Adding %f to %f (pts %f)", ((double)offset.value)/offset.timescale, ((double)_timeOffset.value)/_timeOffset.timescale, ((double)pts.value/pts.timescale)); 

            // this stops us having to set a scale for _timeOffset before we see the first video time 
            if (_timeOffset.value == 0) 
            { 
                _timeOffset = offset; 
            } 
            else 
            { 
                _timeOffset = CMTimeAdd(_timeOffset, offset); 
            } 
        } 
        _lastVideo.flags = 0; 
        _lastAudio.flags = 0; 
    } 

    // retain so that we can release either this or modified one 
    CFRetain(sampleBuffer); 

    if (_timeOffset.value > 0) 
    { 
        CFRelease(sampleBuffer); 
        sampleBuffer = [self adjustTime:sampleBuffer by:_timeOffset]; 
    } 

    // record most recent time so we know the length of the pause 
    CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 
    CMTime dur = CMSampleBufferGetDuration(sampleBuffer); 
    if (dur.value > 0) 
    { 
        pts = CMTimeAdd(pts, dur); 
    } 
    _lastVideo = pts; 

    runAsynchronouslyOnVideoProcessingQueue(^{ 
     //Feature Detection Hook. 
     if (self.delegate) 
     { 
      [self.delegate willOutputSampleBuffer:sampleBuffer]; 
     } 

     [self processVideoSampleBuffer:sampleBuffer]; 

     CFRelease(sampleBuffer); 
     dispatch_semaphore_signal(frameRenderingSemaphore); 
    }); 
} 
} 
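
This version calls the same adjustTime: helper used in the first example, so that method needs to be copied into GPUImageVideoCamera.m as well (same body as shown above):

- (CMSampleBufferRef) adjustTime:(CMSampleBufferRef) sample by:(CMTime) offset 
{ 
 CMItemCount count; 
 // first call gets the number of timing entries, second call fills the allocated array 
 CMSampleBufferGetSampleTimingInfoArray(sample, 0, nil, &count); 
 CMSampleTimingInfo* pInfo = malloc(sizeof(CMSampleTimingInfo) * count); 
 CMSampleBufferGetSampleTimingInfoArray(sample, count, pInfo, &count); 
 for (CMItemCount i = 0; i < count; i++) 
 { 
     // shift both timestamps back by the accumulated pause offset 
     pInfo[i].decodeTimeStamp = CMTimeSubtract(pInfo[i].decodeTimeStamp, offset); 
     pInfo[i].presentationTimeStamp = CMTimeSubtract(pInfo[i].presentationTimeStamp, offset); 
 } 
 CMSampleBufferRef sout; 
 CMSampleBufferCreateCopyWithNewTiming(nil, sample, count, pInfo, &sout); 
 free(pInfo); 
 return sout; 
} 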

The code in your main view controller is then super simple (shown here as button actions, but you can call it from most places). Be sure to set _discont = NO at startup as well, to keep the flag clean:

- (IBAction)PauseButton:(id)sender 
{ 
[videoCamera pauseCameraCapture]; 
videoCamera.discont = YES; 
} 

- (IBAction)ResumeButton:(id)sender 
{ 
 [videoCamera resumeCameraCapture]; 
} 
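
And, as mentioned above, reset the flag at startup; a minimal sketch assuming the usual viewDidLoad setup for the camera (the rest of the GPUImage configuration is omitted here):

- (void)viewDidLoad 
{ 
 [super viewDidLoad]; 
 // ... set up videoCamera, filters and the movie writer as usual ... 
 videoCamera.discont = NO;   // start with a clean discontinuity flag 
} 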

Hope this helps anyone facing the same challenge.