iOS movie generation with an OpenGL texture cache does not render frames correctly

I generate a movie by rendering OpenGL textures. Some frames of the resulting movie look as if they were not fully rendered, because they show part of the previous frame. The problem does not appear if I add [NSThread sleepForTimeInterval:0.05]; but I cannot rely on that instruction.

This is the result without [NSThread sleepForTimeInterval:0.05]

This is the result when adding [NSThread sleepForTimeInterval:0.05]

The code I use is:

dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
    // Video writer
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:tempVideoPath]
                                                           fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:MOVsize.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:MOVsize.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput *videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                              outputSettings:videoSettings];
    videoWriterInput.expectsMediaDataInRealTime = NO;

    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);

    [videoWriter addInput:videoWriterInput];

    NSDictionary *pixelAttributesDict = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kCVPixelFormatType_32BGRA], kCVPixelBufferPixelFormatTypeKey,
                                         [NSNumber numberWithInt:1024], kCVPixelBufferWidthKey,
                                         [NSNumber numberWithInt:768], kCVPixelBufferHeightKey,
                                         nil];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                                                                                     sourcePixelBufferAttributes:pixelAttributesDict];
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    if ([EAGLContext currentContext] != glContext)
        [EAGLContext setCurrentContext:glContext];

    [self createDataFBO:adaptor];

    for (int frame = 0; frame < samplesNumber; frame++) {

        while (!videoWriterInput.readyForMoreMediaData)
        {
            NSDate *maxDate = [NSDate dateWithTimeIntervalSinceNow:0.1];
            [[NSRunLoop currentRunLoop] runUntilDate:maxDate];
        }

        //glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

        // Render the current frame with OpenGL
        [self renderFrameAt:frame];

        CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
        BOOL test = (lockResult == kCVReturnSuccess) && [adaptor appendPixelBuffer:renderTarget withPresentationTime:CMTimeMake(frame, kFps)];
        if (!test) {
            NSLog(@"append failed!");
        }

        // If I add [NSThread sleepForTimeInterval:0.05]; here, rendering works correctly

        CVPixelBufferUnlockBaseAddress(renderTarget, 0);

        dispatch_async(dispatch_get_main_queue(), ^{
            [self updateProgressBar];
        });
    }

    [videoWriterInput markAsFinished];
    [videoWriter endSessionAtSourceTime:CMTimeMake(samplesNumber, kFps)];
    [videoWriter finishWritingWithCompletionHandler:^{

        [self destroyDataFBO];

        // End of movie generation
    }];
});

}

This is what I use to create the texture cache:

- (void)createDataFBO:(AVAssetWriterInputPixelBufferAdaptor *)adaptor
{
    glActiveTexture(GL_TEXTURE1);
    glGenFramebuffers(1, &movieFramebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, movieFramebuffer);

#if defined(__IPHONE_6_0)
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, [EAGLContext currentContext], NULL, &coreVideoTextureCache);
#else
    CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, (__bridge void *)[EAGLContext currentContext], NULL, &coreVideoTextureCache);
#endif

    if (err)
    {
        NSAssert(NO, @"Error at CVOpenGLESTextureCacheCreate %d", err);
    }

    // Code originally sourced from http://allmybrain.com/2011/12/08/rendering-to-a-texture-with-ios-5-texture-cache-api/
    CVPixelBufferPoolCreatePixelBuffer(NULL, [adaptor pixelBufferPool], &renderTarget);

    size_t frameWidth = CVPixelBufferGetWidth(renderTarget);
    size_t frameHeight = CVPixelBufferGetHeight(renderTarget);

    CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, coreVideoTextureCache, renderTarget,
                                                 NULL,              // texture attributes
                                                 GL_TEXTURE_2D,
                                                 GL_RGBA,           // opengl format
                                                 frameWidth,
                                                 frameHeight,
                                                 GL_BGRA,           // native iOS format
                                                 GL_UNSIGNED_BYTE,
                                                 0,
                                                 &renderTexture);

    glBindTexture(CVOpenGLESTextureGetTarget(renderTexture), CVOpenGLESTextureGetName(renderTexture));
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, CVOpenGLESTextureGetName(renderTexture), 0);

    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);

    NSAssert(status == GL_FRAMEBUFFER_COMPLETE, @"Incomplete filter FBO: %d", status);
}
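
The destroyDataFBO method called from the completion handler is not shown in the question. A minimal sketch of what the matching teardown might look like, assuming movieFramebuffer, coreVideoTextureCache, renderTexture and renderTarget are the same instance variables used in createDataFBO: above (this is an assumption, not the poster's actual code):

- (void)destroyDataFBO
{
    // Hypothetical teardown, mirroring the setup in createDataFBO:
    if (movieFramebuffer)
    {
        glDeleteFramebuffers(1, &movieFramebuffer);
        movieFramebuffer = 0;
    }

    if (renderTexture)
    {
        // CVOpenGLESTextureRef is a Core Foundation object, so release it with CFRelease
        CFRelease(renderTexture);
        renderTexture = NULL;
    }

    if (renderTarget)
    {
        CVPixelBufferRelease(renderTarget);
        renderTarget = NULL;
    }

    if (coreVideoTextureCache)
    {
        CVOpenGLESTextureCacheFlush(coreVideoTextureCache, 0);
        CFRelease(coreVideoTextureCache);
        coreVideoTextureCache = NULL;
    }
}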

Answer

Have you tried calling glFlush() after doing your rendering?
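
A minimal sketch of where that call would sit in the frame loop from the question (an illustration, not the poster's actual code; glFinish() is the stronger variant that blocks until the GPU has executed the commands):

    // Inside the for loop, after rendering and before appending:
    [self renderFrameAt:frame];

    glFlush();   // submit any buffered GL commands for this frame
    // or: glFinish();  // block until the GPU has actually executed them

    CVReturn lockResult = CVPixelBufferLockBaseAddress(renderTarget, 0);
    // ... append renderTarget via the adaptor as before ...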


I use glFinish() at the end of rendering, and I tried adding glFlush(), but I still get overlapping frames. – vitil