
I have been struggling with a problem for the past two weeks. I am developing an iPad application in which the user can annotate the screen while the screen is being recorded. The annotation part works fine on its own, but as soon as I start recording, the drawing loses its smoothness and lags for the whole duration of the screen recording. For the recording I am using AVAssetWriter. The code runs for both the annotation and the recording, so I cannot figure out where the problem is.

The screenshots I capture are (1050, 650) in size.

Should I use Grand Central Dispatch to solve this? (A sketch of what that could look like follows the code listing below.) Can anyone help me solve my problem?

Please help.

My code

    // For Annotation

    - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = NO;
        UITouch *touch = [touches anyObject];

        if ([touch tapCount] == 2) {
            drawImage.image = nil; // double-tap clears the drawing
            return;
        }

        lastPoint = [touch locationInView:self.view];
        lastPoint.y -= 20;
    }

    - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
    {
        mouseSwiped = YES;
        UITouch *touch = [touches anyObject];
        CGPoint currentPoint = [touch locationInView:self.view];
        currentPoint.y -= 20;

        // Note: this re-encodes the entire drawImage bitmap on every finger
        // movement, which is expensive and competes with the recording timer.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
        CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
        CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 10.0);
        CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
        CGContextBeginPath(UIGraphicsGetCurrentContext());
        CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
        CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), currentPoint.x, currentPoint.y);
        CGContextStrokePath(UIGraphicsGetCurrentContext());
        drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        lastPoint = currentPoint;
    }

    - (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event
    {
        UITouch *touch = [touches anyObject];
        if ([touch tapCount] == 2) {
            drawImage.image = nil;
            return;
        }

        if (!mouseSwiped) {
            // A tap without movement draws a single dot at lastPoint.
            UIGraphicsBeginImageContext(drawImage.frame.size);
            [drawImage.image drawInRect:CGRectMake(0, 0, drawImage.frame.size.width, drawImage.frame.size.height)];
            CGContextSetLineCap(UIGraphicsGetCurrentContext(), kCGLineCapRound);
            CGContextSetLineWidth(UIGraphicsGetCurrentContext(), 5.0);
            CGContextSetRGBStrokeColor(UIGraphicsGetCurrentContext(), 1.0, 0.0, 0.0, 1.0);
            CGContextMoveToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextAddLineToPoint(UIGraphicsGetCurrentContext(), lastPoint.x, lastPoint.y);
            CGContextStrokePath(UIGraphicsGetCurrentContext());
            drawImage.image = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();
        }
    }
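One contributor to the stutter may be the drawing itself: the handlers above re-encode the whole drawImage bitmap on every touchesMoved:. A lighter-weight pattern, sketched below with a hypothetical DrawView class that is not part of the question's code, is to accumulate the stroke in a CGMutablePathRef and stroke it in drawRect:, so each finger movement only marks the view dirty:

    #import <UIKit/UIKit.h>

    // Hypothetical canvas view (illustrative; the question uses a UIImageView).
    @interface DrawView : UIView
    {
        CGMutablePathRef path; // the accumulated stroke
    }
    @end

    @implementation DrawView

    - (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event
    {
        if (!path) path = CGPathCreateMutable();
        CGPoint p = [[touches anyObject] locationInView:self];
        CGPathMoveToPoint(path, NULL, p.x, p.y); // start a new stroke segment
    }

    - (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event
    {
        CGPoint p = [[touches anyObject] locationInView:self];
        CGPathAddLineToPoint(path, NULL, p.x, p.y);
        [self setNeedsDisplay]; // deferred, coalesced redraw instead of re-encoding a bitmap
    }

    - (void)drawRect:(CGRect)rect
    {
        if (!path) return;
        CGContextRef ctx = UIGraphicsGetCurrentContext();
        CGContextSetLineCap(ctx, kCGLineCapRound);
        CGContextSetLineWidth(ctx, 10.0);
        CGContextSetRGBStrokeColor(ctx, 1.0, 0.0, 0.0, 1.0);
        CGContextAddPath(ctx, path);
        CGContextStrokePath(ctx);
    }

    - (void)dealloc
    {
        if (path) CGPathRelease(path);
        [super dealloc];
    }

    @end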



    // For Screen Recording

    #define FRAME_WIDTH  1024
    #define FRAME_HEIGHT 650
    #define TIME_SCALE   600

    - (UIImage *)screenshot
    {
        // Renders the whole view hierarchy into a bitmap context.
        // Note: the context is sized to drawImage's frame, not self.view's,
        // so the capture is clipped if the two differ.
        UIGraphicsBeginImageContext(drawImage.frame.size);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return viewImage;
    }
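renderInContext: on the full view hierarchy is usually the most expensive call in this loop. One small variant worth trying (an assumption on my part, not something from the question; the name screenshotOpaque is illustrative) is the WithOptions form with an opaque context, which lets Core Graphics skip per-pixel alpha blending:

    - (UIImage *)screenshotOpaque
    {
        // Opaque context at an explicit 1x scale: less per-pixel work for
        // renderInContext: than a transparent context.
        UIGraphicsBeginImageContextWithOptions(drawImage.frame.size, YES, 1.0);
        [self.view.layer renderInContext:UIGraphicsGetCurrentContext()];
        UIImage *viewImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return viewImage;
    }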


    - (NSURL *)pathToDocumentsDirectory
    {
        NSString *outputPath = [[NSString alloc] initWithFormat:@"%@/%@",
            [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0],
            @"output.mov"];
        outputURL = [[NSURL alloc] initFileURLWithPath:outputPath];

        // Delete any recording left over from a previous run.
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if ([fileManager fileExistsAtPath:outputPath]) {
            NSError *error;
            if ([fileManager removeItemAtPath:outputPath error:&error] == NO) {
                NSLog(@"Could not delete old recording file at path: %@", outputPath);
            }
        }

        [outputPath release];
        return [outputURL autorelease];
    }


    // Frees the copied pixel data once CoreVideo is finished with the buffer.
    // Without a release callback (or a later CFRelease) every captured frame
    // leaks its pixel data, which alone can cause growing lag.
    static void ReleasePixelData (void *releaseRefCon, const void *baseAddress)
    {
        CFRelease((CFDataRef)releaseRefCon);
    }

    - (void)writeSample:(NSTimer *)_timer
    {
        if (assetWriterInput.readyForMoreMediaData) {
            CVReturn cvErr = kCVReturnSuccess;

            // Grab a screenshot. (Per-frame NSLog calls are avoided here: at
            // 40 fps, logging is itself a measurable source of stutter.)
            CGImageRef image = (CGImageRef)[[self screenshot] CGImage];

            // Wrap the image bytes in a pixel buffer. Note the mismatch: the
            // screenshots are 1050x650 but the buffer is declared as
            // FRAME_WIDTH x FRAME_HEIGHT (1024x650).
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 FRAME_WIDTH,
                                                 FRAME_HEIGHT,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void *)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 ReleasePixelData, // releases imageData with the buffer
                                                 (void *)imageData,
                                                 NULL,
                                                 &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);
                CFRelease(imageData);
                return;
            }

            // Stamp the frame with wall-clock time since recording began.
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

            // Write the sample.
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
            CVPixelBufferRelease(pixelBuffer); // the adaptor retains it while it needs it

            if (!appended) {
                NSLog(@"failed to append sample at time %lf", CMTimeGetSeconds(presentationTime));
                [self stopRecording];
            }
        }
    }


    - (void)startRecording
    {
        movieURL = [self pathToDocumentsDirectory];
        NSLog(@"path=%@", movieURL);
        movieError = nil;

        [assetWriter release];
        assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                                fileType:AVFileTypeQuickTimeMovie
                                                   error:&movieError];
        [self writer];

        // Schedule a repeating timer at roughly frameRate frames per second.
        // (processingSeconds is effectively zero here, so the interval is
        // about 1/frameRate, i.e. 25 ms at 40 fps.)
        NSDate *start = [NSDate date];
        frameRate = 40.0f;
        float processingSeconds = [[NSDate date] timeIntervalSinceDate:start];
        delayRemaining = (1.0 / self.frameRate) - processingSeconds;

        [assetWriterTimer release];
        assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:delayRemaining > 0.0 ? delayRemaining : 0.01
                                                            target:self
                                                          selector:@selector(writeSample:)
                                                          userInfo:nil
                                                           repeats:YES];
    }


    - (void)writer
    {
        // H.264 video input matching the pixel buffer dimensions.
        NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
            AVVideoCodecH264, AVVideoCodecKey,
            [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
            [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
            nil];

        assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                              outputSettings:assetWriterInputSettings];
        assetWriterInput.expectsMediaDataInRealTime = YES;
        [assetWriter addInput:assetWriterInput];

        [assetWriterPixelBufferAdaptor release];
        assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                            initWithAssetWriterInput:assetWriterInput
                                         sourcePixelBufferAttributes:nil];

        [assetWriter startWriting];
        firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];
    }


    - (void)stopRecording
    {
        [assetWriterTimer invalidate];
        assetWriterTimer = nil;

        [assetWriter finishWriting];
        NSLog(@"finished writing");
    }
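On the Grand Central Dispatch question: a minimal sketch of how the work could be split, assuming a serial dispatch_queue_t ivar named writeQueue and reusing the ReleasePixelData callback from writeSample: above (the queue label and method name are illustrative, not from the project). The screenshot must still be taken on the main thread, because UIKit rendering is main-thread-only, but the pixel-buffer creation and append can run in the background so the drawing code is blocked for as little time as possible:

    // dispatch_queue_t writeQueue; // assumed ivar, created lazily below

    - (void)writeSampleAsync:(NSTimer *)_timer
    {
        if (!assetWriterInput.readyForMoreMediaData) return;

        // Main thread only: UIKit rendering.
        UIImage *shot = [self screenshot];
        CFAbsoluteTime frameTime = CFAbsoluteTimeGetCurrent();

        if (!writeQueue)
            writeQueue = dispatch_queue_create("screen.writer", NULL); // serial: frames stay in order

        dispatch_async(writeQueue, ^{
            // The copied block retains 'shot' until it has run (MRR semantics).
            CGImageRef image = shot.CGImage;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            CVPixelBufferRef pixelBuffer = NULL;
            CVReturn cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                          FRAME_WIDTH,
                                                          FRAME_HEIGHT,
                                                          kCVPixelFormatType_32BGRA,
                                                          (void *)CFDataGetBytePtr(imageData),
                                                          CGImageGetBytesPerRow(image),
                                                          ReleasePixelData,
                                                          (void *)imageData,
                                                          NULL,
                                                          &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                CFRelease(imageData);
                return;
            }

            // Use the capture time, not the append time, for the timestamp.
            CFTimeInterval elapsed = frameTime - firstFrameWallClockTime;
            CMTime pts = CMTimeMake(elapsed * TIME_SCALE, TIME_SCALE);
            [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                        withPresentationTime:pts];
            CVPixelBufferRelease(pixelBuffer);
        });
    }

Even so, the capture itself stays on the main thread, so if renderInContext: is the bottleneck this only hides part of the cost; combining it with a lower frame rate (see the answer below) helps more.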

Answer


The simplest approach is to lower the frame rate as much as you can.
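In the question's code the rate is the frameRate value set in -startRecording; a minimal sketch of the change (the 10 fps figure is only an example, not part of the answer):

    // In -startRecording: at 40 fps each frame has 25 ms to render, copy and
    // encode, which a full renderInContext: capture rarely fits into. At
    // 10 fps each frame gets 100 ms of headroom.
    frameRate = 10.0f; // example value: use the highest rate that stays smooth
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / frameRate
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];

Because writeSample: stamps each frame with the wall-clock elapsed time via CMTimeMake, the movie stays in sync at the lower rate; playback simply contains fewer frames per second.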