Screen recording on the iPhone while a video is playing

In my project I have to add a book page-turn animation and play a video on the book's right-hand page. When the first video finishes, the page turns and the second video plays on the next right-hand page, and so on. I also have to save all of this as a downloadable video, so that the downloaded file, played from the gallery, looks the same as what plays in my app. At the moment I record the device's screen and save the result on a server for download. Everything works except the video player: in the recording, the area where the videos play (the book's right-hand page) is not captured. I am using the code below to record the screen. If anyone has another idea for achieving the same thing, please share it, or suggest any changes needed to my code. Thanks in advance.

// ASScreenRecorder.h
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

typedef void (^VideoCompletionBlock)(void);

@protocol ASScreenRecorderDelegate;

@interface ASScreenRecorder : NSObject
@property (nonatomic, readonly) BOOL isRecording;

@property (nonatomic, weak) id <ASScreenRecorderDelegate> delegate;

// if videoURL is nil, the video will be saved into the camera roll;
// this property cannot be changed while recording is in progress
@property (strong, nonatomic) NSURL *videoURL;

+ (instancetype)sharedInstance;
- (BOOL)startRecording;
- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock;
@end


// If your view contains an AVCaptureVideoPreviewLayer or an OpenGL view,
// you'll need to write that data into the CGContextRef yourself.
// In the view controller responsible for the AVCaptureVideoPreviewLayer/OpenGL view,
// set yourself as the delegate for ASScreenRecorder:
//     [ASScreenRecorder sharedInstance].delegate = self;
// Then implement 'writeBackgroundFrameInContext:(CGContextRef*)contextRef' and
// use 'CGContextDrawImage' to draw your view into the provided CGContextRef.
@protocol ASScreenRecorderDelegate <NSObject>
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef;
@end
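Per the header comments above, content that UIKit snapshotting cannot see (such as the AVPlayerLayer that plays the book's videos) has to be drawn into the context through this delegate. Below is a minimal sketch of what such an implementation could look like, assuming an AVPlayerItemVideoOutput has already been attached to the playing AVPlayerItem; self.playerItem, self.videoOutput and self.playerLayer are illustrative names, not part of the recorder.

// Minimal sketch (illustrative names): pull the current video frame from an
// AVPlayerItemVideoOutput and draw it where the player layer sits.
// Requires the CoreImage framework; the output must have been attached first:
//     [self.playerItem addOutput:self.videoOutput];
- (void)writeBackgroundFrameInContext:(CGContextRef*)contextRef
{
    CMTime itemTime = self.playerItem.currentTime;
    if (![self.videoOutput hasNewPixelBufferForItemTime:itemTime]) {
        return;
    }
    CVPixelBufferRef pixelBuffer = [self.videoOutput copyPixelBufferForItemTime:itemTime
                                                             itemTimeForDisplay:NULL];
    if (!pixelBuffer) {
        return;
    }
    // Convert the pixel buffer into a CGImage and draw it into the recorder's context.
    CIImage *frame = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *ciContext = [CIContext contextWithOptions:nil];
    CGImageRef cgFrame = [ciContext createCGImage:frame fromRect:frame.extent];

    // Depending on the CTM already applied to the provided context,
    // the target rect may need an additional vertical flip.
    CGContextDrawImage(*contextRef, self.playerLayer.frame, cgFrame);

    CGImageRelease(cgFrame);
    CVPixelBufferRelease(pixelBuffer);
}

In practice the CIContext should be created once and reused rather than recreated per frame; the sketch above only shows the data flow.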



// ASScreenRecorder.m 
// ScreenRecorder 
// 
// Created by Alan Skipp on 23/04/2014. 
// Copyright (c) 2014 Alan Skipp. All rights reserved. 
// 

#import "ASScreenRecorder.h" 
#import <AVFoundation/AVFoundation.h> 
#import <QuartzCore/QuartzCore.h> 
#import <AssetsLibrary/AssetsLibrary.h> 

@interface ASScreenRecorder() 
@property (strong, nonatomic) AVAssetWriter *videoWriter; 
@property (strong, nonatomic) AVAssetWriterInput *videoWriterInput; 
@property (strong, nonatomic) AVAssetWriterInputPixelBufferAdaptor *avAdaptor; 
@property (strong, nonatomic) CADisplayLink *displayLink; 
@property (strong, nonatomic) NSDictionary *outputBufferPoolAuxAttributes; 
@property (nonatomic) CFTimeInterval firstTimeStamp; 
@property (nonatomic) BOOL isRecording; 
@end 

@implementation ASScreenRecorder 
{ 
    dispatch_queue_t _render_queue; 
    dispatch_queue_t _append_pixelBuffer_queue; 
    dispatch_semaphore_t _frameRenderingSemaphore; 
    dispatch_semaphore_t _pixelAppendSemaphore; 

    CGSize _viewSize; 
    CGFloat _scale; 

    CGColorSpaceRef _rgbColorSpace; 
    CVPixelBufferPoolRef _outputBufferPool; 
} 

#pragma mark - initializers 

+ (instancetype)sharedInstance { 
    static dispatch_once_t once; 
    static ASScreenRecorder *sharedInstance; 
    dispatch_once(&once, ^{ 
     sharedInstance = [[self alloc] init]; 
    }); 
    return sharedInstance; 
} 

- (instancetype)init 
{ 
    self = [super init]; 
    if (self) { 
     _viewSize = [UIApplication sharedApplication].delegate.window.bounds.size; 
     _scale = [UIScreen mainScreen].scale; 
     // record half size resolution for retina iPads 
     if ((UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) && _scale > 1) { 
      _scale = 1.0; 
     } 
     _isRecording = NO; 

     _append_pixelBuffer_queue = dispatch_queue_create("ASScreenRecorder.append_queue", DISPATCH_QUEUE_SERIAL); 
     _render_queue = dispatch_queue_create("ASScreenRecorder.render_queue", DISPATCH_QUEUE_SERIAL); 
     dispatch_set_target_queue(_render_queue, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); 
     _frameRenderingSemaphore = dispatch_semaphore_create(1); 
     _pixelAppendSemaphore = dispatch_semaphore_create(1); 
    } 
    return self; 
} 

#pragma mark - public 

- (void)setVideoURL:(NSURL *)videoURL 
{ 
    NSAssert(!_isRecording, @"videoURL can not be changed whilst recording is in progress"); 
    _videoURL = videoURL; 
} 

- (BOOL)startRecording 
{ 
    if (!_isRecording) { 
     [self setUpWriter]; 
     _isRecording = (_videoWriter.status == AVAssetWriterStatusWriting); 
     _displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(writeVideoFrame)]; 
     [_displayLink addToRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes]; 
    } 
    return _isRecording; 
} 

- (void)stopRecordingWithCompletion:(VideoCompletionBlock)completionBlock 
{ 
    if (_isRecording) { 
     _isRecording = NO; 
     [_displayLink removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:NSRunLoopCommonModes]; 
     [self completeRecordingSession:completionBlock]; 
    } 
} 

#pragma mark - private 

-(void)setUpWriter 
{ 
    _rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 

    NSDictionary *bufferAttributes = @{(id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA), 
             (id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES, 
             (id)kCVPixelBufferWidthKey : @(_viewSize.width * _scale), 
             (id)kCVPixelBufferHeightKey : @(_viewSize.height * _scale), 
             (id)kCVPixelBufferBytesPerRowAlignmentKey : @(_viewSize.width * _scale * 4) 
             }; 

    _outputBufferPool = NULL; 
    CVPixelBufferPoolCreate(NULL, NULL, (__bridge CFDictionaryRef)(bufferAttributes), &_outputBufferPool); 


    NSError* error = nil; 
    _videoWriter = [[AVAssetWriter alloc] initWithURL:self.videoURL ?: [self tempFileURL] 
              fileType:AVFileTypeQuickTimeMovie 
               error:&error]; 
    NSParameterAssert(_videoWriter); 

    NSInteger pixelNumber = _viewSize.width * _viewSize.height * _scale; 
    NSDictionary* videoCompression = @{AVVideoAverageBitRateKey: @(pixelNumber * 11.4)}; 

    NSDictionary* videoSettings = @{AVVideoCodecKey: AVVideoCodecH264, 
            AVVideoWidthKey: [NSNumber numberWithInt:_viewSize.width*_scale], 
            AVVideoHeightKey: [NSNumber numberWithInt:_viewSize.height*_scale], 
            AVVideoCompressionPropertiesKey: videoCompression}; 

    _videoWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings]; 
    NSParameterAssert(_videoWriterInput); 

    _videoWriterInput.expectsMediaDataInRealTime = YES; 
    _videoWriterInput.transform = [self videoTransformForDeviceOrientation]; 

    _avAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_videoWriterInput sourcePixelBufferAttributes:nil]; 

    [_videoWriter addInput:_videoWriterInput]; 

    [_videoWriter startWriting]; 
    [_videoWriter startSessionAtSourceTime:CMTimeMake(0, 1000)]; 
} 

- (CGAffineTransform)videoTransformForDeviceOrientation 
{ 
    CGAffineTransform videoTransform; 
    switch ([UIDevice currentDevice].orientation) { 
     case UIDeviceOrientationLandscapeLeft: 
      videoTransform = CGAffineTransformMakeRotation(-M_PI_2); 
      break; 
     case UIDeviceOrientationLandscapeRight: 
      videoTransform = CGAffineTransformMakeRotation(M_PI_2); 
      break; 
     case UIDeviceOrientationPortraitUpsideDown: 
      videoTransform = CGAffineTransformMakeRotation(M_PI); 
      break; 
     default: 
      videoTransform = CGAffineTransformIdentity; 
    } 
    return videoTransform; 
} 

- (NSURL*)tempFileURL 
{ 
    NSString *outputPath = [NSHomeDirectory() stringByAppendingPathComponent:@"tmp/screenCapture.mp4"]; 
    [self removeTempFilePath:outputPath]; 
    return [NSURL fileURLWithPath:outputPath]; 
} 

- (void)removeTempFilePath:(NSString*)filePath 
{ 
    NSFileManager* fileManager = [NSFileManager defaultManager]; 
    if ([fileManager fileExistsAtPath:filePath]) { 
     NSError* error; 
     if ([fileManager removeItemAtPath:filePath error:&error] == NO) { 
      NSLog(@"Could not delete old recording:%@", [error localizedDescription]); 
     } 
    } 
} 

- (void)completeRecordingSession:(VideoCompletionBlock)completionBlock 
{ 
    dispatch_async(_render_queue, ^{ 
     dispatch_sync(_append_pixelBuffer_queue, ^{ 

      [_videoWriterInput markAsFinished]; 
      [_videoWriter finishWritingWithCompletionHandler:^{ 

       void (^completion)(void) = ^() { 
        [self cleanup]; 
        dispatch_async(dispatch_get_main_queue(), ^{ 
         if (completionBlock) completionBlock(); 
        }); 
       }; 

       if (self.videoURL) { 
        completion(); 
       } else { 
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 
        [library writeVideoAtPathToSavedPhotosAlbum:_videoWriter.outputURL completionBlock:^(NSURL *assetURL, NSError *error) { 
         if (error) { 
          NSLog(@"Error copying video to camera roll:%@", [error localizedDescription]); 
         } else { 
          [self removeTempFilePath:_videoWriter.outputURL.path]; 
          completion(); 
         } 
        }]; 
       } 
      }]; 
     }); 
    }); 
} 

- (void)cleanup 
{ 
    self.avAdaptor = nil; 
    self.videoWriterInput = nil; 
    self.videoWriter = nil; 
    self.firstTimeStamp = 0; 
    self.outputBufferPoolAuxAttributes = nil; 
    CGColorSpaceRelease(_rgbColorSpace); 
    CVPixelBufferPoolRelease(_outputBufferPool); 
} 

- (void)writeVideoFrame 
{ 
    // throttle the number of frames to prevent meltdown 
    // technique gleaned from Brad Larson's answer here: http://stackoverflow.com/a/5956119 
    if (dispatch_semaphore_wait(_frameRenderingSemaphore, DISPATCH_TIME_NOW) != 0) { 
     return; 
    } 
    dispatch_async(_render_queue, ^{ 
     if (![_videoWriterInput isReadyForMoreMediaData]) { 
      // signal before bailing out, otherwise the semaphore is never 
      // released and no further frames are rendered 
      dispatch_semaphore_signal(_frameRenderingSemaphore); 
      return; 
     } 

     if (!self.firstTimeStamp) { 
      self.firstTimeStamp = _displayLink.timestamp; 
     } 
     CFTimeInterval elapsed = (_displayLink.timestamp - self.firstTimeStamp); 
     CMTime time = CMTimeMakeWithSeconds(elapsed, 1000); 

     CVPixelBufferRef pixelBuffer = NULL; 
     CGContextRef bitmapContext = [self createPixelBufferAndBitmapContext:&pixelBuffer]; 

     if (self.delegate) { 
      [self.delegate writeBackgroundFrameInContext:&bitmapContext]; 
     } 
     // draw each window into the context (other windows include UIKeyboard, UIAlert) 
     // FIX: UIKeyboard is currently only rendered correctly in portrait orientation 
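     // NOTE: -drawViewHierarchyInRect:afterScreenUpdates: does not capture 
     // AVPlayerLayer, AVCaptureVideoPreviewLayer or OpenGL content, which is 
     // why the video area ends up blank; those frames have to be supplied 
     // through writeBackgroundFrameInContext: (see the sketch in the header) 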
     dispatch_sync(dispatch_get_main_queue(), ^{ 
      UIGraphicsPushContext(bitmapContext); { 
       for (UIWindow *window in [[UIApplication sharedApplication] windows]) { 
        [window drawViewHierarchyInRect:CGRectMake(0, 0, _viewSize.width, _viewSize.height) afterScreenUpdates:NO]; 
       } 
      } UIGraphicsPopContext(); 
     }); 

     // append pixelBuffer on a async dispatch_queue, the next frame is rendered whilst this one appends 
     // must not overwhelm the queue with pixelBuffers, therefore: 
     // check if _append_pixelBuffer_queue is ready 
     // if it’s not ready, release pixelBuffer and bitmapContext 
     if (dispatch_semaphore_wait(_pixelAppendSemaphore, DISPATCH_TIME_NOW) == 0) { 
      dispatch_async(_append_pixelBuffer_queue, ^{ 
       BOOL success = [_avAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:time]; 
       if (!success) { 
        NSLog(@"Warning: Unable to write buffer to video"); 
       } 
       CGContextRelease(bitmapContext); 
       CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 
       CVPixelBufferRelease(pixelBuffer); 

       dispatch_semaphore_signal(_pixelAppendSemaphore); 
      }); 
     } else { 
      CGContextRelease(bitmapContext); 
      CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); 
      CVPixelBufferRelease(pixelBuffer); 
     } 

     dispatch_semaphore_signal(_frameRenderingSemaphore); 
    }); 
} 

- (CGContextRef)createPixelBufferAndBitmapContext:(CVPixelBufferRef *)pixelBuffer 
{ 
    CVPixelBufferPoolCreatePixelBuffer(NULL, _outputBufferPool, pixelBuffer); 
    CVPixelBufferLockBaseAddress(*pixelBuffer, 0); 

    CGContextRef bitmapContext = NULL; 
    bitmapContext = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(*pixelBuffer), 
              CVPixelBufferGetWidth(*pixelBuffer), 
              CVPixelBufferGetHeight(*pixelBuffer), 
              8, CVPixelBufferGetBytesPerRow(*pixelBuffer), _rgbColorSpace, 
              kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst 
             ); 
    CGContextScaleCTM(bitmapContext, _scale, _scale); 
    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, _viewSize.height); 
    CGContextConcatCTM(bitmapContext, flipVertical); 

    return bitmapContext; 
} 

@end 
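
For reference, this is roughly how the recorder is driven from the app; a sketch, where the view controller and the button action are assumptions rather than part of the original code:

// Sketch of the calling side (toggleRecording: is a hypothetical action).
#import "ASScreenRecorder.h"

- (IBAction)toggleRecording:(id)sender
{
    ASScreenRecorder *recorder = [ASScreenRecorder sharedInstance];
    if (!recorder.isRecording) {
        // recorder.videoURL = ...; // optional: leave nil to save to the camera roll
        [recorder startRecording];
    } else {
        [recorder stopRecordingWithCompletion:^{
            NSLog(@"Recording finished");
            // upload the recording to the server for download from here
        }];
    }
}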

Comment from Gobinda: Any solution?

Answer


@Gobinda This code works in my case. If you want to record only part of the window, you need to specify that frame. In the init method, viewSize is defined as the window size, so you need to change viewSize to match the frame of your video.
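
A sketch of that change (the frame values are illustrative): in init, replace the full window size with the frame of the region to record, and offset the draw call so that region lands at the origin of the recorded frame.

// In -init: record only the book-page region instead of the whole window
// (the CGRectMake values here are hypothetical).
CGRect pageFrame = CGRectMake(384, 0, 384, 512);
_viewSize = pageFrame.size;

// In -writeVideoFrame: shift the window drawing so the region starts at (0, 0).
[window drawViewHierarchyInRect:CGRectMake(-pageFrame.origin.x,
                                           -pageFrame.origin.y,
                                           window.bounds.size.width,
                                           window.bounds.size.height)
             afterScreenUpdates:NO];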