I am recording the iOS screen as video in my app. For ordinary views this works fine, but our requirement is to record the screen while a video is playing. In the test app I tried playing the video with a UIWebView and with MPMoviePlayer (roughly as sketched below). Recording works until the player starts; once the player is running, I only get a black screen in the recording. Any suggestions? How can I record the device screen as video with the iOS SDK?
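For context, this is roughly how the video is started while the recording timer runs. It is only a sketch: the file name, the moviePlayer ivar, and the playVideo method name are placeholders, and in the other test case the video is embedded in a UIWebView instead.

#import <MediaPlayer/MediaPlayer.h>

// Sketch only: start playback of a local file with MPMoviePlayerController.
// moviePlayer is assumed to be an MPMoviePlayerController ivar of the view controller.
- (void)playVideo {
    NSURL *videoURL = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"mp4"];
    moviePlayer = [[MPMoviePlayerController alloc] initWithContentURL:videoURL];
    moviePlayer.view.frame = self.view.bounds;
    [self.view addSubview:moviePlayer.view];
    [moviePlayer play];
}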
I followed this link:
http://developer.apple.com/library/ios/#qa/qa1703/_index.html
-(void) startRecording {
    // create the AVAssetWriter
    NSString *moviePath = [[self pathToDocumentsDirectory] stringByAppendingPathComponent:OUTPUT_FILE_NAME];
    if ([[NSFileManager defaultManager] fileExistsAtPath:moviePath]) {
        [[NSFileManager defaultManager] removeItemAtPath:moviePath error:nil];
    }

    NSURL *movieURL = [NSURL fileURLWithPath:moviePath];
    NSError *movieError = nil;
    [assetWriter release];
    assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL
                                            fileType:AVFileTypeQuickTimeMovie
                                               error:&movieError];

    NSDictionary *assetWriterInputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                              AVVideoCodecH264, AVVideoCodecKey,
                                              [NSNumber numberWithInt:FRAME_WIDTH], AVVideoWidthKey,
                                              [NSNumber numberWithInt:FRAME_HEIGHT], AVVideoHeightKey,
                                              nil];
    assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                          outputSettings:assetWriterInputSettings];
    assetWriterInput.expectsMediaDataInRealTime = YES;
    [assetWriter addInput:assetWriterInput];

    [assetWriterPixelBufferAdaptor release];
    assetWriterPixelBufferAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc]
                                     initWithAssetWriterInput:assetWriterInput
                                     sourcePixelBufferAttributes:nil];
    [assetWriter startWriting];

    firstFrameWallClockTime = CFAbsoluteTimeGetCurrent();
    [assetWriter startSessionAtSourceTime:CMTimeMake(0, TIME_SCALE)];

    // start writing samples to it
    [assetWriterTimer release];
    assetWriterTimer = [NSTimer scheduledTimerWithTimeInterval:0.1
                                                        target:self
                                                      selector:@selector(writeSample:)
                                                      userInfo:nil
                                                       repeats:YES];
}
-(void) stopRecording {
    [assetWriterTimer invalidate];
    assetWriterTimer = nil;

    [assetWriter finishWriting];
    NSLog(@"finished writing");
}
- (UIImage *)screenshot
{
    // Create a graphics context with the target size
    // On iOS 4 and later, use UIGraphicsBeginImageContextWithOptions to take the scale into consideration
    // On iOS prior to 4, fall back to use UIGraphicsBeginImageContext
    CGSize imageSize = [[UIScreen mainScreen] bounds].size;
    CGFloat imageScale = imageSize.width / FRAME_WIDTH;
    if (NULL != UIGraphicsBeginImageContextWithOptions)
        UIGraphicsBeginImageContextWithOptions(imageSize, NO, imageScale);
    else
        UIGraphicsBeginImageContext(imageSize);

    CGContextRef context = UIGraphicsGetCurrentContext();

    // Iterate over every window from back to front
    for (UIWindow *window in [[UIApplication sharedApplication] windows])
    {
        if (![window respondsToSelector:@selector(screen)] || [window screen] == [UIScreen mainScreen])
        {
            // -renderInContext: renders in the coordinate space of the layer,
            // so we must first apply the layer's geometry to the graphics context
            CGContextSaveGState(context);
            // Center the context around the window's anchor point
            CGContextTranslateCTM(context, [window center].x, [window center].y);
            // Apply the window's transform about the anchor point
            CGContextConcatCTM(context, [window transform]);
            // Offset by the portion of the bounds left of and above the anchor point
            CGContextTranslateCTM(context,
                                  -[window bounds].size.width * [[window layer] anchorPoint].x,
                                  -[window bounds].size.height * [[window layer] anchorPoint].y);

            // Render the layer hierarchy to the current context
            [[window layer] renderInContext:context];

            // Restore the context
            CGContextRestoreGState(context);
        }
    }

    // Retrieve the screenshot image
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    return image;
}
// Frees the CFData that backs a frame's pixel buffer once Core Video is done
// with it, so the copied screenshot data is not leaked on every frame.
static void ReleaseScreenshotData(void *releaseRefCon, const void *baseAddress) {
    CFRelease((CFDataRef)releaseRefCon);
}

-(void) writeSample:(NSTimer *)_timer {
    if (assetWriterInput.readyForMoreMediaData) {
        CVReturn cvErr = kCVReturnSuccess;

        // get screenshot image!
        CGImageRef image = (CGImageRef)[[self screenshot] CGImage];
        NSLog(@"made screenshot");

        // prepare the pixel buffer
        CVPixelBufferRef pixelBuffer = NULL;
        CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
        NSLog(@"copied image data");
        cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                             FRAME_WIDTH,
                                             FRAME_HEIGHT,
                                             kCVPixelFormatType_32BGRA,
                                             (void *)CFDataGetBytePtr(imageData),
                                             CGImageGetBytesPerRow(image),
                                             ReleaseScreenshotData, // releases imageData when the buffer is freed
                                             (void *)imageData,
                                             NULL,
                                             &pixelBuffer);
        NSLog(@"CVPixelBufferCreateWithBytes returned %d", cvErr);

        // calculate the time
        CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
        CFTimeInterval elapsedTime = thisFrameWallClockTime - firstFrameWallClockTime;
        NSLog(@"elapsedTime: %f", elapsedTime);
        CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);

        // write the sample
        BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer withPresentationTime:presentationTime];
        CVPixelBufferRelease(pixelBuffer); // the adaptor keeps its own reference while it needs the buffer

        if (appended) {
            NSLog(@"appended sample at time %lf", CMTimeGetSeconds(presentationTime));
        } else {
            NSLog(@"failed to append");
            [self stopRecording];
            self.startStopButton.selected = NO;
        }
    }
}
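For completeness, recording is toggled from a button action along these lines (illustrative only; startStopButton is the button referenced in writeSample:, and playVideo is the hypothetical helper sketched earlier):

- (IBAction)startStopButtonTapped:(id)sender {
    self.startStopButton.selected = !self.startStopButton.selected;
    if (self.startStopButton.selected) {
        [self startRecording];
        [self playVideo];   // start playback while the screen recording is running
    } else {
        [self stopRecording];
    }
}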
Where:
NSTimer *clockTimer;
NSTimer *assetWriterTimer;
AVMutableComposition *mutableComposition;
AVAssetWriter *assetWriter;
AVAssetWriterInput *assetWriterInput;
AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferAdaptor;
CFAbsoluteTime firstFrameWallClockTime;
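The snippets also rely on a few constants and a documents-directory helper that are not shown above; they look roughly like this (the values are representative, not necessarily the exact ones in my project):

#import <AVFoundation/AVFoundation.h>
#import <CoreVideo/CoreVideo.h>
#import <CoreMedia/CoreMedia.h>

// representative values only
#define FRAME_WIDTH      320
#define FRAME_HEIGHT     480
#define TIME_SCALE       600   // CMTime units per second
#define OUTPUT_FILE_NAME @"screen.mov"

- (NSString *)pathToDocumentsDirectory {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    return [paths objectAtIndex:0];
}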
Can anyone help me... – Imran