2012-04-09 55 views
3

在我的一個應用程序中,我需要在視頻中添加一些圖像。所以我分兩部分剪斷視頻,並從該圖像製作一個視頻。現在我想結合這三個視頻文件並製作一個視頻文件。但我不知道把這三個視頻結合起來。我在這裏看到一些代碼。但這對我沒有幫助。對於中斷視頻和製作來自圖像的視頻,我現在使用下面的代碼,我希望代碼合併此全部視頻。在iPhone應用程序中合併兩個視頻文件

任何其他想法將當前視圖屏幕放在視頻文件中間。從圖片

CGRect rect=CGRectMake(0, 0, 320, 480); 
view = [[UIView alloc]initWithFrame:rect]; 

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = ([paths count] > 0) ? [paths objectAtIndex:0] : nil; 
NSString *path = [documentsDirectory stringByAppendingPathComponent:[@"video2" stringByAppendingString:@".mov"]]; 

CGSize size = self.view.frame.size; 


NSMutableDictionary *attributes = [[NSMutableDictionary alloc]init]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:320] forKey:(NSString*)kCVPixelBufferWidthKey]; 
[attributes setObject:[NSNumber numberWithUnsignedInt:480] forKey:(NSString*)kCVPixelBufferHeightKey]; 


NSError *error = nil; 
AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL: 
           [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie 
                  error:&error]; 


NSParameterAssert(videoWriter); 
NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys: 
           AVVideoCodecH264, AVVideoCodecKey, 
           [NSNumber numberWithInt:size.width], AVVideoWidthKey, 
           [NSNumber numberWithInt:size.height], AVVideoHeightKey, 
           nil]; 

AVAssetWriterInput* writerInput = [[AVAssetWriterInput 
            assetWriterInputWithMediaType:AVMediaTypeVideo 
            outputSettings:videoSettings] retain]; 


AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor 
               assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput 
               sourcePixelBufferAttributes:nil]; 


NSParameterAssert(writerInput); 
NSParameterAssert([videoWriter canAddInput:writerInput]); 
[videoWriter addInput:writerInput]; 


//Start a session: 
[videoWriter startWriting]; 
[videoWriter startSessionAtSourceTime:kCMTimeZero]; 

CVPixelBufferRef buffer = NULL; 

//convert uiimage to CGImage. 

xPixel=0; 
yPixel=250; 

buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]]; 



CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &buffer); 
[adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero]; 

for (int i = 0;i<2; i++) 
{ 
    if([writerInput isReadyForMoreMediaData]) 
    { 
     //NSLog(@"inside for loop %d",i); 

     for(int pframetime=1;pframetime<=2;pframetime++) 
     { 

      CMTime frameTime = CMTimeMake(pframetime,25); 
      CMTime lastTime=CMTimeMake(i,1); //i is from 0 to 19 of the loop above 
      CMTime presentTime=CMTimeAdd(lastTime, frameTime); 

      if(i==0) 
       buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"1.jpeg"] CGImage]]; 
      else 
       buffer = [self pixelBufferFromCGImage:[[UIImage imageNamed:@"2.jpeg"] CGImage]]; 
      while (![writerInput isReadyForMoreMediaData]) 
      { 
       [NSThread sleepForTimeInterval:0.05]; 
      } 

      [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime]; 
      i++; 
     } 
     if(buffer) 
      CVBufferRelease(buffer); 
     //[NSThread sleepForTimeInterval:0.1]; 
    } 
} 
[writerInput markAsFinished]; 
[videoWriter finishWriting]; 
[videoPathArray addObject:path]; 

//Finish the session: 

[videoWriter release]; 
[writerInput release]; 
CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 

對於合併的視頻文件我試試這個代碼,但這裏沒有用

對於打破視頻文件

NSURL *url = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"Affagogato" ofType:@"mp4"]]; 
AVURLAsset *anAsset = [[AVURLAsset alloc] initWithURL:url options:nil]; 




for(int i = 0; i < 2; i++) { 
    AVAssetExportSession *exportSession = [[AVAssetExportSession alloc] 
              initWithAsset:anAsset presetName:AVAssetExportPresetLowQuality]; 
    NSString *filePath = nil; 
    NSUInteger count = 0; 
    do { 
     filePath = NSTemporaryDirectory(); 

     NSString *numberString = count > 0 ? [NSString stringWithFormat:@"-%i", count] : @""; 
     filePath = [filePath stringByAppendingPathComponent:[NSString stringWithFormat:@"Output-%@.mov", numberString]]; 
     count++; 
    } while([[NSFileManager defaultManager] fileExistsAtPath:filePath]);  

    exportSession.outputURL = [NSURL fileURLWithPath:filePath]; 
    exportSession.outputFileType = AVFileTypeQuickTimeMovie; 
    CMTimeRange range; 
    if(i == 0){ 
     CMTime start = CMTimeMakeWithSeconds(0.0, 600); 
     CMTime duration = CMTimeMakeWithSeconds(10.0, 600); 
     range = CMTimeRangeMake(start, duration); 
    }else{ 
     CMTime start = CMTimeMakeWithSeconds(10.0, 600); 
     range = CMTimeRangeMake(start, anAsset.duration); 
    } 
    exportSession.timeRange = range; 

    [exportSession exportAsynchronouslyWithCompletionHandler:^ 
    { 
     dispatch_async(dispatch_get_main_queue(), ^{ 
      [self exportDidFinish:exportSession Tag:i]; 
     }); 
    }]; 
} 

獲取視頻是視頻之間存在一些空白屏幕

AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    NSString *documentsDirectoryPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0]; 

    NSString* video_inputFilePath1 = [videoPathArray objectAtIndex:1]; 
    NSURL* video_inputFileUrl1 = [NSURL fileURLWithPath:video_inputFilePath1]; 

    NSString* video_inputFilePath2 = [videoPathArray objectAtIndex:0]; 
    NSURL* video_inputFileUrl2 = [NSURL fileURLWithPath:video_inputFilePath2]; 

    NSString* video_inputFilePath3 = [videoPathArray objectAtIndex:2]; 
    NSURL* video_inputFileUrl3 = [NSURL fileURLWithPath:video_inputFilePath3]; 

    NSString* outputFileName = @"outputFile.mov"; 
    NSString* outputFilePath = [NSString stringWithFormat:@"%@/%@",documentsDirectoryPath,outputFileName]; 

    NSURL* outputFileUrl = [NSURL fileURLWithPath:outputFilePath]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath]) 
     [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil]; 

    CMTime nextClipStartTime = kCMTimeZero; 


    AVURLAsset* videoAsset1 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl1 options:nil]; 
    AVURLAsset* videoAsset2 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl2 options:nil]; 
    AVURLAsset* videoAsset3 = [[AVURLAsset alloc]initWithURL:video_inputFileUrl3 options:nil]; 


    CMTimeRange video_timeRange1 = CMTimeRangeMake(kCMTimeZero,videoAsset1.duration); 
    AVMutableCompositionTrack *a_compositionVideoTrack1 = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    [a_compositionVideoTrack1 insertTimeRange:video_timeRange1 ofTrack:[[videoAsset1 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:nextClipStartTime error:nil]; 

    CMTimeRange video_timeRange3 = CMTimeRangeMake(nextClipStartTime,videoAsset3.duration); 

    [a_compositionVideoTrack1 insertTimeRange:video_timeRange3 ofTrack:[[videoAsset3 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil]; 

    CMTimeRange video_timeRange2 = CMTimeRangeMake(nextClipStartTime,videoAsset1.duration); 
    [a_compositionVideoTrack1 insertTimeRange:video_timeRange2 ofTrack:[[videoAsset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:videoAsset1.duration error:nil]; 



    AVAssetExportSession* _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetLowQuality]; 
    _assetExport.shouldOptimizeForNetworkUse = YES; 
    _assetExport.outputFileType = @"com.apple.quicktime-movie"; 
    _assetExport.outputURL = outputFileUrl; 

回答

相關問題