2012-11-06 52 views
2

iOS中的視頻編輯問題:我目前正在開發一個可以合併任意數量視頻的iOS應用程序。當用戶點擊合併按鈕時,這些視頻會被拼接起來,然後使用AVPlayer開始播放:

// Build the composition by inserting clips into two alternating video/audio
// track pairs, overlapping consecutive clips by `transitionDuration` so a
// cross-fade can later be rendered between them (WWDC 2010 AVEditDemo pattern).
CMTime nextClipStartTime = kCMTimeZero;
NSInteger i;
CMTime transitionDuration = CMTimeMake(1, 1); // Default transition duration is one second.

// Add two video tracks and two audio tracks.
AVMutableCompositionTrack *compositionVideoTracks[2];
AVMutableCompositionTrack *compositionAudioTracks[2];
compositionVideoTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionVideoTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[0] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
compositionAudioTracks[1] = [self.mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];

// Stack-allocated scratch arrays; only valid for the lifetime of this method.
CMTimeRange *passThroughTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);
CMTimeRange *transitionTimeRanges = alloca(sizeof(CMTimeRange) * [self.selectedAssets count]);

// Place clips into alternating video & audio tracks in composition, overlapped by transitionDuration.
for (i = 0; i < [self.selectedAssets count]; i++)
{
    NSInteger alternatingIndex = i % 2; // alternating targets: 0, 1, 0, 1, ...
    AVURLAsset *asset = [self.selectedAssets objectAtIndex:i];

    // -count is NSUInteger; %d is the wrong specifier on 64-bit, so cast for %lu.
    NSLog(@"number of tracks %lu", (unsigned long)asset.tracks.count);

    // Use the clip's full duration. (The original wrapped this range in an
    // NSValue and immediately unwrapped it; the nil branch was unreachable.)
    CMTimeRange timeRangeInAsset = CMTimeRangeMake(kCMTimeZero, [asset duration]);

    // Guard against assets with no video track, and surface insert errors
    // instead of passing error:nil and silently losing failures.
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (clipVideoTrack) {
        NSError *videoInsertError = nil;
        if (![compositionVideoTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipVideoTrack atTime:nextClipStartTime error:&videoInsertError]) {
            NSLog(@"Failed to insert video track for clip %ld: %@", (long)i, videoInsertError);
        }
    }

    // Not every asset has an audio track; objectAtIndex:0 would crash on one that doesn't.
    AVAssetTrack *clipAudioTrack = [[asset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (clipAudioTrack) {
        NSError *audioInsertError = nil;
        if (![compositionAudioTracks[alternatingIndex] insertTimeRange:timeRangeInAsset ofTrack:clipAudioTrack atTime:nextClipStartTime error:&audioInsertError]) {
            NSLog(@"Failed to insert audio track for clip %ld: %@", (long)i, audioInsertError);
        }
    }

    // Remember the time range in which this clip should pass through.
    // Every clip after the first begins with a transition.
    // Every clip before the last ends with a transition.
    // Exclude those transitions from the pass through time ranges.
    passThroughTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, timeRangeInAsset.duration);
    if (i > 0) {
        passThroughTimeRanges[i].start = CMTimeAdd(passThroughTimeRanges[i].start, transitionDuration);
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }
    if (i+1 < [self.selectedAssets count]) {
        passThroughTimeRanges[i].duration = CMTimeSubtract(passThroughTimeRanges[i].duration, transitionDuration);
    }

    // The end of this clip will overlap the start of the next by transitionDuration.
    // (Note: this arithmetic falls apart if timeRangeInAsset.duration < 2 * transitionDuration.)
    nextClipStartTime = CMTimeAdd(nextClipStartTime, timeRangeInAsset.duration);
    nextClipStartTime = CMTimeSubtract(nextClipStartTime, transitionDuration);

    // Remember the time range for the transition to the next item.
    transitionTimeRanges[i] = CMTimeRangeMake(nextClipStartTime, transitionDuration);
}

// Set up the video composition if we are to perform crossfade or push transitions between clips. 
// Build the video-composition instructions: one "pass through" instruction per
// clip, plus a cross-fade transition instruction between consecutive clips.
NSMutableArray *instructions = [NSMutableArray array];

// Cycle between "pass through A", "transition from A to B", "pass through B", "transition from B to A".
for (i = 0; i < [self.selectedAssets count]; i++)
{
    NSInteger alternatingIndex = i % 2; // alternating targets

    // Pass through clip i.
    AVMutableVideoCompositionInstruction *passThroughInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    passThroughInstruction.timeRange = passThroughTimeRanges[i];
    AVMutableVideoCompositionLayerInstruction *passThroughLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
    passThroughInstruction.layerInstructions = [NSArray arrayWithObject:passThroughLayer];
    [instructions addObject:passThroughInstruction];

    AVMutableVideoCompositionLayerInstruction *fromLayer = nil;
    AVMutableVideoCompositionLayerInstruction *toLayer = nil;

    if (i+1 < [self.selectedAssets count])
    {
        // Add transition from clip i to clip i+1: fade fromLayer out over the overlap.
        AVMutableVideoCompositionInstruction *transitionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        transitionInstruction.timeRange = transitionTimeRanges[i];
        fromLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[alternatingIndex]];
        toLayer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:compositionVideoTracks[1-alternatingIndex]];

        // Fade out the fromLayer by setting a ramp from 1.0 to 0.0.
        [fromLayer setOpacityRampFromStartOpacity:1.0 toEndOpacity:0.0 timeRange:transitionTimeRanges[i]];

        transitionInstruction.layerInstructions = [NSArray arrayWithObjects:fromLayer, toLayer, nil];
        [instructions addObject:transitionInstruction];
    }

    AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[self.selectedItemsURL objectAtIndex:i] options:[NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES] forKey:AVURLAssetPreferPreciseDurationAndTimingKey]];
    AVAssetTrack *sourceVideoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    CGSize temp = CGSizeApplyAffineTransform(sourceVideoTrack.naturalSize, sourceVideoTrack.preferredTransform);
    // fabs, not fabsf: CGFloat is double on 64-bit and fabsf would truncate.
    CGSize size = CGSizeMake(fabs(temp.width), fabs(temp.height));
    CGAffineTransform transform = sourceVideoTrack.preferredTransform;

    // NOTE(review): this overwrites renderSize on every iteration, so only the
    // last clip's natural size takes effect — likely the cause of the mismatched
    // resolutions described in the question. Confirm and pick one render size.
    self.videoComposition.renderSize = sourceVideoTrack.naturalSize;

    CGAffineTransform clipTransform;
    if (size.width > size.height) {
        // Already landscape after applying the preferred transform: use it as-is.
        clipTransform = transform;
    } else {
        // Portrait clip: scale it down and translate it to center it within the
        // landscape render rectangle.
        float s = size.width/size.height;
        CGAffineTransform scaled = CGAffineTransformConcat(transform, CGAffineTransformMakeScale(s,s));
        float x = (size.height - size.width*s)/2;
        clipTransform = CGAffineTransformConcat(scaled, CGAffineTransformMakeTranslation(x, 0));
    }

    // Bug fix: the original applied the transform only to `fromLayer` — which is
    // nil (ARC-zeroed) for the last clip and is only active during the one-second
    // transition — and at atTime:sourceAsset.duration, i.e. after the clip has
    // already ended, so it never took visible effect. Apply it to the
    // pass-through layer (and the transition layer when one exists) from the
    // start of the clip instead.
    [passThroughLayer setTransform:clipTransform atTime:kCMTimeZero];
    if (fromLayer) {
        [fromLayer setTransform:clipTransform atTime:kCMTimeZero];
    }
}

// Attach the built instructions to the video composition and render at 30 fps.
self.videoComposition.instructions = instructions;
self.videoComposition.frameDuration = CMTimeMake(1, 30);

// Configure an export session that will write the merged movie to a
// randomly-named file inside the app's Documents directory.
NSString *documentsDirectory = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) objectAtIndex:0];
NSString *outputFileName = [NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000];
NSURL *outputURL = [NSURL fileURLWithPath:[documentsDirectory stringByAppendingPathComponent:outputFileName]];

self.exporter = [[AVAssetExportSession alloc] initWithAsset:self.mixComposition presetName:AVAssetExportPresetMediumQuality];
self.exporter.outputURL = outputURL;
self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
self.exporter.videoComposition = self.videoComposition;
self.exporter.shouldOptimizeForNetworkUse = YES;

// Start playing the composition immediately in a player layer hosted by the
// image view's backing layer.
self.playerItem = [AVPlayerItem playerItemWithAsset:self.mixComposition];
self.playerItem.videoComposition = self.videoComposition;
AVPlayer *compositionPlayer = [AVPlayer playerWithPlayerItem:self.playerItem];
AVPlayerLayer *previewLayer = [AVPlayerLayer playerLayerWithPlayer:compositionPlayer];
previewLayer.frame = CGRectMake(0, 0, self.imageView.frame.size.width, self.imageView.frame.size.height);
previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
[[[self imageView] layer] addSublayer:previewLayer];
[compositionPlayer play];

// Observe end-of-playback so checkPlayEnded fires when the merged item finishes.
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(checkPlayEnded)
                                             name:AVPlayerItemDidPlayToEndTimeNotification
                                           object:self.playerItem];

我目前面臨着以下問題:

  1. 如果一個視頻是在肖像,以及其他景觀,我如何能夠在風景中旋轉肖像視頻,因爲我的視角是橫向的,但縱向視頻保留其原來的? (我正在加載存儲在相機膠捲中的視頻,而不是將它們記錄在我的應用程序中)

  2. 忽略上述問題,如果我合併任何數量的視頻,它們都可以正常工作。一旦我將該新視頻保存到我的圖書館中,然後再次將其加載到我的應用中,並嘗試與其他一些新視頻一起加入該視頻,分辨率就會受到干擾,儘管這兩個視頻如果在應用中單獨播放,效果都很好。我該如何解決這個問題?

(我曾試圖按照WWDC 2010的視頻編輯教程,所以這段代碼是從那裏提取。)

+0

如果你現在還沒有得到解決方案,讓我知道,我知道如何解決這個問題.... –

+0

@ParvezBelim感謝您的關注。我實施了以下解決方案,幫助我解決了這些問題。 1.每當用戶想要在項目中添加視頻時,將檢查其方向,如果它不是橫向,則將其轉換爲橫向,然後添加到項目中。 –

+0

2.問題2是由於AVExportSession導出的視頻和攝像頭創建的視頻的分辨率不同所致。所以現在,他們首先轉換爲預先選定的分辨率,然後添加到項目中,這導致項目中的所有視頻具有相同的分辨率,因此一切正常。 –

回答

2

您可以在上面的代碼中,在創建AVMutableVideoCompositionInstruction對象的位置,於運行時檢查視頻的方向。

將被附加到代碼來解決問題是代碼....

// Build a layer instruction that leaves landscape clips untouched and scales
// rotated (portrait) clips to fit, instead of letting them be stretched.
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mutableComposition duration]);

// Bug fix: the original declared `videoTrack` here but then referenced an
// undeclared `assetVideoTrack` below, so the snippet did not compile. One
// consistent name is used throughout.
AVAssetTrack *assetVideoTrack = [[mutableComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

AVMutableVideoCompositionLayerInstruction *layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:assetVideoTrack];

// Detect orientation from the track's preferred transform: a ±90° rotation
// matrix means the clip was recorded in portrait.
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp;
BOOL isVideoAssetPortrait_ = NO;
CGAffineTransform videoTransform = assetVideoTrack.preferredTransform;

if(videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0)
{
    videoAssetOrientation_= UIImageOrientationRight;
    isVideoAssetPortrait_ = YES;
}
else if(videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0)
{
    // else-if: the two rotation checks are mutually exclusive.
    videoAssetOrientation_ = UIImageOrientationLeft;
    isVideoAssetPortrait_ = YES;
}

// Bug fix: `videoSize` was never declared in the original snippet.
CGSize videoSize;
CGFloat FirstAssetScaleToFitRatio = 320.0/assetVideoTrack.naturalSize.width;
if(isVideoAssetPortrait_)
{
    videoSize=CGSizeMake(350,400);
    // For a portrait clip the rotated width corresponds to naturalSize.height.
    FirstAssetScaleToFitRatio = 320.0/assetVideoTrack.naturalSize.height;
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    [layerInstruction setTransform:CGAffineTransformConcat(assetVideoTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}
else
{
    videoSize=CGSizeMake(assetVideoTrack.naturalSize.width,assetVideoTrack.naturalSize.height);
}

上面的代碼會讓橫向視頻保持橫向,並防止縱向視頻被直接拉伸轉換爲橫向。

我希望這會有所幫助。這樣就不必先把視頻轉換成正確的方向、再去應用編輯;追加此代碼後可以省去一個步驟,在同一段代碼中同時完成編輯和方向調整兩個操作,是更快的方法。