2014-10-06 92 views
2

我試圖合併視頻與AVFoundation時發現了一個奇怪的行爲。我很確定我在某個地方犯了一個錯誤,但我太盲目了,看不到它。我的目標是合併4個視頻(稍後會在它們之間進行交叉淡入淡出轉換)。 每次我試圖出口的視頻我得到這個錯誤:錯誤域= AVFoundationErrorDomain代碼= -11821「無法解碼」

Error Domain=AVFoundationErrorDomain Code=-11821 "Cannot Decode" UserInfo=0x7fd94073cc30 {NSLocalizedDescription=Cannot Decode, NSLocalizedFailureReason=The media data could not be decoded. It may be damaged.} 

最有趣的事情是,如果我不提供AVAssetExportSession與AVMutableVideoComposition,然後一切工作正常!我無法理解我做錯了什麼。源視頻從YouTube上下載並具有.mp4擴展名。我可以用MPMoviePlayerController播放它們。在檢查源代碼時,請仔細查看AVMutableVideoComposition。 我在iOS模擬器上測試了Xcode 6.0.1中的這段代碼。

#import "VideoStitcher.h" 
#import <UIKit/UIKit.h> 
#import <AVFoundation/AVFoundation.h> 
#import <AssetsLibrary/AssetsLibrary.h> 

@implementation VideoStitcher 
{ 
    VideoStitcherCompletionBlock _completionBlock; 
    AVMutableComposition *_composition; 
    AVMutableVideoComposition *_videoComposition; 
} 

- (instancetype)init 
{ 
    self = [super init]; 
    if (self) 
    { 
     _composition = [AVMutableComposition composition]; 
     _videoComposition = [AVMutableVideoComposition videoComposition]; 
    } 
    return self; 
} 

- (void)compileVideoWithAssets:(NSArray *)assets completion:(VideoStitcherCompletionBlock)completion 
{ 
    _completionBlock = [completion copy]; 

    if (assets == nil || assets.count < 2) 
    { 
     // We need at least two video to make a stitch, right? 
     NSAssert(NO, @"VideoStitcher: assets parameter is nil or has not enough items in it"); 
    } 
    else 
    { 
     [self composeAssets:assets]; 
     if (_composition != nil) // if stitching went good and no errors were found 
      [self exportComposition]; 
    } 
} 

- (void)composeAssets:(NSArray *)assets 
{ 
    AVMutableCompositionTrack *compositionVideoTrack = [_composition addMutableTrackWithMediaType:AVMediaTypeVideo 
                    preferredTrackID:kCMPersistentTrackID_Invalid]; 

    NSError *compositionError = nil; 
    CMTime currentTime = kCMTimeZero; 
    AVAsset *asset = nil; 
    for (int i = (int)assets.count - 1; i >= 0; i--) //For some reason videos are compiled in reverse order. Find the bug later. 06.10.14 
    { 
     asset = assets[i]; 
     AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject]; 
     BOOL success = [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetVideoTrack.timeRange.duration) 
                 ofTrack:assetVideoTrack 
                 atTime:currentTime 
                 error:&compositionError]; 
     if (success) 
     { 
      CMTimeAdd(currentTime, asset.duration); 
     } 
     else 
     { 
      NSLog(@"VideoStitcher: something went wrong during inserting time range in composition"); 
      if (compositionError != nil) 
      { 
       NSLog(@"%@", compositionError); 
       _completionBlock(nil, compositionError); 
       _composition = nil; 
       return; 
      } 
     } 
    } 

    AVMutableVideoCompositionInstruction *videoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
    videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration); 
    videoCompositionInstruction.backgroundColor = [[UIColor redColor] CGColor]; 
    _videoComposition.instructions = @[videoCompositionInstruction]; 
    _videoComposition.renderSize = [self calculateOptimalRenderSizeFromAssets:assets]; 
    _videoComposition.frameDuration = CMTimeMake(1, 600); 
} 

- (void)exportComposition 
{ 
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
    NSString *documentsDirectory = [paths objectAtIndex:0]; 
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:@"testVideo.mov"]; 
    NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 


    NSString *filePath = [url path]; 
    NSFileManager *fileManager = [NSFileManager defaultManager]; 
    if ([fileManager fileExistsAtPath:filePath]) { 
     NSError *error; 
     if ([fileManager removeItemAtPath:filePath error:&error] == NO) { 
      NSLog(@"removeItemAtPath %@ error:%@", filePath, error); 
     } 
    } 

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:_composition 
                     presetName:AVAssetExportPreset1280x720]; 
    exporter.outputURL = url; 
    exporter.outputFileType = AVFileTypeQuickTimeMovie; 
    exporter.shouldOptimizeForNetworkUse = YES; 
    exporter.videoComposition = _videoComposition; 
    [exporter exportAsynchronouslyWithCompletionHandler:^{ 
     [self exportDidFinish:exporter]; 
    }]; 
} 

- (void)exportDidFinish:(AVAssetExportSession*)session 
{ 
    NSLog(@"%li", session.status); 
    if (session.status == AVAssetExportSessionStatusCompleted) 
    { 
     NSURL *outputURL = session.outputURL; 

     // time to call delegate methods, but for testing purposes we save the video in 'photos' app 

     ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; 
     if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) 
     { 
      [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error){ 
       if (error == nil) 
       { 
        NSLog(@"successfully saved video"); 
       } 
       else 
       { 
        NSLog(@"saving video failed.\n%@", error); 
       } 
      }]; 
     } 
    } 
    else if (session.status == AVAssetExportSessionStatusFailed) 
    { 
     NSLog(@"VideoStitcher: exporting failed.\n%@", session.error); 
    } 
} 

- (CGSize)calculateOptimalRenderSizeFromAssets:(NSArray *)assets 
{ 
    AVAsset *firstAsset = assets[0]; 
    AVAssetTrack *firstAssetVideoTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject]; 
    CGFloat maxWidth = firstAssetVideoTrack.naturalSize.height; 
    CGFloat maxHeight = firstAssetVideoTrack.naturalSize.width; 

    for (AVAsset *asset in assets) 
    { 
     AVAssetTrack *assetVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject]; 
     if (assetVideoTrack.naturalSize.width > maxWidth) 
      maxWidth = assetVideoTrack.naturalSize.width; 
     if (assetVideoTrack.naturalSize.height > maxHeight) 
      maxHeight = assetVideoTrack.naturalSize.height; 
    } 

    return CGSizeMake(maxWidth, maxHeight); 
} 

@end 

感謝您的關注。我真的很累,我一直試圖找到連續四個小時的錯誤。我現在去睡覺。

+0

你能提供哪些線路崩潰嗎? – 2014-10-06 14:38:41

+0

^該應用程序不會崩潰。 AVAssetExportSession的狀態失敗,並向我提供錯誤Error Domain = AVFoundationErrorDomain Code = -11821「無法解碼」。 – eoLithic 2014-10-06 15:18:39

回答

4

我終於找到了解決方案。錯誤描述導致我的方向錯誤:「無法解碼,媒體數據無法解碼,可能已損壞。」從這個描述你可能會認爲你的視頻文件有問題。我花了5個小時試驗格式,調試等。

那麼,答案是完全不同的!

我的錯誤是我忘了CMTimeADD()返回值。我認爲它改變了它的第一個參數的值,並在代碼中,你可以看到這一點:

CMTime currentTime = kCMTimeZero; 
for (int i = (int)assets.count - 1; i >= 0; i--) 
{ 
    CMTimeAdd(currentTime, asset.duration); //HERE!! I don't actually increment the value! currentTime is always kCMTimeZero 
} 
videoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, _composition.duration); // And that's where everything breaks! 

我所學到的教訓是:當與AVFoundation工作經常檢查你的時間價值!這非常重要,否則你會得到很多錯誤。

+0

是的,你是正確的。你必須非常小心,同時在AVFoundation的時間工作。 – Reckoner 2016-03-29 05:51:08

相關問題