
AVMutableComposition video creation problem

I am creating a video with AVMutableComposition and exporting it with AVAssetExportSession, using the AVAssetExportPresetHighestQuality preset.

When the video dimensions are small, the video is created fine. When the dimensions are large (1280×1920), however, the video is not created correctly: a green overlay is added to it, as shown in the image below:

[Screenshot: exported video with a green overlay]

But when I use AVAssetExportPresetPassthrough instead of AVAssetExportPresetHighestQuality, the video is created fine.

Any idea why this happens?

Thanks in advance!
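
For reference, the export step described above boils down to something like the minimal sketch below. This is my paraphrase, not the original code: mixComposition and outputURL are hypothetical placeholders for the composition and destination built elsewhere, and the preset switch in the comment is the comparison mentioned above.

#import <AVFoundation/AVFoundation.h>

static void exportComposition(AVMutableComposition *mixComposition, NSURL *outputURL) {
    // AVAssetExportPresetHighestQuality re-encodes and shows the green overlay on
    // large (1280×1920) sources; AVAssetExportPresetPassthrough exports fine.
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = outputURL;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        if (exporter.status == AVAssetExportSessionStatusFailed) {
            NSLog(@"Export failed: %@", exporter.error);
        }
    }];
}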

Some more details please: about the assets, are you merging assets or something? – Maverick 2014-10-30 09:16:18

Is this problem specific to the Retina iPad? – uchiha 2015-03-03 14:33:05

To add to @Maverick's question, are the assets created programmatically? If so, is it from within your app, or are you importing them from somewhere else? – 2015-03-10 01:40:18

Answer


Try this code:

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

- (void)convertVideo:(NSString *)videoPath {
    // Load the source movie. videoPath is expected to be a local file path,
    // so build a file URL rather than a string URL.
    AVURLAsset *firstAsset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:videoPath] options:nil];
    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // VIDEO TRACK
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    if (FirstAssetTrack == nil) {
        return; // nothing to convert
    }

    AVMutableCompositionTrack *firstTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                        ofTrack:FirstAssetTrack
                         atTime:kCMTimeZero
                          error:nil];

    AVMutableVideoCompositionInstruction *MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration);

    // Activate a play-and-record audio session before working with the audio track.
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord error:nil];
    [audioSession setActive:YES error:nil];

    // AUDIO TRACK: copy the source audio (if there is any) for the full duration,
    // starting at time zero.
    AVAssetTrack *sourceAudioTrack = [[firstAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
    if (sourceAudioTrack != nil) {
        AVMutableCompositionTrack *compositionAudioTrack =
            [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                        preferredTrackID:kCMPersistentTrackID_Invalid];
        [compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                                       ofTrack:sourceAudioTrack
                                        atTime:kCMTimeZero
                                         error:nil];
    }

    // FIXING ORIENTATION: read the orientation from the track's preferred transform.
    // Only the portrait flag is used below; the orientation value is informational.
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];

    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp;
    BOOL isFirstAssetPortrait_ = NO;
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;

    if (firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationRight;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {
        FirstAssetOrientation_ = UIImageOrientationLeft;
        isFirstAssetPortrait_ = YES;
    }
    if (firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {
        FirstAssetOrientation_ = UIImageOrientationUp;
    }
    if (firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {
        FirstAssetOrientation_ = UIImageOrientationDown;
    }

    // Hide the layer once the clip ends.
    [FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];

    MainInstruction.layerInstructions = @[FirstlayerInstruction];

    AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
    MainCompositionInst.instructions = @[MainInstruction];
    MainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps

    // For portrait clips, scale by width/height and swap the render dimensions.
    // (An earlier variant hard-coded the portrait scale ratio to 0.67 instead of computing it.)
    CGSize naturalSizeFirst;
    if (isFirstAssetPortrait_) {
        CGFloat FirstAssetScaleToFitRatio = FirstAssetTrack.naturalSize.width / FirstAssetTrack.naturalSize.height;
        CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio, FirstAssetScaleToFitRatio);
        [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor)
                                     atTime:kCMTimeZero];
        naturalSizeFirst = CGSizeMake(FirstAssetTrack.naturalSize.height, FirstAssetTrack.naturalSize.width);
    } else {
        naturalSizeFirst = FirstAssetTrack.naturalSize;
    }

    MainCompositionInst.renderSize = CGSizeMake(naturalSizeFirst.width, naturalSizeFirst.height);

    // Export to a uniquely named file in the temporary directory.
    NSString *tmpDirectory = NSTemporaryDirectory();
    NSString *fname = [NSString stringWithFormat:@"mergeVideo-%d.mov", arc4random() % 1000];
    NSString *tmpFile = [tmpDirectory stringByAppendingPathComponent:fname];
    NSURL *url = [NSURL fileURLWithPath:tmpFile];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.videoComposition = MainCompositionInst;
    exporter.shouldOptimizeForNetworkUse = YES;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        // The completion handler runs on a background queue, so hop back to the
        // main queue before touching UI. progressStatus and selectedVideo are
        // assumed to be a label property and a counter ivar of this class.
        dispatch_async(dispatch_get_main_queue(), ^{
            if (exporter.status == AVAssetExportSessionStatusCompleted) {
                [self.progressStatus setText:@"Converted..."];
                selectedVideo = selectedVideo + 1;
            } else if (exporter.status == AVAssetExportSessionStatusFailed) {
                // A failure may happen because of an event outside your control,
                // for example an interruption such as an incoming phone call.
                NSLog(@"AVAssetExportSessionStatusFailed: %@", exporter.error);
            } else {
                NSLog(@"Export session status: %ld", (long)exporter.status);
            }
        });
    }];
}
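
A call site might look like the sketch below. The input path and the exportPresetsCompatibleWithAsset: sanity check are my additions, not part of the answer; the check simply lists which export presets AVFoundation reports as usable for the source asset before converting.

// Hypothetical call site: assumes a local movie file at `inputPath`.
NSString *inputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"input.mov"];

// Optional sanity check: which presets does AVFoundation consider compatible?
AVURLAsset *sourceAsset = [AVURLAsset URLAssetWithURL:[NSURL fileURLWithPath:inputPath] options:nil];
NSLog(@"Compatible presets: %@", [AVAssetExportSession exportPresetsCompatibleWithAsset:sourceAsset]);

[self convertVideo:inputPath];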