2015-09-28 51 views
2

我正在合併視頻, 這是我的代碼,它正在合併視頻,但合併的視頻不可聽(我正在合併合適的視頻,視頻,合併視頻是沉默)任何人都可以幫助我:正確合併視頻,但合併的視頻無聲

NSValue *timeDur; 
    NSMutableArray *arrInstructions = [[NSMutableArray alloc]init] ; 
    NSMutableArray *arrDuration = [[NSMutableArray alloc]init] ; 
    AVAsset *fAsset ; 
    CMTime eachDuration = kCMTimeZero ; 
    AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init]; 

for (NSInteger counter = 0; counter < self.arrVideoUrls.count; counter++) { 
    AVMutableCompositionTrack *track = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 
    if (counter>0) 
     fAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter-1][@"VideoUrl"]]]; 
    AVAsset *firstAsset = [AVAsset assetWithURL:[NSURL fileURLWithPath:self.arrVideoUrls[counter][@"VideoUrl"]]]; 
    [track insertTimeRange:CMTimeRangeMake(kCMTimeZero,firstAsset.duration) ofTrack:[[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:(counter == 0)?kCMTimeZero:eachDuration error:nil]; 
    eachDuration = CMTimeAdd(eachDuration, firstAsset.duration); 
    timeDur = [NSValue valueWithCMTime:firstAsset.duration]; 
    [arrDuration addObject:timeDur]; 
    AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:track]; 
    AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp; 
    BOOL isFirstAssetPortrait_ = NO; 
    CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform; 
    if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;} 
    if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) {FirstAssetOrientation_ = UIImageOrientationLeft; isFirstAssetPortrait_ = YES;} 
    if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) {FirstAssetOrientation_ = UIImageOrientationUp;} 
    if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;} 
    CGFloat FirstAssetScaleToFitRatio = 640.0/640.0; 

    //CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width; 
    if (counter == 0) { 
     if(isFirstAssetPortrait_){ 
      FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height; 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero]; 
     }else{ 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:kCMTimeZero]; 
     } 

    }else{ 
     if(isFirstAssetPortrait_){ 
      FirstAssetScaleToFitRatio = SYSTEM_SCREEN_SIZE.width/FirstAssetTrack.naturalSize.height; 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:firstAsset.duration]; 
     }else{ 
      CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio); 
      [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:firstAsset.duration]; 
     } 
    } 
    if (counter <self.arrVideoUrls.count - 1) { 
     [FirstlayerInstruction setOpacity:0.0 atTime:eachDuration]; 
    } 

    [arrInstructions addObject:FirstlayerInstruction]; 
} 
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
CMTime final; 
for (NSInteger counter = 0; counter < arrDuration.count; counter++) { 
    NSValue *value = arrDuration[counter]; 
    CMTime timing = kCMTimeZero ; 
    [value getValue:&timing]; 
    if (counter == 0) { 
     final = timing ; 
    }else 
     final = CMTimeAdd(final, timing); 
} 
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,final); 
MainInstruction.layerInstructions = [arrInstructions copy] ; 
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition]; 
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction]; 
MainCompositionInst.frameDuration = CMTimeMake(1, 10); 
NSLog(@"%f",CMTimeGetSeconds(MainCompositionInst.frameDuration)); 
MainCompositionInst.renderSize = CGSizeMake(SYSTEM_SCREEN_SIZE.width, SYSTEM_SCREEN_SIZE.height); 

NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"mergeVideo-%d.mov",arc4random() % 1000]]; 

NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 
exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPreset1920x1080]; 
//AVAssetExportPreset960x540 
exporter.outputURL=url; 
exporter.outputFileType = AVFileTypeQuickTimeMovie; 
exporter.videoComposition = MainCompositionInst; 
//exporter.shouldOptimizeForNetworkUse = YES; 
//exporter.audioMix = audioZeroMix ; 
[exporter exportAsynchronouslyWithCompletionHandler:^ 
{ 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     NSLog(@"%@",exporter.error); 
     Hide_Indicator ; 
     [self exportDidFinish:exporter]; 
    }); 
}]; 

回答

1

現場編輯音頻就像現場編輯視頻。返回到每部電影並獲取音軌並將其粘貼到可變的作品中。

在這個例子中,我抓住視頻的前五秒,並從電影視頻的最後五秒鐘,並把它們一前一後在新視頻:

NSString* type = AVMediaTypeVideo; 
NSArray* arr = [oldAsset tracksWithMediaType:type]; 
AVAssetTrack* track = [arr lastObject]; 
CMTime duration = track.timeRange.duration; 
AVMutableComposition* comp = [AVMutableComposition composition]; 
AVMutableCompositionTrack* comptrack = [comp addMutableTrackWithMediaType:type preferredTrackID:kCMPersistentTrackID_Invalid]; 
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0,600), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(0,600) error:nil]; 
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(duration, CMTimeMakeWithSeconds(5,600)), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(5,600) error:nil]; 

但由此產生的視頻會安靜。所以我也回去拿取相應的音頻:

type = AVMediaTypeAudio; 
arr = [oldAsset tracksWithMediaType:type]; 
track = [arr lastObject]; 
comptrack = [comp addMutableTrackWithMediaType:type preferredTrackID:kCMPersistentTrackID_Invalid]; 
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeMakeWithSeconds(0,600), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(0,600) error:nil]; 
[comptrack insertTimeRange:CMTimeRangeMake(CMTimeSubtract(duration, CMTimeMakeWithSeconds(5,600)), CMTimeMakeWithSeconds(5,600)) ofTrack:track atTime:CMTimeMakeWithSeconds(5,600) error:nil]; 
+0

謝謝:-)桑傑先生 –