2016-09-28 59 views
1

當我在ios 9中將兩個視頻與AVAssetExportSession混合時,它的工作完美無缺。但是當我在iOS 10中與AVAssetExportSession混合時,它不起作用。如果有任何知道的原因,請幫助我,謝謝。ios 10中的AVAssetExportSession不適用於iPhone 7

實際上這段代碼在 iPhone 6S 及更早的機型上可以正常運行,但在 iPhone 7 上不行。

工作例如

/// Blends `liveEffectUrl` over `mainVideoUrl` at 50% opacity, scaled to fit
/// the main video's frame, and exports the mix as a QuickTime movie into the
/// app's Documents directory.
///
/// The export is trimmed to the main video's duration so the overlay never
/// outlives the base footage. Progress UI is driven by
/// `createMBCircularProgress:` and completion is reported to
/// `exportDidFinish:` on the main queue.
///
/// @param mainVideoUrl  File URL of the base (background) video. Its audio
///                      track, if present, is carried into the export.
/// @param liveEffectUrl File URL of the overlay video blended on top.
- (void)blendVideoOverVideo:(NSURL *)mainVideoUrl andBlendVideoUrl:(NSURL *)liveEffectUrl
{
    AVURLAsset *mainVideoAsset = [AVURLAsset URLAssetWithURL:mainVideoUrl options:nil];
    AVAssetTrack *mainVideoTrack = [[mainVideoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize mainVideoSize = [mainVideoTrack naturalSize];

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // Carry the main video's audio track (if any) into the composition.
    if (mainVideoUrl != nil) {
        AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:mainVideoUrl options:nil];
        AVAssetTrack *audioTrack = [[audioAsset tracksWithMediaType:AVMediaTypeAudio] firstObject];
        if (audioTrack != nil) {
            AVMutableCompositionTrack *compositionAudioTrack =
                [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                            preferredTrackID:kCMPersistentTrackID_Invalid];
            NSError *audioError = nil;
            if (![compositionAudioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration)
                                                ofTrack:audioTrack
                                                 atTime:kCMTimeZero
                                                  error:&audioError]) {
                NSLog(@"Failed to insert audio track: %@", audioError);
            }
        }
    }

    // Main (background) video track.
    AVMutableCompositionTrack *mainCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *mainVideoError = nil;
    if (![mainCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration)
                                       ofTrack:mainVideoTrack
                                        atTime:kCMTimeZero
                                         error:&mainVideoError]) {
        NSLog(@"Failed to insert main video track: %@", mainVideoError);
    }
    AVMutableVideoCompositionLayerInstruction *mainLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mainCompositionTrack];

    // Overlay (blend) video track.
    AVURLAsset *blendVideoAsset = [AVURLAsset URLAssetWithURL:liveEffectUrl options:nil];
    AVAssetTrack *blendVideoTrack = [[blendVideoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    CGSize blendVideoSize = [blendVideoTrack naturalSize];

    AVMutableCompositionTrack *blendCompositionTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    NSError *blendVideoError = nil;
    if (![blendCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, blendVideoAsset.duration)
                                        ofTrack:blendVideoTrack
                                         atTime:kCMTimeZero
                                          error:&blendVideoError]) {
        NSLog(@"Failed to insert blend video track: %@", blendVideoError);
    }
    AVMutableVideoCompositionLayerInstruction *blendLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:blendCompositionTrack];

    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, mainVideoAsset.duration);

    // The main video keeps its natural size and position.
    [mainLayerInstruction setTransform:CGAffineTransformIdentity atTime:kCMTimeZero];

    // Overlay at 50% opacity, scaled so its frame matches the main video.
    // (The original if/else computed the same height ratio on both branches,
    // so the vertical scale is unconditional; guard against zero dimensions.)
    [blendLayerInstruction setOpacity:0.5 atTime:kCMTimeZero];
    CGFloat scaleX = 1.0;
    CGFloat scaleY = 1.0;
    if (blendVideoSize.height > 0) {
        scaleY = mainVideoSize.height / blendVideoSize.height;
    }
    if (blendVideoSize.width > mainVideoSize.width && blendVideoSize.width > 0) {
        scaleX = mainVideoSize.width / blendVideoSize.width;
    }
    CGAffineTransform blendTransform =
        CGAffineTransformConcat(CGAffineTransformMakeScale(scaleX, scaleY),
                                CGAffineTransformMakeTranslation(0.1, 0.1));
    [blendLayerInstruction setTransform:blendTransform atTime:kCMTimeZero];

    // Blend layer listed first so it renders above the main layer.
    mainInstruction.layerInstructions = @[blendLayerInstruction, mainLayerInstruction];

    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.instructions = @[mainInstruction];
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComposition.renderSize = mainVideoSize;

    // Unique destination in Documents; remove any stale file with that name.
    // arc4random_uniform avoids the modulo bias of arc4random() % 1000.
    NSString *fileName = [NSString stringWithFormat:@"video%u.mov", arc4random_uniform(1000)];
    NSString *documentsDirectory =
        [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *outputPath = [documentsDirectory stringByAppendingPathComponent:fileName];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath]) {
        [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];
    }
    NSURL *outputURL = [NSURL fileURLWithPath:outputPath];

    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];
    exporter.outputURL = outputURL;

    Float64 mainSeconds = CMTimeGetSeconds(mainVideoAsset.duration);
    Float64 blendSeconds = CMTimeGetSeconds(blendVideoAsset.duration);
    NSLog(@"Main Video dura %f blend dura - %f, ", mainSeconds, blendSeconds);

    // Export range always covers the main video's duration. The original code
    // left `start`/`duration` UNINITIALIZED when both assets had exactly the
    // same duration, handing garbage stack values to CMTimeRangeMake; the
    // else branch below makes the range valid in every case.
    CMTime start = kCMTimeZero;
    CMTime duration;
    if (blendSeconds > mainSeconds) {
        duration = CMTimeMakeWithSeconds(mainSeconds, blendVideoAsset.duration.timescale);
    } else {
        duration = CMTimeMakeWithSeconds(mainSeconds, mainVideoAsset.duration.timescale);
    }
    exporter.timeRange = CMTimeRangeMake(start, duration);
    exporter.videoComposition = videoComposition;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;

    // Synchronous call on self — no retain cycle here, so no weakification.
    [self createMBCircularProgress:exporter];

    // The completion handler is retained by the exporter; capture self weakly
    // to avoid keeping the view controller alive for the export's lifetime.
    __weak typeof(self) weakSelf = self;
    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            [weakSelf exportDidFinish:exporter];
        });
    }];
}

這段代碼在 iOS 9 乃至 iOS 10 的 iPhone 6S、6、5 等機型上都能運行,但無法在 iPhone 7 模擬器上運行。

解決方案是:我們需要使用最新的 Xcode 8.1 測試版來運行。

+1

分享代碼! – voromax

+0

任何簡單的代碼,甚至不寫在iOS 10 –

+0

實際代碼適用於iphone 6s和更早版本,但不適用於iPhone 7 –

回答

2
It's a bug. 

It's fixed in Xcode 8.1 beta. 

在 Xcode 8.1 測試版中,iPhone 7 模擬器上的 [AVAssetExportSession allExportPresets] 現在返回:

AVAssetExportPreset1920x1080, 
AVAssetExportPresetLowQuality, 
AVAssetExportPresetAppleM4A, 
AVAssetExportPreset640x480, 
AVAssetExportPreset3840x2160, 
AVAssetExportPresetHighestQuality, 
AVAssetExportPreset1280x720, 
AVAssetExportPresetMediumQuality, 
AVAssetExportPreset960x540 

在 Xcode 8.0 中,iPhone 7 模擬器上的 [AVAssetExportSession allExportPresets] 返回一個空陣列。

+1

很高興看到你找到答案,我們都很感激。但目前尚不清楚,你問過甚麼。請更新您的問題,以便此問答可以幫助其他人。在問題中發佈代碼和錯誤消息。 – pedrouan

+0

問題很簡單:AVAssetExportSession 在 iPhone 7 模擬器中無法工作 –

+0

答案很簡單,會在XCode beta 8.1中運行 –

相關問題