
I want to overlay two videos, with the foreground video somewhat transparent. I have been following the Apple Docs as well as this tutorial on overlaying two videos with AVFoundation.

Whenever I feed two copies of the same video through my code, it doesn't crash; however, when I feed it two different videos, I get this error:

VideoMaskingUtils.exportVideo Error: Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}) 
VideoMaskingUtils.exportVideo Description: <AVAssetExportSession: 0x1556be30, asset = <AVMutableComposition: 0x15567f10 tracks = (
"<AVMutableCompositionTrack: 0x15658030 trackID = 1, mediaType = vide, editCount = 1>", 
"<AVMutableCompositionTrack: 0x1556e250 trackID = 2, mediaType = vide, editCount = 1>" 
)>, presetName = AVAssetExportPresetHighestQuality, outputFileType = public.mpeg-4 
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.} 

I understand that you can't save a video with an alpha channel on iOS; I want to flatten the two videos into a single opaque video.

When I try to overlay the two videos and apply a PiP style using CATransforms, it crashes; simply overlaying them (with no alpha or any other effect applied) works. Any help is appreciated.

Here is my code (both approaches are included):

class func overlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset, andAlpha alpha: Float) { 

    let mixComposition = AVMutableComposition() 

    let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 
    let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid) 


    guard let firstMediaTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return } 
    guard let secondMediaTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return } 
    do { 
     try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstMediaTrack, atTime: kCMTimeZero) 
     try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondMediaTrack, atTime: kCMTimeZero) 
    } catch (let error) { 
     print(error) 
    } 

    let width = max(firstMediaTrack.naturalSize.width, secondMediaTrack.naturalSize.width) 
    let height = max(firstMediaTrack.naturalSize.height, secondMediaTrack.naturalSize.height) 

    let videoComposition = AVMutableVideoComposition() 
    videoComposition.renderSize = CGSizeMake(width, height) 
    videoComposition.frameDuration = firstMediaTrack.minFrameDuration 


    let firstApproach = false 
    if firstApproach { 
     let mainInstruction = AVMutableVideoCompositionInstruction() 
     mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration) 
     mainInstruction.backgroundColor = UIColor.redColor().CGColor 

     let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack) 
     firstlayerInstruction.setTransform(firstAsset.preferredTransform, atTime: kCMTimeZero) 

     let secondInstruction = AVMutableVideoCompositionInstruction() 
     secondInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration) 
     let backgroundColor = UIColor(colorLiteralRed: 1.0, green: 1.0, blue: 1.0, alpha: alpha) 
     secondInstruction.backgroundColor = backgroundColor.CGColor 

     let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack) 
     secondlayerInstruction.setTransform(secondAsset.preferredTransform, atTime: kCMTimeZero) 

     secondInstruction.layerInstructions = [secondlayerInstruction] 

     mainInstruction.layerInstructions = [firstlayerInstruction]//, secondlayerInstruction] 

     videoComposition.instructions = [mainInstruction, secondInstruction] 

    } else { 
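     // NOTE: the layer instructions below are built from the original assets'
     // tracks. Layer instructions usually target the composition tracks created
     // above (firstTrack / secondTrack) so that their track IDs match tracks
     // that actually exist in the composition.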
     let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstMediaTrack) 
     firstLayerInstruction.setTransform(firstMediaTrack.preferredTransform, atTime: kCMTimeZero) 
     firstLayerInstruction.setOpacity(1.0, atTime: kCMTimeZero) 

     let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondMediaTrack) 
     secondlayerInstruction.setTransform(secondMediaTrack.preferredTransform, atTime: kCMTimeZero) 
     secondlayerInstruction.setOpacity(alpha, atTime: kCMTimeZero) 


     let instruction = AVMutableVideoCompositionInstruction() 
     instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration)) 
     instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction] 

     videoComposition.instructions = [instruction] 
    } 



    let outputUrl = VideoMaskingUtils.getPathForTempFileNamed("output.mov") 

    VideoMaskingUtils.exportCompositedVideo(mixComposition, toURL: outputUrl, withVideoComposition: videoComposition) 

    // NOTE: the export above runs asynchronously, so removing the file here
    // can race with the exporter while it is still writing to outputUrl.
    VideoMaskingUtils.removeTempFileAtPath(outputUrl.absoluteString) 
} 
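
For reference, a call site for this method might look like the following sketch. The clip names and the way the assets are loaded are illustrative assumptions; only the VideoMaskingUtils.overlay signature comes from the code above.

import AVFoundation 

// Hypothetical call site: blend "foreground.mov" over "background.mov" 
// at 50% opacity. Both clips are assumed to ship in the app bundle. 
let firstUrl = NSBundle.mainBundle().URLForResource("background", withExtension: "mov")! 
let secondUrl = NSBundle.mainBundle().URLForResource("foreground", withExtension: "mov")! 

let firstAsset = AVURLAsset(URL: firstUrl, options: nil) 
let secondAsset = AVURLAsset(URL: secondUrl, options: nil) 

VideoMaskingUtils.overlay(video: firstAsset, withSecondVideo: secondAsset, andAlpha: 0.5) 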

Here is my exportCompositedVideo function.

private class func exportCompositedVideo(compiledVideo: AVMutableComposition, toURL outputUrl: NSURL, withVideoComposition videoComposition: AVMutableVideoComposition) { 
    guard let exporter = AVAssetExportSession(asset: compiledVideo, presetName: AVAssetExportPresetHighestQuality) else { return } 
    exporter.outputURL = outputUrl 
    exporter.videoComposition = videoComposition 
    exporter.outputFileType = AVFileTypeQuickTimeMovie 
    exporter.shouldOptimizeForNetworkUse = true 
    exporter.exportAsynchronouslyWithCompletionHandler({ 
     switch exporter.status { 
     case .Completed: 
      // we can be confident that there is a URL because 
      // we got this far. Otherwise it would've failed. 
      UISaveVideoAtPathToSavedPhotosAlbum(exporter.outputURL!.path!, nil, nil, nil) 
      print("VideoMaskingUtils.exportVideo SUCCESS!") 
      if exporter.error != nil { 
       print("VideoMaskingUtils.exportVideo Error: \(exporter.error)") 
       print("VideoMaskingUtils.exportVideo Description: \(exporter.description)") 
      } 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error) 
      break 

     case .Exporting: 
      let progress = exporter.progress 
      print("VideoMaskingUtils.exportVideo \(progress)") 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: progress) 
      break 

     case .Failed: 
      print("VideoMaskingUtils.exportVideo Error: \(exporter.error)") 
      print("VideoMaskingUtils.exportVideo Description: \(exporter.description)") 

      NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error) 
      break 

     default: break 
     } 
    }) 
} 
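
One side note on this function: exportAsynchronouslyWithCompletionHandler only invokes its handler once the session has completed, failed, or been cancelled, so the .Exporting case above never actually runs. Below is a minimal sketch of reporting progress by polling instead; pollExportProgress is a hypothetical helper on the same class, not part of the original code.

private class func pollExportProgress(exporter: AVAssetExportSession) { 
    // Poll exporter.progress on a background queue until the session leaves 
    // the Waiting/Exporting states, posting the same notification the dead 
    // .Exporting branch above was meant to post. 
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)) { 
        while exporter.status == .Waiting || exporter.status == .Exporting { 
            NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: exporter.progress) 
            NSThread.sleepForTimeInterval(0.1) 
        } 
    } 
} 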

Similar to this [unanswered question](http://stackoverflow.com/questions/17909906/avfoundation-to-overlay-an-alpha-channel-video-on-another-video). –


See the question linked above; the solution is to use an encoding method that supports an alpha channel, as described in that link. iOS can't do this by default with H.264. – MoDJ

Answers


The min should be a max. Both tracks are inserted at their full durations, so the composition lasts as long as the longer asset, and a video composition's instructions must cover that entire duration with no gaps. With min, the tail of the longer track is left uncovered and the export fails with error -11841 ("The video could not be composed"); that is also why two copies of the same video worked, since equal durations make min and max agree.

Replace this line:

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration)) 

with this line and it will work:

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration))
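
In context, the end of the else branch then reads as follows (the question's own code with only the time range changed):

let instruction = AVMutableVideoCompositionInstruction() 
// Cover the full composition, i.e. the duration of the longer asset, so no 
// part of the timeline is left without an instruction. 
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration)) 
instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction] 

videoComposition.instructions = [instruction] 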