AVAssetExportSession works the first time, but fails if I try again - the video could not be composed

I'm developing an app in which people can overlay a watermark on a video, and so far I've been able to do that successfully. However, after I save the overlaid video the first time, any attempt to save a video again fails with the error:

Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.})

Here is my code for saving the video:

@IBAction func saveVideo(sender: AnyObject) { 

    self.videoAsset = AVAsset(URL: fileURL as NSURL!) 

    // Create Video track (Video + Audio) 
    let videoTrack: AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 
    let audioTrack:AVMutableCompositionTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeAudio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) 

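    // Insert the full duration of the source video and audio tracks into the composition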
    do { 
     try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration), ofTrack: self.videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] , atTime: kCMTimeZero) 
     try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration), ofTrack: self.videoAsset.tracksWithMediaType(AVMediaTypeAudio)[0] , atTime: kCMTimeZero) 
     print("Inserted time ranges just fine\n") 
    } catch let error as NSError { 
     print("Failed to insert video/audio tracks!!!!\n") 
     print(error.localizedDescription) 
    } 

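    // Layer instruction for the composition's video track; the source track's
    // preferredTransform is inspected below to work out how the video was recorded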
    videoLayerIntruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 
    let videoAssetTrack: AVAssetTrack = self.videoAsset.tracksWithMediaType(AVMediaTypeVideo)[0] 

    var videoAssetOrientation_: UIImageOrientation = .Up 
    var isVideoAssetPortrait_: Bool = false 

    let videoTransform:CGAffineTransform = videoAssetTrack.preferredTransform 

    if videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0 { 
     videoAssetOrientation_ = .Right 
     isVideoAssetPortrait_ = true 
    } 
    if videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0 { 
     videoAssetOrientation_ = .Left 
     isVideoAssetPortrait_ = true 
    } 
    if videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0 { 
     videoAssetOrientation_ = .Up 
    } 
    if videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0 { 
     videoAssetOrientation_ = .Down 
    } 

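    // Keep the original orientation by applying the source track's preferred transform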
    videoLayerIntruction.setTransform(videoAssetTrack.preferredTransform, atTime: kCMTimeZero) 

    mainInstruction.layerInstructions = [videoLayerIntruction] 
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, self.videoAsset.duration) 


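    // Portrait clips report a landscape naturalSize, so swap width and height for the render size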
    var naturalSize = CGSize() 

    if isVideoAssetPortrait_ { 
     naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width) 
    } else { 
     naturalSize = videoAssetTrack.naturalSize 
    } 

    renderWidth = naturalSize.width 
    renderHeight = naturalSize.height 

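    // parentLayer hosts the video layer plus the watermark overlay for the animation tool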
    parentLayer.frame = CGRectMake(0, 0, renderWidth, renderHeight) 
    parentLayer.geometryFlipped = true 
    parentLayer.anchorPoint = CGPointMake(0.5, 0.5) 

    videoLayer.frame = CGRectMake(0, 0, renderWidth, renderHeight) 

    self.overlayLayer.frame = CGRectMake(self.renderWidth, self.renderHeight, self.newRatioWidth, self.newRatioHeight) 
    self.overlayLayer.addAnimation(self.animation, forKey: "contents") 
    self.overlayLayer.anchorPoint = CGPointMake(0.5, 0.5) 
    self.overlayLayer.contentsGravity = kCAGravityResizeAspect 

    parentLayer.addSublayer(videoLayer) 

    if addedOverlay == true { 
     parentLayer.addSublayer(overlayLayer) 
    } 

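    // Video composition: render settings, the single instruction, and the Core Animation
    // tool that composites parentLayer during export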
    mainCompositionInst.renderScale = 1.0 
    mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight) 
    mainCompositionInst.instructions = [mainInstruction] 
    mainCompositionInst.frameDuration = CMTimeMake(1, 30) 
    mainCompositionInst.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer, inLayer: parentLayer) 

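    // Write the export to a uniquely named file in the temporary directory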
    outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("CreatedVideo-\(NSUUID().UUIDString).mov") 

    let exporter: AVAssetExportSession = AVAssetExportSession(asset: videoAsset, presetName: AVAssetExportPresetHighestQuality)! 
    exporter.outputURL = outputURL 
    exporter.outputFileType = AVFileTypeQuickTimeMovie 
    exporter.shouldOptimizeForNetworkUse = false 
    exporter.videoComposition = mainCompositionInst 

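    // Run the export and handle the result on the main queue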
    exporter.exportAsynchronouslyWithCompletionHandler({ 
     dispatch_async(dispatch_get_main_queue(), { 
      self.exportVideo(exporter) 
      switch exporter.status{ 
      case AVAssetExportSessionStatus.Failed: 
       print("FAILED EXPORT - \(exporter.error)\n") 
      case AVAssetExportSessionStatus.Cancelled: 
       print("canceled \(exporter.error)\n") 
      default: 
       print("COMPLETED EXPORT\n") 
      } 
     }) 
    }) 
} 

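// Saves the exported movie file into the Photos library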
func exportVideo(sender: AVAssetExportSession) { 
    print("Asked to export\n") 
    PHPhotoLibrary.sharedPhotoLibrary().performChanges({ 
    PHAssetChangeRequest.creationRequestForAssetFromVideoAtFileURL(sender.outputURL!) 
     }, completionHandler: { success, error in 
      if success { 
       print("Success! Finished saving video.") 
      } else { 
       print("ERROR - " + (error?.localizedDescription)!) 
      } 
    }) 
} 

Does anyone know why the composition starts failing after it works the first time? I'd love any suggestions!


This sounds like the answer you're looking for: http://stackoverflow.com/a/31146867/1638273 – SeanLintern88


How on earth did I not see that? Makes me question my sanity and my search skills... Thank you very much! – riverhawk


@riverhawk I'm facing the same problem. The solution @SeanLintern88 provided is the same as what you are already doing: 'videoLayerIntruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)'. How does that solve the problem? –
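
For anyone landing here later, below is a minimal sketch of the save action with every object that feeds the exporter created locally on each save, assuming the repeat failure comes from reusing the instance-level mixComposition, mainInstruction, and mainCompositionInst across taps: a second pass adds extra tracks and instructions to the same composition, and the layer instruction can end up referring to a track that the exported asset does not contain, which is a typical way to hit -11841. The structure is illustrative only (the method name is made up, the watermark layers and orientation handling are elided), not a confirmed drop-in fix.

// Assumes AVFoundation is imported and fileURL is the same property used above.
@IBAction func saveVideoFresh(sender: AnyObject) {
    let asset = AVAsset(URL: fileURL as NSURL!)

    // Fresh composition for every save - nothing carried over from a previous export
    let composition = AVMutableComposition()
    let videoTrack = composition.addMutableTrackWithMediaType(AVMediaTypeVideo,
        preferredTrackID: Int32(kCMPersistentTrackID_Invalid))
    let audioTrack = composition.addMutableTrackWithMediaType(AVMediaTypeAudio,
        preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    do {
        try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration),
            ofTrack: asset.tracksWithMediaType(AVMediaTypeVideo)[0], atTime: kCMTimeZero)
        try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration),
            ofTrack: asset.tracksWithMediaType(AVMediaTypeAudio)[0], atTime: kCMTimeZero)
    } catch {
        print("Failed to insert tracks: \(error)")
        return
    }

    // Fresh instruction objects as well, covering the whole composition
    let sourceVideoTrack = asset.tracksWithMediaType(AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    layerInstruction.setTransform(sourceVideoTrack.preferredTransform, atTime: kCMTimeZero)

    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration)
    instruction.layerInstructions = [layerInstruction]

    let videoComposition = AVMutableVideoComposition()
    videoComposition.instructions = [instruction]
    videoComposition.frameDuration = CMTimeMake(1, 30)
    videoComposition.renderSize = sourceVideoTrack.naturalSize
    // ... build parentLayer/videoLayer/overlayLayer and the animation tool here,
    // also as new objects, then assign videoComposition.animationTool ...

    let outputURL = NSURL(fileURLWithPath: NSTemporaryDirectory())
        .URLByAppendingPathComponent("CreatedVideo-\(NSUUID().UUIDString).mov")

    // Export the composition itself, so the track the layer instruction refers to exists in it
    guard let exporter = AVAssetExportSession(asset: composition,
        presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.videoComposition = videoComposition
    exporter.exportAsynchronouslyWithCompletionHandler {
        dispatch_async(dispatch_get_main_queue()) {
            print("Export status: \(exporter.status.rawValue), error: \(exporter.error)")
        }
    }
}

The same reasoning would apply to parentLayer, videoLayer, and overlayLayer: rebuilding them per save keeps a second export from re-adding sublayers that are still wired to the previous composition's animation tool.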
