
Recording video with a live filter in Swift

I'm new to Swift and I'm trying to build a camera app that applies a filter in real time and saves the video with the filter applied.

So far I can preview the filtered feed in real time, but when I save the video, every frame comes out black.

import UIKit 
import AVFoundation 
import AssetsLibrary 
import CoreMedia 
import Photos 

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate { 

    var captureSession: AVCaptureSession! 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet weak var recordButtton: UIButton! 
    @IBOutlet weak var imageView: UIImageView! 

    var assetWriter: AVAssetWriter? 
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor? 
    var isWriting = false 
    var currentSampleTime: CMTime? 
    var currentVideoDimensions: CMVideoDimensions? 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     FilterVendor.register() 
     setupCaptureSession() 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
    } 

    func setupCaptureSession() { 
     let captureSession = AVCaptureSession() 
     captureSession.sessionPreset = AVCaptureSessionPresetPhoto 

     guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else { 
      print("Can't access the camera") 
      return 
     } 

     if captureSession.canAddInput(input) { 
      captureSession.addInput(input) 
     } 

     let videoOutput = AVCaptureVideoDataOutput() 

     videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main) 
     if captureSession.canAddOutput(videoOutput) { 
      captureSession.addOutput(videoOutput) 
     } 

     let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
     if((previewLayer) != nil) { 
      view.layer.addSublayer(previewLayer!) 
     } 

     captureSession.startRunning() 
    } 

    @IBAction func record(_ sender: Any) { 
     if isWriting { 
      print("stop record") 
      self.isWriting = false 
      assetWriterPixelBufferInput = nil 
      assetWriter?.finishWriting(completionHandler: {[unowned self]() -> Void in 
       self.saveMovieToCameraRoll() 
      }) 
     } else { 
      print("start record") 
      createWriter() 
      assetWriter?.startWriting() 
      assetWriter?.startSession(atSourceTime: currentSampleTime!) 
      isWriting = true 
     } 
    } 

    func saveMovieToCameraRoll() { 
     PHPhotoLibrary.shared().performChanges({ 
      PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL() as URL) 
     }) { saved, error in 
      if saved { 
       print("saved") 
      } 
     } 
    } 

    func movieURL() -> NSURL { 
     let tempDir = NSTemporaryDirectory() 
     let url = NSURL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov") 
     return url! as NSURL 
    } 

    func checkForAndDeleteFile() { 
     let fm = FileManager.default 
     let url = movieURL() 
     let exist = fm.fileExists(atPath: url.path!) 

     if exist { 
      do { 
       try fm.removeItem(at: url as URL) 
      } catch let error as NSError { 
       print(error.localizedDescription) 
      } 
     } 
    } 

    func createWriter() { 
     self.checkForAndDeleteFile() 

     do { 
      assetWriter = try AVAssetWriter(outputURL: movieURL() as URL, fileType: AVFileTypeQuickTimeMovie) 
     } catch let error as NSError { 
      print(error.localizedDescription) 
      return 
     } 

     let outputSettings = [ 
      AVVideoCodecKey : AVVideoCodecH264, 
      AVVideoWidthKey : Int(currentVideoDimensions!.width), 
      AVVideoHeightKey : Int(currentVideoDimensions!.height) 
     ] as [String : Any] 

     let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings as? [String : AnyObject]) 
     assetWriterVideoInput.expectsMediaDataInRealTime = true 
     assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat(M_PI/2.0)) 

     let sourcePixelBufferAttributesDictionary = [ 
      String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA), 
      String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width), 
      String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height), 
      String(kCVPixelFormatOpenGLESCompatibility) : kCFBooleanTrue 
     ] as [String : Any] 

     assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, 
                      sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) 

     if assetWriter!.canAdd(assetWriterVideoInput) { 
      assetWriter!.add(assetWriterVideoInput) 
     } else { 
      print("no way\(assetWriterVideoInput)") 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
     autoreleasepool { 

      connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

      guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 
      let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

      let filter = CIFilter(name: "Fİlter")! 
      filter.setValue(cameraImage, forKey: kCIInputImageKey) 


      let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
      self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
      self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

      if self.isWriting { 
       if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
        var newPixelBuffer: CVPixelBuffer? = nil 

        CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

        let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

        if success == false { 
         print("Pixel Buffer failed") 
        } 
       } 
      } 

      DispatchQueue.main.async { 

       if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
        let filteredImage = UIImage(ciImage: outputValue) 
        self.imageView.image = filteredImage 
       } 
      } 
     } 
    } 
} 

Have you tried saving the video without the filter? – Simon


@Simon No difference :( – hackio

Answer


I've added some comments to the critical section below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
    autoreleasepool { 

     connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

     // COMMENT: This line makes sense - this is your pixelbuffer from the camera. 
     guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 

     // COMMENT: OK, so you turn pixelBuffer into a CIImage... 
     let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

     // COMMENT: And now you've created a CIImage with a Filter instruction... 
     let filter = CIFilter(name: "Fİlter")! 
     filter.setValue(cameraImage, forKey: kCIInputImageKey) 


     let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
     self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
     self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

     if self.isWriting { 
      if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
       // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write... 
       var newPixelBuffer: CVPixelBuffer? = nil 

       // COMMENT: And you grabbed memory from the pool. 
       CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

       // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame. 
       let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

       if success == false { 
        print("Pixel Buffer failed") 
       } 
      } 
     } 

     // COMMENT: And now you're sending the filtered image back to the screen. 
     DispatchQueue.main.async { 

      if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
       let filteredImage = UIImage(ciImage: outputValue) 
       self.imageView.image = filteredImage 
      } 
     } 
    } 
} 

It looks to me like you're basically getting the camera image, creating a filtered copy of it, and then making a brand-new pixel buffer that is empty and writing that out.

If you appended the pixel buffer you grabbed from the camera instead of the new one you created, you would likely succeed in writing out an image.
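As a quick sanity check of that point (unfiltered output), the buffer the question already unwraps could be appended directly. This is a hypothetical debugging step, not the final fix:

    // Sanity check: appending the camera's own buffer should produce
    // real (unfiltered) frames, confirming the writer pipeline works.
    self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!)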

What you need in order to write out the filtered video successfully is to create a new CVPixelBuffer from the CIImage - that solution already exists on StackOverflow, I know, because I needed that step myself!
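Here is a minimal sketch of what that missing render step could look like in the isWriting branch. The ciContext property is an assumption (a CIContext created once on the view controller, e.g. let ciContext = CIContext(), rather than per frame); everything else mirrors the question's code:

    if self.isWriting { 
        if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
            var newPixelBuffer: CVPixelBuffer? = nil 

            CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

            // Render the filtered CIImage into the pooled buffer *before* 
            // appending it, so the writer no longer receives empty frames. 
            // `self.ciContext` is an assumption: a CIContext stored on the 
            // view controller, since creating one per frame would be slow. 
            if let buffer = newPixelBuffer, 
                let filteredImage = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
                self.ciContext.render(filteredImage, to: buffer) 

                let success = self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!) 

                if success == false { 
                    print("Pixel Buffer failed") 
                } 
            } 
        } 
    } 

The only behavioural change from the original branch is the render call: with it, the appended buffer contains the filtered frame instead of the empty (black) one.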


Not Swift-based, but here's the relevant Objective-C code from a few years ago: https://stackoverflow.com/questions/22819337/adding-filters-to-video-with-avfoundation-osx-how-do-i-write-my-resulting-image – Tim


Thanks Tim :) – hackio