
Hello, I followed Jared Davidson's tutorial on creating a custom camera view and saving images with AVFoundation: https://www.youtube.com/watch?v=w0O3ZGUS3pk (Saving video using AVFoundation, Swift).

However, I want to record and save video rather than an image. Can someone help me? I'm sure it's simple, but Apple's documentation is written in Objective-C and I can't decipher it.

Here is my code. Thanks.

import UIKit 
import AVFoundation 

class ViewController: UIViewController { 

    var captureSession = AVCaptureSession() 
    var sessionOutput = AVCaptureStillImageOutput() 
    var previewLayer = AVCaptureVideoPreviewLayer() 


    @IBOutlet var cameraView: UIView! 

    override func viewWillAppear(animated: Bool) { 

     let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) 
     for device in devices { 
      if device.position == AVCaptureDevicePosition.Front{ 


       do{ 

        let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice) 

        if captureSession.canAddInput(input){ 

         captureSession.addInput(input) 
         sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG] 

         if captureSession.canAddOutput(sessionOutput){ 

          captureSession.addOutput(sessionOutput) 
          captureSession.startRunning() 

          previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
          previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill 
          previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait 
          cameraView.layer.addSublayer(previewLayer) 

          previewLayer.position = CGPoint(x: self.cameraView.frame.width/2, y: self.cameraView.frame.height/2) 
          previewLayer.bounds = cameraView.frame 


         } 

        } 

       } 
       catch{ 

        print("Error") 
       } 

      } 
     }  

    } 


    @IBAction func TakePhoto(sender: AnyObject) { 

     if let videoConnection = sessionOutput.connectionWithMediaType(AVMediaTypeVideo){ 

      sessionOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { 
       buffer, error in 

       let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) 
       UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData)!, nil, nil, nil) 

      }) 

     } 

    } 

} 

Answers


You can save the recorded video to a file by creating an AVCaptureMovieFileOutput, adding it to your capture session, and making your ViewController conform to AVCaptureFileOutputRecordingDelegate.

This example records 5 seconds of video to a file named "output.mov" in the app's Documents directory.

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate { 

    var captureSession = AVCaptureSession() 
    var sessionOutput = AVCaptureStillImageOutput() 
    var movieOutput = AVCaptureMovieFileOutput() 
    var previewLayer = AVCaptureVideoPreviewLayer() 

    @IBOutlet var cameraView: UIView! 

    override func viewWillAppear(animated: Bool) { 
     self.cameraView = self.view 

     let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) 
     for device in devices { 
      if device.position == AVCaptureDevicePosition.Front{ 


       do{ 

        let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice) 

        if captureSession.canAddInput(input){ 

         captureSession.addInput(input) 
         sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG] 

         if captureSession.canAddOutput(sessionOutput){ 

          captureSession.addOutput(sessionOutput) 

          previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
          previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill 
          previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.Portrait 
          cameraView.layer.addSublayer(previewLayer) 

          previewLayer.position = CGPoint(x: self.cameraView.frame.width/2, y: self.cameraView.frame.height/2) 
          previewLayer.bounds = cameraView.frame 


         } 

         captureSession.addOutput(movieOutput) 

         captureSession.startRunning() 

         let paths = NSFileManager.defaultManager().URLsForDirectory(.DocumentDirectory, inDomains: .UserDomainMask) 
         let fileUrl = paths[0].URLByAppendingPathComponent("output.mov") 
         try? NSFileManager.defaultManager().removeItemAtURL(fileUrl) 
         movieOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self) 

         let delayTime = dispatch_time(DISPATCH_TIME_NOW, Int64(5 * Double(NSEC_PER_SEC))) 
         dispatch_after(delayTime, dispatch_get_main_queue()) { 
          print("stopping") 
          self.movieOutput.stopRecording() 
         } 
        } 

       } 
       catch{ 

        print("Error") 
       } 

      } 
     } 

    } 

    func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { 
     print("FINISHED \(error)") 
     // save video to camera roll 
     if error == nil { 
      UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, nil, nil, nil) 
     } 
    } 

} 

You can download the Documents directory from the device via Xcode > Devices > your iPhone > _app name_ > Download Container. You can save the video to the camera roll by calling 'UISaveVideoAtPathToSavedPhotosAlbum()' inside the 'didFinishRecordingToOutputFileAtURL' delegate method (a minimal sketch of this follows the comments below). Please accept this answer if it answered your question. –


Haha, thank you very much! Hopefully this helps other people, because I've seen a lot of them asking. –
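A minimal sketch of the comment's suggestion, in Swift 3 syntax to match the port below (not part of the original answer): pass a completion target and selector to UISaveVideoAtPathToSavedPhotosAlbum() so you can tell whether the write to the camera roll actually succeeded. The callback name video(_:didFinishSavingWithError:contextInfo:) is the signature UIKit expects; the rest mirrors the delegate method above.

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
     // Hand the saved file to the photo library and ask for a callback when the write finishes 
     if error == nil { 
      UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, self, #selector(video(_:didFinishSavingWithError:contextInfo:)), nil) 
     } 
    } 

    @objc func video(_ videoPath: String, didFinishSavingWithError error: Error?, contextInfo: UnsafeRawPointer) { 
     // error is nil when the video actually landed in the camera roll 
     print(error == nil ? "Saved to camera roll" : "Save failed: \(error!)") 
    } 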


Thank you, this was very helpful to me. Here is a version of Rhythmic Fistman's answer ported to Swift 3, with the required import statements and delegate methods.

import UIKit 
import AVFoundation 

class ViewController: UIViewController, 
AVCaptureFileOutputRecordingDelegate { 

var captureSession = AVCaptureSession() 
var sessionOutput = AVCaptureStillImageOutput() 
var movieOutput = AVCaptureMovieFileOutput() 
var previewLayer = AVCaptureVideoPreviewLayer() 

@IBOutlet var cameraView: UIView! 

override func viewWillAppear(_ animated: Bool) { 
    self.cameraView = self.view 

    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) 
    for device in devices! { 
     if (device as AnyObject).position == AVCaptureDevicePosition.front{ 


      do{ 

       let input = try AVCaptureDeviceInput(device: device as! AVCaptureDevice) 

       if captureSession.canAddInput(input){ 

        captureSession.addInput(input) 
        sessionOutput.outputSettings = [AVVideoCodecKey : AVVideoCodecJPEG] 

        if captureSession.canAddOutput(sessionOutput){ 

         captureSession.addOutput(sessionOutput) 

         previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
         previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill 
         previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait 
         cameraView.layer.addSublayer(previewLayer) 

         previewLayer.position = CGPoint(x: self.cameraView.frame.width/2, y: self.cameraView.frame.height/2) 
         previewLayer.bounds = cameraView.frame 


        } 

        captureSession.addOutput(movieOutput) 

        captureSession.startRunning() 

        let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 
        let fileUrl = paths[0].appendingPathComponent("output.mov") 
        try? FileManager.default.removeItem(at: fileUrl) 
        movieOutput.startRecording(toOutputFileURL: fileUrl, recordingDelegate: self) 

        let delayTime = DispatchTime.now() + 5 
        DispatchQueue.main.asyncAfter(deadline: delayTime) { 
         print("stopping") 
         self.movieOutput.stopRecording() 
        } 
       } 

      } 
      catch{ 

       print("Error") 
      } 

     } 
    } 

} 


//MARK: AVCaptureFileOutputRecordingDelegate Methods 

func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { 

} 

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
    print("FINISHED \(error)") 
    // save video to camera roll 
    if error == nil { 
     UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path, nil, nil, nil) 
    } 
} 

}


The video is being recorded and saved to the library, but no sound is recorded. Please help me. – iDev750


After the line

if (device as AnyObject).position == AVCaptureDevicePosition.front{

add the following audio input setup:

// Audio Input 
       let audioInputDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 

       do 
       { 
        let audioInput = try AVCaptureDeviceInput(device: audioInputDevice) 

        // Add Audio Input 
        if captureSession.canAddInput(audioInput) 
        { 
         captureSession.addInput(audioInput) 
        } 
        else 
        { 
         NSLog("Can't Add Audio Input") 
        } 
       } 
       catch let error 
       { 
        NSLog("Error Getting Input Device: \(error)") 
       } 

Thanks


For the audio recording issue, add the following code where you create the captureSession:

askMicroPhonePermission(completion: { (isMicrophonePermissionGiven) in 
     if isMicrophonePermissionGiven { 
      do { 
       try self.captureSession.addInput(AVCaptureDeviceInput(device: self.captureAudio)) 
      } catch { 
       print("Error creating the database") 
      } 
     } 
    }) 

The askMicroPhonePermission function is shown below:

func askMicroPhonePermission(completion: @escaping (_ success: Bool)-> Void) { 
    switch AVAudioSession.sharedInstance().recordPermission() { 
    case AVAudioSessionRecordPermission.granted: 
     completion(true) 
    case AVAudioSessionRecordPermission.denied: 
     completion(false) //show alert if required 
    case AVAudioSessionRecordPermission.undetermined: 
     AVAudioSession.sharedInstance().requestRecordPermission({ (granted) in 
      if granted { 
       completion(true) 
      } else { 
       completion(false) // show alert if required 
      } 
     }) 
    default: 
     completion(false) 
    } 
} 

And you must add a value for the NSMicrophoneUsageDescription key in your Info.plist file.
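For reference (not part of the original answer), the raw Info.plist entry would look something like this; the description string itself is just a placeholder:

<key>NSMicrophoneUsageDescription</key> 
<string>This app uses the microphone to record audio along with your video.</string> 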