
I am new to Swift and have been trying to build a real-time video processing app with OpenCV. I am using Swift, iOS 9, and Xcode 7, and I cannot pass self as the AVCaptureVideoDataOutputSampleBufferDelegate.

I have been trying to capture frames, and I put together the following code from a few tutorials.

  1. In the code below I keep getting an error:

    func setupCameraSession() { 
        let devices = AVCaptureDevice.devices() 
        var captureDevice:AVCaptureDevice? 
    
        do { 
         if cameraType == CameraType.Front { 
          for device in devices { 
           if device.position == AVCaptureDevicePosition.Front { 
            captureDevice = device as? AVCaptureDevice 
            break 
           } 
          } 
         } 
         else { 
          captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) as AVCaptureDevice 
         } 
    
         let deviceInput = try AVCaptureDeviceInput(device: captureDevice) 
    
         cameraSession.beginConfiguration() 
    
         if (cameraSession.canAddInput(deviceInput) == true) { 
          cameraSession.addInput(deviceInput) 
         } 
    
         let dataOutput = AVCaptureVideoDataOutput() 
         dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 
         dataOutput.alwaysDiscardsLateVideoFrames = true 
         dataOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)) 
    
         if (cameraSession.canAddOutput(dataOutput) == true) { 
          cameraSession.addOutput(dataOutput) 
         } 
    
         cameraSession.commitConfiguration() 
    
        } 
        catch let error as NSError { 
         NSLog("\(error), \(error.localizedDescription)") 
        } 
    } 
    

which reports the following error:

FirstViewController.swift:137:48: 
Cannot convert value of type 'FirstViewController' to expected argument 
type 'AVCaptureVideoDataOutputSampleBufferDelegate!' 

The error is caused by the 'self' argument in the setSampleBufferDelegate call.

I believe this is the key part for capturing each frame, but I am not entirely sure how it works.

  • I would also like to know how I should use the following functions to capture frames and process them into a UIImage:

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Here you collect each frame and process it 
        print("frame received") 
    } 
    
    func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Here you can count how many frames are dropped 
        print("frame dropped") 
    } 
    
  • When and where are these functions called, and how do I convert each captured frame into a UIImage?

    Here is the entire FirstViewController class:

    // 
    // FirstViewController.swift 
    // nVisoDemoApp 
    // 
    // Created by Timothy Llewellynn on 30/06/16. 
    // Copyright © 2016 Timothy Llewellynn. All rights reserved. 
    // 
    
    import UIKit 
    import AVFoundation 
    
    class FirstViewController: UIViewController, UITabBarControllerDelegate { 
    
        @IBOutlet weak var OpenCVVersion: UILabel! 
        @IBOutlet weak var OpenCVDisplay: UIImageView! 
    
        @IBOutlet weak var SadnessValue: UILabel! 
        @IBOutlet weak var NeutralValue: UILabel! 
        @IBOutlet weak var DisgustValue: UILabel! 
        @IBOutlet weak var AngerValue: UILabel! 
        @IBOutlet weak var SurpriseValue: UILabel! 
        @IBOutlet weak var FearValue: UILabel! 
        @IBOutlet weak var HappinessValue: UILabel! 
    
        enum CameraType { 
         case Front 
         case Back 
        } 
    
        var cameraType = CameraType.Front 
    
        override func viewDidLoad() { 
         super.viewDidLoad() 
         // Do any additional setup after loading the view, typically from a nib.
    //  SadnessValue.text = "[Value]" 
    //  NeutralValue.text = "[Value]" 
    //  DisgustValue.text = "[Value]" 
    //  AngerValue.text = "[Value]" 
    //  SurpriseValue.text = "[Value]" 
    //  FearValue.text = "[Value]" 
    //  HappinessValue.text = "[Value]" 
    
    //  OpenCVDisplay.image = 
    
         self.view.sendSubviewToBack(OpenCVDisplay) 
         setupCameraSession() 
         OpenCVVersion.text = CVWrapper.versionOpenCV() 
         OpenCVDisplay.layer.addSublayer(previewLayer) 
         cameraSession.startRunning() 
    
         let leftSwipe = UISwipeGestureRecognizer(target: self, action: Selector("handleSwipes:")) 
         leftSwipe.direction = .Left 
         view.addGestureRecognizer(leftSwipe) 
        } 
    
        func handleSwipes(sender:UISwipeGestureRecognizer) { 
         if (sender.direction == .Left) { 
          let selectedIndex: Int = self.tabBarController!.selectedIndex 
          self.tabBarController!.selectedIndex = selectedIndex + 1 
         } 
    
         if (sender.direction == .Right) { 
    
         } 
        } 
    
        override func viewDidAppear(animated: Bool) { 
         super.viewDidAppear(animated) 
    
         self.view.sendSubviewToBack(OpenCVDisplay) 
         setupCameraSession() 
         OpenCVVersion.text = CVWrapper.versionOpenCV() 
         OpenCVDisplay.layer.addSublayer(previewLayer) 
         cameraSession.startRunning() 
        } 
    
        override func viewWillDisappear(animated: Bool) { 
         super.viewWillDisappear(animated) 
    
         cameraSession.stopRunning() 
         previewLayer.removeFromSuperlayer() 
    
         let currentCameraInput: AVCaptureInput = cameraSession.inputs[0] as! AVCaptureInput 
         cameraSession.removeInput(currentCameraInput) 
        } 
    
        override func didReceiveMemoryWarning() { 
         super.didReceiveMemoryWarning() 
         // Dispose of any resources that can be recreated. 
        } 
    
        lazy var cameraSession: AVCaptureSession = { 
         let s = AVCaptureSession() 
         s.sessionPreset = AVCaptureSessionPresetHigh 
         return s 
        }() 
    
        lazy var previewLayer: AVCaptureVideoPreviewLayer = { 
         let preview = AVCaptureVideoPreviewLayer(session: self.cameraSession) 
         preview.bounds = CGRect(x: 0, y: 0, width: self.view.bounds.width, height: self.view.bounds.height) 
         preview.position = CGPoint(x: CGRectGetMidX(self.view.bounds), y: CGRectGetMidY(self.view.bounds)) 
         preview.videoGravity = AVLayerVideoGravityResize 
         return preview 
        }() 
    
        func setupCameraSession() { 
         let devices = AVCaptureDevice.devices() 
         var captureDevice:AVCaptureDevice? 
    
         do { 
          if cameraType == CameraType.Front { 
           for device in devices { 
            if device.position == AVCaptureDevicePosition.Front { 
             captureDevice = device as? AVCaptureDevice 
             break 
            } 
           } 
          } 
          else { 
           captureDevice = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo) as AVCaptureDevice 
          } 
    
          let deviceInput = try AVCaptureDeviceInput(device: captureDevice) 
    
          cameraSession.beginConfiguration() 
    
          if (cameraSession.canAddInput(deviceInput) == true) { 
           cameraSession.addInput(deviceInput) 
          } 
    
          let dataOutput = AVCaptureVideoDataOutput() 
          dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString) : NSNumber(unsignedInt: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)] 
          dataOutput.alwaysDiscardsLateVideoFrames = true 
    
    //   let queue = dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL) 
    //   dataOutput.setSampleBufferDelegate(self, queue: queue) 
          dataOutput.setSampleBufferDelegate(self, queue: dispatch_queue_create("cameraQueue", DISPATCH_QUEUE_SERIAL)) 
          if (cameraSession.canAddOutput(dataOutput) == true) { 
           cameraSession.addOutput(dataOutput) 
          } 
          // /Users/tllewellynn/Desktop/dev/nVisoDemo/nVisoDemo/FirstViewController.swift:137:48: Cannot convert value of type 'FirstViewController' to expected argument type 'AVCaptureVideoDataOutputSampleBufferDelegate!' 
          cameraSession.commitConfiguration() 
    
         } 
         catch let error as NSError { 
          NSLog("\(error), \(error.localizedDescription)") 
         } 
        } 
    
    // func capturePicture(){ 
    //   
    //  print("Capturing image") 
    //  var stillImageOutput = AVCaptureStillImageOutput() 
    //  stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG] 
    //  cameraSession.addOutput(stillImageOutput) 
    //   
    //  if let videoConnection = stillImageOutput.connectionWithMediaType(AVMediaTypeVideo){ 
    //   stillImageOutput.captureStillImageAsynchronouslyFromConnection(videoConnection, completionHandler: { 
    //    (sampleBuffer, error) in 
    //    var imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) 
    //    var dataProvider = CGDataProviderCreateWithCFData(imageData) 
    //    var cgImageRef = CGImageCreateWithJPEGDataProvider(dataProvider, nil, true, CGColorRenderingIntent.RenderingIntentDefault) 
    ////    var image = UIImage(CGImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.Right) 
    //     
    ////    var imageView = UIImageView(image: image) 
    ////    imageView.frame = CGRect(x:0, y:0, width:self.screenSize.width, height:self.screenSize.height) 
    ////     
    ////    //Show the captured image to 
    ////    self.view.addSubview(imageView) 
    ////     
    ////    //Save the captured preview to image 
    ////    UIImageWriteToSavedPhotosAlbum(image, nil, nil, nil) 
    //     
    //   }) 
    //  } 
    // } 
    
        @IBAction func SwitchCameraAction(sender: UIButton) { 
         cameraType = cameraType == CameraType.Back ? CameraType.Front : CameraType.Back 
         cameraSession.stopRunning() 
         previewLayer.removeFromSuperlayer() 
    
         let currentCameraInput: AVCaptureInput = cameraSession.inputs[0] as! AVCaptureInput 
         cameraSession.removeInput(currentCameraInput) 
    
         setupCameraSession() 
         OpenCVDisplay.layer.addSublayer(previewLayer) 
         cameraSession.startRunning() 
        } 
    
        func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
         // Here you collect each frame and process it 
         print("frame received") 
        } 
    
        func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
         // Here you can count how many frames are dropped 
         print("frame dropped") 
        } 
    } 
    

    Any insights?

    Answer

    Just change

    class FirstViewController: UIViewController, UITabBarControllerDelegate 

    to

    class FirstViewController: UIViewController, UITabBarControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate 
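
    Regarding the second part of the question (when the callbacks fire and how to get a UIImage): once the class conforms to the protocol, the session calls captureOutput(_:didOutputSampleBuffer:fromConnection:) on the queue passed to setSampleBufferDelegate for every frame, and the didDropSampleBuffer variant whenever a late frame is discarded. A minimal sketch of the conversion, assuming the delegate fix above is in place and that OpenCVDisplay is the image view from the question (Swift 2 / iOS 9 APIs, not verified against the exact project), might look like this:

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
        // Pull the pixel buffer that backs this frame. 
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 

        // Wrap it in a CIImage and render it into a CGImage. 
        let ciImage = CIImage(CVPixelBuffer: pixelBuffer) 
        let context = CIContext() 
        let rect = CGRect(x: 0, y: 0, 
                          width: CGFloat(CVPixelBufferGetWidth(pixelBuffer)), 
                          height: CGFloat(CVPixelBufferGetHeight(pixelBuffer))) 
        let cgImage = context.createCGImage(ciImage, fromRect: rect) 

        // Orientation may still need adjusting for the front camera. 
        let image = UIImage(CGImage: cgImage) 

        // The delegate runs on "cameraQueue", so hop back to the main 
        // queue before touching any UIKit views. 
        dispatch_async(dispatch_get_main_queue()) { 
            self.OpenCVDisplay.image = image 
        } 
    } 

    Creating a CIContext on every frame is wasteful; in practice you would keep a single context around as a property and hand the CVPixelBuffer (or the resulting image) to your OpenCV wrapper from there.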