2016-03-30 84 views
1

我正在嘗試用 Swift 在 iPhone 上創建一個使用相機的照度計應用。思路是:讀取每個像素的亮度分量,然後對它們取平均,得到一個單一的數值。我不需要顯示相機預覽。我一直在參考幾個教程拼湊代碼,目前寫出了下面的內容。camDeviceSetup() 在 viewDidLoad 中運行,cameraSetup() 在按下按鈕時運行。(參考教程:用採樣緩衝區實現 Swift 2 實時視頻濾波器。)

我在調用「videoDeviceOutput!.setSampleBufferDelegate」這一行遇到了錯誤,提示無法將類型為 FirstViewController(視圖控制器)的值轉換為期望的參數類型。

// The session that coordinates camera input and video-data output.
let captureSession = AVCaptureSession() 
// If we find a device we'll store it here for later use 
// (set by camDeviceSetup() when the back camera is found).
var captureDevice : AVCaptureDevice? 
// The video-data output whose sample-buffer delegate receives raw frames.
// Created in cameraSetup(); nil until then.
var videoDeviceOutput: AVCaptureVideoDataOutput? 
// AVCaptureVideoPreviewLayer is a subclass of CALayer that you use to display video as it is being captured by an input device. 
// NOTE(review): replaced with a session-bound instance in cameraSetup();
// this initial no-session instance is a throwaway placeholder.
var previewLayer = AVCaptureVideoPreviewLayer() 

/// Finds the back-facing video camera and attaches it to `captureSession`
/// as an input. Intended to run once, from viewDidLoad.
///
/// Fix vs. original: `err` was declared `let err: NSError? = nil` and never
/// assigned, so the `if err != nil` branch was dead code, while the actual
/// failure path was a crashing `try!`. Errors are now caught and reported.
func camDeviceSetup() {
    captureSession.sessionPreset = AVCaptureSessionPreset640x480
    let devices = AVCaptureDevice.devices()
    for device in devices {
        // Make sure this particular device supports video.
        if (device.hasMediaType(AVMediaTypeVideo)) {
            // Check the position and confirm we've got the back camera.
            if (device.position == AVCaptureDevicePosition.Back) {
                captureDevice = device as? AVCaptureDevice
            }
        }
    }
    if let captureDevice = captureDevice {
        // do/catch instead of `try!`: creating the input can fail at runtime
        // (camera in use by another app, permission denied), and that should
        // not crash the app.
        do {
            let input = try AVCaptureDeviceInput(device: captureDevice)
            // Guard with canAddInput so a configuration conflict does not
            // raise an exception inside addInput.
            if captureSession.canAddInput(input) {
                captureSession.addInput(input)
            }
        } catch let error as NSError {
            print("error: \(error.localizedDescription)")
        }
    }
}

/// Builds the preview layer and the video-data output, wires up the
/// sample-buffer delegate, and starts the capture session. Runs when the
/// user presses the button.
///
/// Fix vs. original: the optional `videoDeviceOutput` property was
/// force-unwrapped (`!`) four times; we now configure a local non-optional
/// constant and assign it to the property once it is fully set up.
func cameraSetup() {
    previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    previewLayer.frame = view.bounds
    view.layer.addSublayer(previewLayer)

    // Configure a local constant so no force-unwrapping is needed.
    let output = AVCaptureVideoDataOutput()
    // Bi-planar full-range YCbCr: the luma (Y) plane is exactly the
    // per-pixel intensity the question wants to average.
    output.videoSettings = [kCVPixelBufferPixelFormatTypeKey:Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
    output.alwaysDiscardsLateVideoFrames = true

    // NOTE(review): this line only compiles when the enclosing view
    // controller declares conformance to
    // AVCaptureVideoDataOutputSampleBufferDelegate — the class declaration
    // is outside this snippet; that missing conformance was the error
    // described in the question.
    output.setSampleBufferDelegate(self, queue: dispatch_queue_create("VideoBuffer", DISPATCH_QUEUE_SERIAL))

    if captureSession.canAddOutput(output) {
        captureSession.addOutput(output)
    }
    videoDeviceOutput = output

    captureSession.startRunning()
}

/// AVCaptureVideoDataOutputSampleBufferDelegate callback: invoked on the
/// "VideoBuffer" serial queue for every captured frame once the delegate
/// is registered in cameraSetup().
/// - Parameter sampleBuffer: the raw frame; lock its pixel buffer and read
///   the luma plane here to compute average intensity.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { 
    // Think once the delegate is correctly set my algorithm for finding light intensity goes here 

} 
+0

該行的問題在於我沒有在 ViewController 頂部的類聲明中加入 AVCaptureVideoDataOutputSampleBufferDelegate。 – rmaspero

回答

0

該行出錯的原因是我沒有在 ViewController 頂部的類聲明中加入 AVCaptureVideoDataOutputSampleBufferDelegate。

+0

你是否讓該視圖控制器(vc)遵循了 AVCaptureVideoDataOutputSampleBufferDelegate 協議? –