I found a solution using AVFoundation that draws a square around the face and tracks it in real time on iOS. I've modified some of the code here.
import UIKit
import AVFoundation
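// Overlay view that draws a translucent red border; it is moved over the detected face on every frame.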
class DetailsView: UIView {
    func setup() {
        layer.borderColor = UIColor.red.withAlphaComponent(0.7).cgColor
        layer.borderWidth = 5.0
    }
}
class ViewController: UIViewController {
    let stillImageOutput = AVCaptureStillImageOutput()
    var session: AVCaptureSession?
    var borderLayer: CAShapeLayer?
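    // Red-bordered overlay that gets repositioned onto the detected face rect.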
    let detailsView: DetailsView = {
        let detailsView = DetailsView()
        detailsView.setup()
        return detailsView
    }()
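    // Preview layer that shows the camera feed, filling the screen.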
    lazy var previewLayer: AVCaptureVideoPreviewLayer? = {
        var previewLay = AVCaptureVideoPreviewLayer(session: self.session!)
        previewLay?.videoGravity = AVLayerVideoGravityResizeAspectFill
        return previewLay
    }()
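    // First available front-facing camera, if any.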
    lazy var frontCamera: AVCaptureDevice? = {
        guard let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as? [AVCaptureDevice] else { return nil }
        return devices.filter { $0.position == .front }.first
    }()
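    // Core Image face detector; low accuracy keeps detection fast enough for real-time frames.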
    let faceDetector = CIDetector(ofType: CIDetectorTypeFace, context: nil, options: [CIDetectorAccuracy: CIDetectorAccuracyLow])
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer?.frame = view.frame
    }
    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        guard let previewLayer = previewLayer else { return }
        view.layer.addSublayer(previewLayer)
        view.addSubview(detailsView)
        view.bringSubview(toFront: detailsView)
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        sessionPrepare()
        session?.startRunning()
    }
    // Captures a still image and saves it to the photo library.
    func saveToCamera() {
        if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) {
            stillImageOutput.captureStillImageAsynchronously(from: videoConnection, completionHandler: { (sampleBuffer, error) in
                if let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer) {
                    if let cameraImage = UIImage(data: imageData) {
                        UIImageWriteToSavedPhotosAlbum(cameraImage, nil, nil, nil)
                    }
                }
            })
        }
    }
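    // Note: saveToCamera() is not called anywhere above; hook it up to a button or gesture if you want to capture a photo.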
}
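// MARK: - Capture session setup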
extension ViewController {
    func sessionPrepare() {
        session = AVCaptureSession()
        guard let session = session, let captureDevice = frontCamera else { return }
        session.sessionPreset = AVCaptureSessionPresetPhoto
        do {
            let deviceInput = try AVCaptureDeviceInput(device: captureDevice)
            session.beginConfiguration()
            stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]
            if session.canAddOutput(stillImageOutput) {
                session.addOutput(stillImageOutput)
            }
            if session.canAddInput(deviceInput) {
                session.addInput(deviceInput)
            }
            let output = AVCaptureVideoDataOutput()
            output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
            output.alwaysDiscardsLateVideoFrames = true
            if session.canAddOutput(output) {
                session.addOutput(output)
            }
            session.commitConfiguration()
            let queue = DispatchQueue(label: "output.queue")
            output.setSampleBufferDelegate(self, queue: queue)
        } catch {
            print("error creating AVCaptureDeviceInput: \(error)")
        }
    }
}
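// MARK: - Face detection on incoming video frames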
extension ViewController: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
        let attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate)
        let ciImage = CIImage(cvImageBuffer: pixelBuffer!, options: attachments as! [String : Any]?)
        let options: [String : Any] = [CIDetectorImageOrientation: exifOrientation(orientation: UIDevice.current.orientation),
                                       CIDetectorSmile: true,
                                       CIDetectorEyeBlink: true]
        let allFeatures = faceDetector?.features(in: ciImage, options: options)
        let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
        let cleanAperture = CMVideoFormatDescriptionGetCleanAperture(formatDescription!, false)
        guard let features = allFeatures else { return }
        for feature in features {
            if let faceFeature = feature as? CIFaceFeature {
                let faceRect = calculateFaceRect(facePosition: faceFeature.mouthPosition, faceBounds: faceFeature.bounds, clearAperture: cleanAperture)
                update(with: faceRect)
            }
        }
        if features.count == 0 {
            DispatchQueue.main.async {
                self.detailsView.alpha = 0.0
            }
        }
    }
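    // Maps the device orientation to the EXIF orientation value CIDetector expects for the front camera.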
    func exifOrientation(orientation: UIDeviceOrientation) -> Int {
        switch orientation {
        case .portraitUpsideDown:
            return 8
        case .landscapeLeft:
            return 3
        case .landscapeRight:
            return 1
        default:
            return 6
        }
    }
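    // Computes the rect the video content occupies inside the preview frame for a given clean aperture size.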
    func videoBox(frameSize: CGSize, apertureSize: CGSize) -> CGRect {
        let apertureRatio = apertureSize.height / apertureSize.width
        let viewRatio = frameSize.width / frameSize.height
        var size = CGSize.zero
        if viewRatio > apertureRatio {
            size.width = frameSize.width
            size.height = apertureSize.width * (frameSize.width / apertureSize.height)
        } else {
            size.width = apertureSize.height * (frameSize.height / apertureSize.width)
            size.height = frameSize.height
        }
        var videoBox = CGRect(origin: .zero, size: size)
        if size.width < frameSize.width {
            videoBox.origin.x = (frameSize.width - size.width) / 2.0
        } else {
            videoBox.origin.x = (size.width - frameSize.width) / 2.0
        }
        if size.height < frameSize.height {
            videoBox.origin.y = (frameSize.height - size.height) / 2.0
        } else {
            videoBox.origin.y = (size.height - frameSize.height) / 2.0
        }
        return videoBox
    }
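    // Converts a face rectangle from the detector's coordinate space into the preview layer's coordinate space, flipping the x axis for the mirrored front-camera preview.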
    func calculateFaceRect(facePosition: CGPoint, faceBounds: CGRect, clearAperture: CGRect) -> CGRect {
        let parentFrameSize = previewLayer!.frame.size
        let previewBox = videoBox(frameSize: parentFrameSize, apertureSize: clearAperture.size)
        var faceRect = faceBounds
        swap(&faceRect.size.width, &faceRect.size.height)
        swap(&faceRect.origin.x, &faceRect.origin.y)
        let widthScaleBy = previewBox.size.width / clearAperture.size.height
        let heightScaleBy = previewBox.size.height / clearAperture.size.width
        faceRect.size.width *= widthScaleBy
        faceRect.size.height *= heightScaleBy
        faceRect.origin.x *= widthScaleBy
        faceRect.origin.y *= heightScaleBy
        faceRect = faceRect.offsetBy(dx: 0.0, dy: previewBox.origin.y)
        let frame = CGRect(x: parentFrameSize.width - faceRect.origin.x - faceRect.size.width - previewBox.origin.x / 2.0,
                           y: faceRect.origin.y,
                           width: faceRect.width,
                           height: faceRect.height)
        return frame
    }
}
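// MARK: - Overlay updates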
extension ViewController {
    func update(with faceRect: CGRect) {
        DispatchQueue.main.async {
            UIView.animate(withDuration: 0.2) {
                self.detailsView.alpha = 1.0
                self.detailsView.frame = faceRect
            }
        }
    }
}
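One more thing worth noting: this only works if the app actually has camera access, so you must add an NSCameraUsageDescription entry to Info.plist, and it's safer to check the authorization status before starting the session. Below is a minimal sketch of a method you could add to ViewController for that; the helper name startSessionIfAuthorized is my own and not part of the code above.

// Hypothetical helper: only prepares and starts the session once camera access is granted.
// Requires an NSCameraUsageDescription entry in Info.plist.
func startSessionIfAuthorized() {
    switch AVCaptureDevice.authorizationStatus(forMediaType: AVMediaTypeVideo) {
    case .authorized:
        sessionPrepare()
        session?.startRunning()
    case .notDetermined:
        AVCaptureDevice.requestAccess(forMediaType: AVMediaTypeVideo) { granted in
            guard granted else { return }
            DispatchQueue.main.async {
                self.sessionPrepare()
                self.session?.startRunning()
            }
        }
    default:
        print("Camera access denied or restricted")
    }
}

If you use it, call startSessionIfAuthorized() from viewDidLoad instead of calling sessionPrepare() and startRunning() directly.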