
I'm trying to capture camera video in memory using AVCaptureSession so that I can later write the video data out to a movie file. While I've been able to successfully start a capture session, I'm unable to successfully write the captured CMSampleBuffers to a compressed movie file using AVAssetWriter: the AVAssetWriterInput append fails with error code -11800 (AVErrorUnknown), underlying error code -12780.

Appending sample buffers using AVAssetWriterInput's append method fails, and when I inspect the AVAssetWriter's error property, I get the following:

Error Domain=AVFoundationErrorDomain Code=-11800 "The operation could not be completed" UserInfo={NSUnderlyingError=0x17005d070 {Error Domain=NSOSStatusErrorDomain Code=-12780 "(null)"}, NSLocalizedFailureReason=An unknown error occurred (-12780), NSLocalizedDescription=The operation could not be completed}
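
For reference, the underlying OSStatus can be dug out of the writer's error through NSUnderlyingErrorKey. A minimal sketch, assuming videoFileWriter is the AVAssetWriter from the code below:

if let error = videoFileWriter.error as NSError? {
    print("AVFoundation error code: \(error.code)")  // -11800 (AVErrorUnknown)
    // AVFoundation nests the NSOSStatusErrorDomain error under NSUnderlyingErrorKey.
    if let underlying = error.userInfo[NSUnderlyingErrorKey] as? NSError {
        print("Underlying OSStatus: \(underlying.code)")  // -12780 here
    }
}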

As far as I can tell, -11800 indicates AVErrorUnknown, but I've been unable to find any information about the -12780 error code, which as far as I can tell is undocumented. Below I've pasted the main files of the sample project I set up to reproduce this issue.

Any guidance would be greatly appreciated. Thanks!

ViewController.swift

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue") 
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue") 
    private let session = AVCaptureSession() 
    private var backfillSampleBufferList = [CMSampleBuffer]() 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     session.sessionPreset = AVCaptureSessionPreset640x480 

     let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
     let videoDeviceInput: AVCaptureDeviceInput

     do { 
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) 
     } catch { 
      print("Error creating device input from video device: \(error).") 
      return 
     } 

     guard session.canAddInput(videoDeviceInput) else { 
      print("Could not add video device input to capture session.") 
      return 
     } 

     session.addInput(videoDeviceInput) 

     let videoDataOutput = AVCaptureVideoDataOutput() 
     videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ] 
     videoDataOutput.alwaysDiscardsLateVideoFrames = true 
     videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 

     guard session.canAddOutput(videoDataOutput) else { 
      print("Could not add video data output to capture session.") 
      return 
     } 

     session.addOutput(videoDataOutput) 
     videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true 

     session.startRunning() 
    } 

    private func backfillSizeInSeconds() -> Double { 
     if backfillSampleBufferList.count < 1 { 
      return 0.0 
     } 

     let earliestSampleBuffer = backfillSampleBufferList.first! 
     let latestSampleBuffer = backfillSampleBufferList.last! 

     let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value 
     let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value 
     let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale 

     return Double(latestSampleBufferPTS - earliestSampleBufferPTS)/Double(timescale) 
    } 

    private func createClipFromBackfill() { 
     guard backfillSampleBufferList.count > 0 else { 
      print("createClipFromBackfill() called before any samples were recorded.") 
      return 
     } 

     let clipURL = URL(fileURLWithPath: 
      NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + 
      "/recorded_clip.mp4") 

     if FileManager.default.fileExists(atPath: clipURL.path) { 
      do { 
       try FileManager.default.removeItem(atPath: clipURL.path) 
      } catch { 
       print("Could not delete existing clip file: \(error).") 
      } 
     } 

     var _videoFileWriter: AVAssetWriter? 
     do { 
      _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeQuickTimeMovie) 
     } catch { 
      print("Could not create video file writer: \(error).") 
      return 
     } 

     guard let videoFileWriter = _videoFileWriter else { 
      print("Video writer was nil.") 
      return 
     } 

     let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)! 

     guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else { 
      print("Video file writer could not apply video output settings.") 
      return 
     } 

     let earliestRecordedSampleBuffer = backfillSampleBufferList.first! 

     let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer) 
     guard let formatDescription = _formatDescription else { 
      print("Earliest recording pixel buffer format description was nil.") 
      return 
     } 

     let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, 
                outputSettings: settingsAssistant.videoSettings, 
                sourceFormatHint: formatDescription) 

     guard videoFileWriter.canAdd(videoWriterInput) else { 
      print("Could not add video writer input to video file writer.") 
      return 
     } 

     videoFileWriter.add(videoWriterInput) 

     guard videoFileWriter.startWriting() else { 
      print("Video file writer not ready to write file.") 
      return 
     } 

     videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer)) 

     videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) { 
      while videoWriterInput.isReadyForMoreMediaData { 
       if self.backfillSampleBufferList.count > 0 { 
        let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy() 
        let appendSampleBufferSucceeded = videoWriterInput.append(sampleBufferToAppend) 
        if !appendSampleBufferSucceeded { 
         print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)") 
         print("Video file writer status: \(videoFileWriter.status.rawValue)") 
        } 

        self.backfillSampleBufferList.remove(at: 0) 
       } else { 
        videoWriterInput.markAsFinished() 
        videoFileWriter.finishWriting { 
         print("Saved clip to \(clipURL)") 
        } 

        break 
       } 
      } 
     } 
    } 

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     guard let buffer = sampleBuffer else { 
      print("Captured sample buffer was nil.") 
      return 
     } 

     let sampleBufferCopy = buffer.deepCopy() 

     backfillSampleBufferList.append(sampleBufferCopy) 

     if backfillSizeInSeconds() > 3.0 { 
      session.stopRunning() 
      createClipFromBackfill() 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didDrop sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     print("Sample buffer dropped.") 
    } 

} 

CVPixelBuffer+Copy.swift

import CoreVideo 

extension CVPixelBuffer { 
    func deepCopy() -> CVPixelBuffer { 
     precondition(CFGetTypeID(self) == CVPixelBufferGetTypeID(), "deepCopy() cannot copy a non-CVPixelBuffer") 

     var _copy : CVPixelBuffer? 
     CVPixelBufferCreate(
      nil, 
      CVPixelBufferGetWidth(self), 
      CVPixelBufferGetHeight(self), 
      CVPixelBufferGetPixelFormatType(self), 
      CVBufferGetAttachments(self, CVAttachmentMode.shouldPropagate), 
      &_copy) 

     guard let copy = _copy else { 
      print("Pixel buffer copy was nil.") 
      fatalError() 
     } 

     CVBufferPropagateAttachments(self, copy) 
     CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags.readOnly) 
     CVPixelBufferLockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0)) 

     let sourceBaseAddress = CVPixelBufferGetBaseAddress(self) 
     let copyBaseAddress = CVPixelBufferGetBaseAddress(copy) 
     memcpy(copyBaseAddress, sourceBaseAddress, CVPixelBufferGetHeight(self) * CVPixelBufferGetBytesPerRow(self)) 

     CVPixelBufferUnlockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0)) 
     CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags.readOnly) 

     return copy 
    } 
} 

CMSampleBuffer+Copy.swift

import CoreMedia 

extension CMSampleBuffer { 
    func deepCopy() -> CMSampleBuffer { 
     let _pixelBuffer = CMSampleBufferGetImageBuffer(self) 
     guard let pixelBuffer = _pixelBuffer else { 
      print("Pixel buffer to copy was nil.") 
      fatalError() 
     } 
     let pixelBufferCopy = pixelBuffer.deepCopy() 

     let _formatDescription = CMSampleBufferGetFormatDescription(self) 
     guard let formatDescription = _formatDescription else { 
      print("Format description to copy was nil.") 
      fatalError() 
     } 

     var timingInfo = kCMTimingInfoInvalid 
     let getTimingInfoResult = CMSampleBufferGetSampleTimingInfo(self, 0, &timingInfo) 
     guard getTimingInfoResult == noErr else { 
      print("Could not get timing info to copy: \(getTimingInfoResult).") 
      fatalError() 
     } 

     timingInfo.presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(self) 

     var _copy : CMSampleBuffer? 
     let createCopyResult = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, 
                    pixelBufferCopy, 
                    true, 
                    nil, 
                    nil, 
                    formatDescription, 
                    &timingInfo, 
                    &_copy)

     guard createCopyResult == noErr else { 
      print("Error creating copy of sample buffer: \(createCopyResult).") 
      fatalError() 
     } 

     guard let copy = _copy else { 
      print("Copied sample buffer was nil.") 
      fatalError() 
     } 

     return copy 
    } 
} 

Answers


After more research and experimentation, it appears that appending the CVPixelBuffers of the CMSampleBuffers I'm storing to the AVAssetWriterInput via an AVAssetWriterInputPixelBufferAdaptor works without producing an error.

Below is a modified version of the ViewController.swift implementation that adds the pixel buffers through an AVAssetWriterInputPixelBufferAdaptor.
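
The relevant changes, excerpted from the full listing below, are creating the adaptor once the writer input has been added, and then appending each frame's CVPixelBuffer with an explicit presentation time instead of appending the CMSampleBuffer itself:

let pixelAdapterBufferAttributes = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCMPixelFormat_32BGRA) ]
let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput,
               sourcePixelBufferAttributes: pixelAdapterBufferAttributes)

// ...

pixelAdapter.append(CMSampleBufferGetImageBuffer(sampleBufferToAppend)!,
        withPresentationTime: CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferToAppend))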

ViewController.swift

import UIKit 
import AVFoundation 
import Photos 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue") 
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue") 
    private let session = AVCaptureSession() 
    private var backfillSampleBufferList = [CMSampleBuffer]() 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     session.sessionPreset = AVCaptureSessionPreset640x480 

     let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
     let videoDeviceInput: AVCaptureDeviceInput

     do { 
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) 
     } catch { 
      print("Error creating device input from video device: \(error).") 
      return 
     } 

     guard session.canAddInput(videoDeviceInput) else { 
      print("Could not add video device input to capture session.") 
      return 
     } 

     session.addInput(videoDeviceInput) 

     let videoDataOutput = AVCaptureVideoDataOutput() 
     videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ] 
     videoDataOutput.alwaysDiscardsLateVideoFrames = true 
     videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 

     guard session.canAddOutput(videoDataOutput) else { 
      print("Could not add video data output to capture session.") 
      return 
     } 

     session.addOutput(videoDataOutput) 
     videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true 

     session.startRunning() 
    } 

    private func backfillSizeInSeconds() -> Double { 
     if backfillSampleBufferList.count < 1 { 
      return 0.0 
     } 

     let earliestSampleBuffer = backfillSampleBufferList.first! 
     let latestSampleBuffer = backfillSampleBufferList.last! 

     let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value 
     let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value 
     let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale 

     return Double(latestSampleBufferPTS - earliestSampleBufferPTS)/Double(timescale) 
    } 

    private func createClipFromBackfill() { 
     guard backfillSampleBufferList.count > 0 else { 
      print("createClipFromBackfill() called before any samples were recorded.") 
      return 
     } 

     let clipURL = URL(fileURLWithPath: 
      NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + 
      "/recorded_clip.mp4") 

     if FileManager.default.fileExists(atPath: clipURL.path) { 
      do { 
       try FileManager.default.removeItem(atPath: clipURL.path) 
      } catch { 
       print("Could not delete existing clip file: \(error).") 
      } 
     } 

     var _videoFileWriter: AVAssetWriter? 
     do { 
      _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeMPEG4) 
     } catch { 
      print("Could not create video file writer: \(error).") 
      return 
     } 

     guard let videoFileWriter = _videoFileWriter else { 
      print("Video writer was nil.") 
      return 
     } 

     let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)! 

     guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else { 
      print("Video file writer could not apply video output settings.") 
      return 
     } 

     let earliestRecordedSampleBuffer = backfillSampleBufferList.first! 

     let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer) 
     guard let formatDescription = _formatDescription else { 
      print("Earliest recording pixel buffer format description was nil.") 
      return 
     } 

     let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, 
                outputSettings: settingsAssistant.videoSettings, 
                sourceFormatHint: formatDescription) 

     guard videoFileWriter.canAdd(videoWriterInput) else { 
      print("Could not add video writer input to video file writer.") 
      return 
     } 

     videoFileWriter.add(videoWriterInput) 

     let pixelAdapterBufferAttributes = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCMPixelFormat_32BGRA) ] 
     let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, 
                   sourcePixelBufferAttributes: pixelAdapterBufferAttributes) 

     guard videoFileWriter.startWriting() else { 
      print("Video file writer not ready to write file.") 
      return 
     } 

     videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer)) 

     videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) { 
      while videoWriterInput.isReadyForMoreMediaData { 
       if self.backfillSampleBufferList.count > 0 { 
        let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy() 
        let appendSampleBufferSucceeded = pixelAdapter.append(CMSampleBufferGetImageBuffer(sampleBufferToAppend)!, 
                      withPresentationTime: CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferToAppend)) 
        if !appendSampleBufferSucceeded { 
         print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)") 
         print("Video file writer status: \(videoFileWriter.status.rawValue)") 
        } 

        self.backfillSampleBufferList.remove(at: 0) 
       } else { 
        videoWriterInput.markAsFinished() 
        videoFileWriter.finishWriting { 
         print("Saving clip to \(clipURL)") 
        } 

        break 
       } 
      } 
     } 
    } 

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     guard let buffer = sampleBuffer else { 
      print("Captured sample buffer was nil.") 
      return 
     } 

     let sampleBufferCopy = buffer.deepCopy() 

     backfillSampleBufferList.append(sampleBufferCopy) 

     if backfillSizeInSeconds() > 3.0 { 
      session.stopRunning() 
      createClipFromBackfill() 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didDrop sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     print("Sample buffer dropped.") 
    } 

} 

I also ran into this while trying to synthesize video. I finally figured out that -[AVAssetWriterInput appendSampleBuffer:] only works on device (at least as of iOS 11.2.6) if the underlying pixel buffer is backed by an IOSurface.

If you modify the CVPixelBuffer.deepCopy() method to include the (id)kCVPixelBufferIOSurfacePropertiesKey: @{} key-value pair in the attributes dictionary passed to CVPixelBufferCreate, it will likely work.
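
A minimal sketch of that change in Swift follows. The method name deepCopyIOSurfaceBacked() is illustrative. Here the IOSurface key is passed as the only creation attribute, with the source's attachments propagated afterwards as in the original code, and the copy is done row by row because an IOSurface-backed buffer may use different bytes-per-row padding than the source:

import CoreVideo

extension CVPixelBuffer {
    // Sketch: same deep copy as above, but passing pixel buffer attributes that
    // include kCVPixelBufferIOSurfacePropertiesKey so the copy is IOSurface-backed.
    // An empty dictionary lets Core Video choose default IOSurface properties.
    func deepCopyIOSurfaceBacked() -> CVPixelBuffer? {
        let attributes = [
            kCVPixelBufferIOSurfacePropertiesKey as String : [:] as [String : Any]
        ] as CFDictionary

        var _copy: CVPixelBuffer?
        CVPixelBufferCreate(kCFAllocatorDefault,
                            CVPixelBufferGetWidth(self),
                            CVPixelBufferGetHeight(self),
                            CVPixelBufferGetPixelFormatType(self),
                            attributes,
                            &_copy)

        guard let copy = _copy else { return nil }

        CVBufferPropagateAttachments(self, copy)
        CVPixelBufferLockBaseAddress(self, .readOnly)
        CVPixelBufferLockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0))

        // Copy row by row: the IOSurface-backed copy may be padded to a
        // different bytes-per-row than the source buffer.
        let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(self)
        let copyBytesPerRow = CVPixelBufferGetBytesPerRow(copy)
        let bytesPerRowToCopy = min(sourceBytesPerRow, copyBytesPerRow)
        if var sourceAddress = CVPixelBufferGetBaseAddress(self),
           var copyAddress = CVPixelBufferGetBaseAddress(copy) {
            for _ in 0..<CVPixelBufferGetHeight(self) {
                memcpy(copyAddress, sourceAddress, bytesPerRowToCopy)
                sourceAddress += sourceBytesPerRow
                copyAddress += copyBytesPerRow
            }
        }

        CVPixelBufferUnlockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0))
        CVPixelBufferUnlockBaseAddress(self, .readOnly)

        return copy
    }
}

With an IOSurface-backed copy, the appendSampleBuffer-based approach from the question should, per the above, no longer fail with -12780.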
