0

Ich versuche, Kamera-Video im Speicher mit AVCaptureSession zu erfassen, damit ich später die Videodaten in eine Filmdatei schreiben kann. Obwohl ich eine Capture-Sitzung erfolgreich starten konnte, kann ich die von mir erfassten CMSampleBuffers nicht erfolgreich mit AVAssetWriter in eine komprimierte Filmdatei schreiben.AVAssetWriterInput Append schlägt mit Fehlercode -11800 fehl AVErrorUnknown -12780

Anfügen Abtastpuffern AVAssetWriterInput der Append-Methode schlägt fehl, und wenn ich die Fehler Unterkunft AVAssetWriter prüfen, erhalte ich die folgende:

Fehler Domain = AVFoundationErrorDomain-Code = -11.800 „Der Vorgang konnte nicht abgeschlossen werden“ Userinfo = {NSUnderlyingError = 0x17005d070 {Fehler Domain = NSOSStatusErrorDomain-Code = -12.780 "(null)"}, = NSLocalizedFailureReason Ein unbekannter Fehler ist aufgetreten (-12.780), NSLocalizedDescription = Der Vorgang konnte nicht abgeschlossen werden}

Soweit ich sagen kann, -11800 zeigt einen AVErrorUnknown an, jedoch konnte ich keine Informationen über den Fehlercode -12780 finden, der, soweit ich feststellen kann, nicht dokumentiert ist . Im Folgenden habe ich die Hauptdateien in das Beispielprojekt eingefügt, das ich eingerichtet habe, um das Problem zu demonstrieren.

Jede Anleitung würde sehr geschätzt werden. Vielen Dank!

ViewController.swift

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue") 
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue") 
    private let session = AVCaptureSession() 
    private var backfillSampleBufferList = [CMSampleBuffer]() 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     session.sessionPreset = AVCaptureSessionPreset640x480 

     let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo); 
     let videoDeviceInput: AVCaptureDeviceInput; 

     do { 
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) 
     } catch { 
      print("Error creating device input from video device: \(error).") 
      return 
     } 

     guard session.canAddInput(videoDeviceInput) else { 
      print("Could not add video device input to capture session.") 
      return 
     } 

     session.addInput(videoDeviceInput) 

     let videoDataOutput = AVCaptureVideoDataOutput() 
     videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ] 
     videoDataOutput.alwaysDiscardsLateVideoFrames = true 
     videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 

     guard session.canAddOutput(videoDataOutput) else { 
      print("Could not add video data output to capture session.") 
      return 
     } 

     session.addOutput(videoDataOutput) 
     videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true 

     session.startRunning() 
    } 

    private func backfillSizeInSeconds() -> Double { 
     if backfillSampleBufferList.count < 1 { 
      return 0.0 
     } 

     let earliestSampleBuffer = backfillSampleBufferList.first! 
     let latestSampleBuffer = backfillSampleBufferList.last! 

     let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value 
     let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value 
     let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale 

     return Double(latestSampleBufferPTS - earliestSampleBufferPTS)/Double(timescale) 
    } 

    private func createClipFromBackfill() { 
     guard backfillSampleBufferList.count > 0 else { 
      print("createClipFromBackfill() called before any samples were recorded.") 
      return 
     } 

     let clipURL = URL(fileURLWithPath: 
      NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + 
      "/recorded_clip.mp4") 

     if FileManager.default.fileExists(atPath: clipURL.path) { 
      do { 
       try FileManager.default.removeItem(atPath: clipURL.path) 
      } catch { 
       print("Could not delete existing clip file: \(error).") 
      } 
     } 

     var _videoFileWriter: AVAssetWriter? 
     do { 
      _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeQuickTimeMovie) 
     } catch { 
      print("Could not create video file writer: \(error).") 
      return 
     } 

     guard let videoFileWriter = _videoFileWriter else { 
      print("Video writer was nil.") 
      return 
     } 

     let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)! 

     guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else { 
      print("Video file writer could not apply video output settings.") 
      return 
     } 

     let earliestRecordedSampleBuffer = backfillSampleBufferList.first! 

     let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer) 
     guard let formatDescription = _formatDescription else { 
      print("Earliest recording pixel buffer format description was nil.") 
      return 
     } 

     let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, 
                outputSettings: settingsAssistant.videoSettings, 
                sourceFormatHint: formatDescription) 

     guard videoFileWriter.canAdd(videoWriterInput) else { 
      print("Could not add video writer input to video file writer.") 
      return 
     } 

     videoFileWriter.add(videoWriterInput) 

     guard videoFileWriter.startWriting() else { 
      print("Video file writer not ready to write file.") 
      return 
     } 

     videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer)) 

     videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) { 
      while videoWriterInput.isReadyForMoreMediaData { 
       if self.backfillSampleBufferList.count > 0 { 
        let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy() 
        let appendSampleBufferSucceeded = videoWriterInput.append(sampleBufferToAppend) 
        if !appendSampleBufferSucceeded { 
         print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)") 
         print("Video file writer status: \(videoFileWriter.status.rawValue)") 
        } 

        self.backfillSampleBufferList.remove(at: 0) 
       } else { 
        videoWriterInput.markAsFinished() 
        videoFileWriter.finishWriting { 
         print("Saved clip to \(clipURL)") 
        } 

        break 
       } 
      } 
     } 
    } 

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     guard let buffer = sampleBuffer else { 
      print("Captured sample buffer was nil.") 
      return 
     } 

     let sampleBufferCopy = buffer.deepCopy() 

     backfillSampleBufferList.append(sampleBufferCopy) 

     if backfillSizeInSeconds() > 3.0 { 
      session.stopRunning() 
      createClipFromBackfill() 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didDrop sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     print("Sample buffer dropped.") 
    } 

} 

CVPixelBuffer + Copy.swift:

import CoreVideo 

extension CVPixelBuffer { 
    func deepCopy() -> CVPixelBuffer { 
     precondition(CFGetTypeID(self) == CVPixelBufferGetTypeID(), "deepCopy() cannot copy a non-CVPixelBuffer") 

     var _copy : CVPixelBuffer? 
     CVPixelBufferCreate(
      nil, 
      CVPixelBufferGetWidth(self), 
      CVPixelBufferGetHeight(self), 
      CVPixelBufferGetPixelFormatType(self), 
      CVBufferGetAttachments(self, CVAttachmentMode.shouldPropagate), 
      &_copy) 

     guard let copy = _copy else { 
      print("Pixel buffer copy was nil.") 
      fatalError() 
     } 

     CVBufferPropagateAttachments(self, copy) 
     CVPixelBufferLockBaseAddress(self, CVPixelBufferLockFlags.readOnly) 
     CVPixelBufferLockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0)) 

     let sourceBaseAddress = CVPixelBufferGetBaseAddress(self) 
     let copyBaseAddress = CVPixelBufferGetBaseAddress(copy) 
     memcpy(copyBaseAddress, sourceBaseAddress, CVPixelBufferGetHeight(self) * CVPixelBufferGetBytesPerRow(self)) 

     CVPixelBufferUnlockBaseAddress(copy, CVPixelBufferLockFlags(rawValue: 0)) 
     CVPixelBufferUnlockBaseAddress(self, CVPixelBufferLockFlags.readOnly) 

     return copy 
    } 
} 

CMSampleBuffer + Copy.swift:

import CoreMedia 

extension CMSampleBuffer { 
    func deepCopy() -> CMSampleBuffer { 
     let _pixelBuffer = CMSampleBufferGetImageBuffer(self) 
     guard let pixelBuffer = _pixelBuffer else { 
      print("Pixel buffer to copy was nil.") 
      fatalError() 
     } 
     let pixelBufferCopy = pixelBuffer.deepCopy() 

     let _formatDescription = CMSampleBufferGetFormatDescription(self) 
     guard let formatDescription = _formatDescription else { 
      print("Format description to copy was nil.") 
      fatalError() 
     } 

     var timingInfo = kCMTimingInfoInvalid 
     let getTimingInfoResult = CMSampleBufferGetSampleTimingInfo(self, 0, &timingInfo) 
     guard getTimingInfoResult == noErr else { 
      print("Could not get timing info to copy: \(getTimingInfoResult).") 
      fatalError() 
     } 

     timingInfo.presentationTimeStamp = CMSampleBufferGetOutputPresentationTimeStamp(self) 

     var _copy : CMSampleBuffer? 
     let createCopyResult = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, 
                    pixelBufferCopy, 
                    true, 
                    nil, 
                    nil, 
                    formatDescription, 
                    &timingInfo, 
                    &_copy); 

     guard createCopyResult == noErr else { 
      print("Error creating copy of sample buffer: \(createCopyResult).") 
      fatalError() 
     } 

     guard let copy = _copy else { 
      print("Copied sample buffer was nil.") 
      fatalError() 
     } 

     return copy 
    } 
} 

Antwort

0

Nach mehr Forschung und Experimente scheint es AVAssetWriterInputPixelBufferAdaptor mit um die CVPixelBuffers der CMSampleBuffers anzuhängen, die ich auf dem AV speichere AssetWriterInput funktioniert, ohne einen Fehler zu generieren.

Unten ist die modifizierte Version der ViewController.swift-Implementierung, die AVAssetWriterInputPixelBufferAdaptor verwendet, um Pixelpuffer anzufügen.

ViewController.swift

import UIKit 
import AVFoundation 
import Photos 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate { 

    private let recordingClipQueue = DispatchQueue(label: "com.example.recordingClipQueue") 
    private let videoDataOutputQueue = DispatchQueue(label: "com.example.videoDataOutputQueue") 
    private let session = AVCaptureSession() 
    private var backfillSampleBufferList = [CMSampleBuffer]() 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     session.sessionPreset = AVCaptureSessionPreset640x480 

     let videoDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo); 
     let videoDeviceInput: AVCaptureDeviceInput; 

     do { 
      videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice) 
     } catch { 
      print("Error creating device input from video device: \(error).") 
      return 
     } 

     guard session.canAddInput(videoDeviceInput) else { 
      print("Could not add video device input to capture session.") 
      return 
     } 

     session.addInput(videoDeviceInput) 

     let videoDataOutput = AVCaptureVideoDataOutput() 
     videoDataOutput.videoSettings = [ kCVPixelBufferPixelFormatTypeKey as NSString : Int(kCMPixelFormat_32BGRA) ] 
     videoDataOutput.alwaysDiscardsLateVideoFrames = true 
     videoDataOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue) 

     guard session.canAddOutput(videoDataOutput) else { 
      print("Could not add video data output to capture session.") 
      return 
     } 

     session.addOutput(videoDataOutput) 
     videoDataOutput.connection(withMediaType: AVMediaTypeVideo).isEnabled = true 

     session.startRunning() 
    } 

    private func backfillSizeInSeconds() -> Double { 
     if backfillSampleBufferList.count < 1 { 
      return 0.0 
     } 

     let earliestSampleBuffer = backfillSampleBufferList.first! 
     let latestSampleBuffer = backfillSampleBufferList.last! 

     let earliestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(earliestSampleBuffer).value 
     let latestSampleBufferPTS = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).value 
     let timescale = CMSampleBufferGetOutputPresentationTimeStamp(latestSampleBuffer).timescale 

     return Double(latestSampleBufferPTS - earliestSampleBufferPTS)/Double(timescale) 
    } 

    private func createClipFromBackfill() { 
     guard backfillSampleBufferList.count > 0 else { 
      print("createClipFromBackfill() called before any samples were recorded.") 
      return 
     } 

     let clipURL = URL(fileURLWithPath: 
      NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + 
      "/recorded_clip.mp4") 

     if FileManager.default.fileExists(atPath: clipURL.path) { 
      do { 
       try FileManager.default.removeItem(atPath: clipURL.path) 
      } catch { 
       print("Could not delete existing clip file: \(error).") 
      } 
     } 

     var _videoFileWriter: AVAssetWriter? 
     do { 
      _videoFileWriter = try AVAssetWriter(url: clipURL, fileType: AVFileTypeMPEG4) 
     } catch { 
      print("Could not create video file writer: \(error).") 
      return 
     } 

     guard let videoFileWriter = _videoFileWriter else { 
      print("Video writer was nil.") 
      return 
     } 

     let settingsAssistant = AVOutputSettingsAssistant(preset: AVOutputSettingsPreset640x480)! 

     guard videoFileWriter.canApply(outputSettings: settingsAssistant.videoSettings, forMediaType: AVMediaTypeVideo) else { 
      print("Video file writer could not apply video output settings.") 
      return 
     } 

     let earliestRecordedSampleBuffer = backfillSampleBufferList.first! 

     let _formatDescription = CMSampleBufferGetFormatDescription(earliestRecordedSampleBuffer) 
     guard let formatDescription = _formatDescription else { 
      print("Earliest recording pixel buffer format description was nil.") 
      return 
     } 

     let videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, 
                outputSettings: settingsAssistant.videoSettings, 
                sourceFormatHint: formatDescription) 

     guard videoFileWriter.canAdd(videoWriterInput) else { 
      print("Could not add video writer input to video file writer.") 
      return 
     } 

     videoFileWriter.add(videoWriterInput) 

     let pixelAdapterBufferAttributes = [ kCVPixelBufferPixelFormatTypeKey as String : Int(kCMPixelFormat_32BGRA) ] 
     let pixelAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoWriterInput, 
                   sourcePixelBufferAttributes: pixelAdapterBufferAttributes) 

     guard videoFileWriter.startWriting() else { 
      print("Video file writer not ready to write file.") 
      return 
     } 

     videoFileWriter.startSession(atSourceTime: CMSampleBufferGetOutputPresentationTimeStamp(earliestRecordedSampleBuffer)) 

     videoWriterInput.requestMediaDataWhenReady(on: recordingClipQueue) { 
      while videoWriterInput.isReadyForMoreMediaData { 
       if self.backfillSampleBufferList.count > 0 { 
        let sampleBufferToAppend = self.backfillSampleBufferList.first!.deepCopy() 
        let appendSampleBufferSucceeded = pixelAdapter.append(CMSampleBufferGetImageBuffer(sampleBufferToAppend)!, 
                      withPresentationTime: CMSampleBufferGetOutputPresentationTimeStamp(sampleBufferToAppend)) 
        if !appendSampleBufferSucceeded { 
         print("Failed to append sample buffer to asset writer input: \(videoFileWriter.error!)") 
         print("Video file writer status: \(videoFileWriter.status.rawValue)") 
        } 

        self.backfillSampleBufferList.remove(at: 0) 
       } else { 
        videoWriterInput.markAsFinished() 
        videoFileWriter.finishWriting { 
         print("Saving clip to \(clipURL)") 
        } 

        break 
       } 
      } 
     } 
    } 

    // MARK: AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     guard let buffer = sampleBuffer else { 
      print("Captured sample buffer was nil.") 
      return 
     } 

     let sampleBufferCopy = buffer.deepCopy() 

     backfillSampleBufferList.append(sampleBufferCopy) 

     if backfillSizeInSeconds() > 3.0 { 
      session.stopRunning() 
      createClipFromBackfill() 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput!, 
         didDrop sampleBuffer: CMSampleBuffer!, 
         from connection: AVCaptureConnection!) { 
     print("Sample buffer dropped.") 
    } 

} 
0

Ich lief auch in dieser beim Versuch, Videos zu synthetisieren. Ich fand schließlich heraus, dass -[AVAssetWriterInput appendSampleBuffer:] funktioniert nur auf Gerät (wie von iOS 11.2.6 sowieso), wenn der zugrunde liegende Pixel-Puffer durch eine IOSurface unterstützt wird.

Wenn Sie Ihre CVPixelBuffer.deepCopy() Methode ändern Sie die (id)kCVPixelBufferIOSurfacePropertiesKey: @{} Schlüssel-Wert-Paar in den Attributen zu CVPixelBufferCreate passieren Wörterbuch, das Sie sind, wird es wahrscheinlich funktionieren.

Verwandte Themen