Recording video with real-time filters in Swift

2017-07-18

I'm new to Swift and I'm trying to build a camera app that can apply filters in real time and save the video with those filters applied.

So far I can see a live preview with the filters applied, but when I save the video, everything is black.

import UIKit 
import AVFoundation 
import AssetsLibrary 
import CoreMedia 
import Photos 

class ViewController: UIViewController , AVCaptureVideoDataOutputSampleBufferDelegate { 

    var captureSession: AVCaptureSession! 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet weak var recordButtton: UIButton! 
    @IBOutlet weak var imageView: UIImageView! 

    var assetWriter: AVAssetWriter? 
    var assetWriterPixelBufferInput: AVAssetWriterInputPixelBufferAdaptor? 
    var isWriting = false 
    var currentSampleTime: CMTime? 
    var currentVideoDimensions: CMVideoDimensions? 

    override func viewDidLoad() { 
     super.viewDidLoad() 
     FilterVendor.register() 
     setupCaptureSession() 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
    } 

    func setupCaptureSession() { 
     // Assign to the instance property instead of shadowing it with a local 
     captureSession = AVCaptureSession() 
     captureSession.sessionPreset = AVCaptureSessionPresetPhoto 

     guard let captureDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo), let input = try? AVCaptureDeviceInput(device: captureDevice) else { 
      print("Can't access the camera") 
      return 
     } 

     if captureSession.canAddInput(input) { 
      captureSession.addInput(input) 
     } 

     let videoOutput = AVCaptureVideoDataOutput() 

     videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue.main) 
     if captureSession.canAddOutput(videoOutput) { 
      captureSession.addOutput(videoOutput) 
     } 

     let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
      if let previewLayer = previewLayer { 
       // Give the layer a frame so the preview is actually visible 
       previewLayer.frame = view.bounds 
       view.layer.addSublayer(previewLayer) 
      } 

     captureSession.startRunning() 
    } 

    @IBAction func record(_ sender: Any) { 
     if isWriting { 
      print("stop record") 
      self.isWriting = false 
      assetWriterPixelBufferInput = nil 
      assetWriter?.finishWriting(completionHandler: {[unowned self]() -> Void in 
       self.saveMovieToCameraRoll() 
      }) 
     } else { 
      print("start record") 
      createWriter() 
      assetWriter?.startWriting() 
      assetWriter?.startSession(atSourceTime: currentSampleTime!) 
      isWriting = true 
     } 
    } 

    func saveMovieToCameraRoll() { 
     PHPhotoLibrary.shared().performChanges({ 
      PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: self.movieURL()) 
     }) { saved, error in 
      if saved { 
       print("saved") 
      } 
     } 
    } 

    func movieURL() -> URL { 
     let tempDir = NSTemporaryDirectory() 
     return URL(fileURLWithPath: tempDir).appendingPathComponent("tmpMov.mov") 
    } 

    func checkForAndDeleteFile() { 
     let fm = FileManager.default 
     let url = movieURL() 
      let exist = fm.fileExists(atPath: url.path) 

     if exist { 
      do { 
       try fm.removeItem(at: url) 
      } catch let error as NSError { 
       print(error.localizedDescription) 
      } 
     } 
    } 

    func createWriter() { 
     self.checkForAndDeleteFile() 

     do { 
      assetWriter = try AVAssetWriter(outputURL: movieURL(), fileType: AVFileTypeQuickTimeMovie) 
     } catch let error as NSError { 
      print(error.localizedDescription) 
      return 
     } 

     let outputSettings = [ 
      AVVideoCodecKey : AVVideoCodecH264, 
      AVVideoWidthKey : Int(currentVideoDimensions!.width), 
      AVVideoHeightKey : Int(currentVideoDimensions!.height) 
     ] as [String : Any] 

     let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings) 
     assetWriterVideoInput.expectsMediaDataInRealTime = true 
     assetWriterVideoInput.transform = CGAffineTransform(rotationAngle: CGFloat.pi/2.0) 

     let sourcePixelBufferAttributesDictionary = [ 
      String(kCVPixelBufferPixelFormatTypeKey) : Int(kCVPixelFormatType_32BGRA), 
      String(kCVPixelBufferWidthKey) : Int(currentVideoDimensions!.width), 
      String(kCVPixelBufferHeightKey) : Int(currentVideoDimensions!.height), 
      String(kCVPixelBufferOpenGLESCompatibilityKey) : kCFBooleanTrue 
     ] as [String : Any] 

     assetWriterPixelBufferInput = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: assetWriterVideoInput, 
                      sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary) 

     if assetWriter!.canAdd(assetWriterVideoInput) { 
      assetWriter!.add(assetWriterVideoInput) 
     } else { 
      print("no way\(assetWriterVideoInput)") 
     } 
    } 

    func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
     autoreleasepool { 

      connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

      guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 
      let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

      let filter = CIFilter(name: "Fİlter")! 
      filter.setValue(cameraImage, forKey: kCIInputImageKey) 


      let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
      self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
      self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

      if self.isWriting { 
       if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
        var newPixelBuffer: CVPixelBuffer? = nil 

        CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

        let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

        if success == false { 
         print("Pixel Buffer failed") 
        } 
       } 
      } 

      DispatchQueue.main.async { 

       if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
        let filteredImage = UIImage(ciImage: outputValue) 
        self.imageView.image = filteredImage 
       } 
      } 
     } 
    } 
} 

Have you tried saving a video without the filter? – Simon


@Simon No difference :( – hackio

Answer


I've added some comments on the critical part below:

func captureOutput(_ captureOutput: AVCaptureOutput, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection) { 
    autoreleasepool { 

     connection.videoOrientation = AVCaptureVideoOrientation.landscapeLeft; 

     // COMMENT: This line makes sense - this is your pixelbuffer from the camera. 
     guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return } 

     // COMMENT: OK, so you turn pixelBuffer into a CIImage... 
     let cameraImage = CIImage(cvPixelBuffer: pixelBuffer) 

     // COMMENT: And now you've created a CIFilter and handed it the camera image as its input... 
     let filter = CIFilter(name: "Fİlter")! 
     filter.setValue(cameraImage, forKey: kCIInputImageKey) 


     let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)! 
     self.currentVideoDimensions = CMVideoFormatDescriptionGetDimensions(formatDescription) 
     self.currentSampleTime = CMSampleBufferGetOutputPresentationTimeStamp(sampleBuffer) 

     if self.isWriting { 
      if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
       // COMMENT: Here's where it gets weird. You've declared a new, empty pixelBuffer... but you already have one (pixelBuffer) that contains the image you want to write... 
       var newPixelBuffer: CVPixelBuffer? = nil 

       // COMMENT: And you grabbed memory from the pool. 
       CVPixelBufferPoolCreatePixelBuffer(nil, self.assetWriterPixelBufferInput!.pixelBufferPool!, &newPixelBuffer) 

       // COMMENT: And now you wrote an empty pixelBuffer back <-- this is what's causing the black frame. 
       let success = self.assetWriterPixelBufferInput?.append(newPixelBuffer!, withPresentationTime: self.currentSampleTime!) 

       if success == false { 
        print("Pixel Buffer failed") 
       } 
      } 
     } 

     // COMMENT: And now you're sending the filtered image back to the screen. 
     DispatchQueue.main.async { 

      if let outputValue = filter.value(forKey: kCIOutputImageKey) as? CIImage { 
       let filteredImage = UIImage(ciImage: outputValue) 
       self.imageView.image = filteredImage 
      } 
     } 
    } 
} 

It looks to me like you're basically grabbing the camera image and creating a filtered copy of it, but then you make a new pixel buffer, which is empty, and write that out.

If you write out the pixelBuffer you already have instead of the new one, you should write the image successfully.
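
For example, a rough sketch of that change inside captureOutput (untested, reusing the question's own properties - pixelBuffer here is the buffer returned by CMSampleBufferGetImageBuffer):

     if self.isWriting { 
      if self.assetWriterPixelBufferInput?.assetWriterInput.isReadyForMoreMediaData == true { 
       // Append the camera's own pixel buffer instead of a freshly 
       // allocated (and therefore black) one 
       let success = self.assetWriterPixelBufferInput?.append(pixelBuffer, withPresentationTime: self.currentSampleTime!) 

       if success == false { 
        print("Pixel Buffer failed") 
       } 
      } 
     } 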

What you need in order to write the filtered video successfully is a way to create a new CVPixelBuffer from a CIImage - that solution already exists here on StackOverflow; I know, because I needed that step myself!
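
As a sketch of that step (untested, and assuming a CIContext stored in a property such as ciContext, which the question's code doesn't have yet), you render the filter's output into the pooled buffer before appending it:

     // Create the context once, e.g. as a property: let ciContext = CIContext() 
     if let outputImage = filter.outputImage, 
      let pool = self.assetWriterPixelBufferInput?.pixelBufferPool { 
      var newPixelBuffer: CVPixelBuffer? = nil 
      CVPixelBufferPoolCreatePixelBuffer(nil, pool, &newPixelBuffer) 

      if let buffer = newPixelBuffer { 
       // Render the filtered CIImage into the buffer, then append that 
       // instead of the empty one 
       self.ciContext.render(outputImage, to: buffer) 
       self.assetWriterPixelBufferInput?.append(buffer, withPresentationTime: self.currentSampleTime!) 
      } 
     } 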


Not Swift-based, but here's the relevant Objective-C code from a few years back: https://stackoverflow.com/questions/22819337 – Tim


Thanks Tim :) – hackio
