
I am trying to build a SnapChat-like app. With the same button I want to let the user take pictures (a tap) and record videos (a long press). How can I capture photos and videos from the same AVCaptureSession?

I am using AVFoundation for this. The tricky part is that I can't get it to work properly within the same AVCaptureSession. I mean, I only have one preview layer for both kinds of capture, so how do I start the right one depending on how the user interacts with the capture button? Has anyone worked on something similar?
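
To make the interaction concrete, this is roughly how the capture button is meant to behave. A minimal sketch with illustrative names (RecordCamera is the class shown further below):

import UIKit

class CaptureViewController: UIViewController {

    let captureButton = UIButton(type: .custom)
    var camera: RecordCamera!

    override func viewDidLoad() {
        super.viewDidLoad()
        camera = RecordCamera(view: view)

        // Same button, two gestures: a tap takes a photo, a long press records a video.
        captureButton.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(handleTap)))
        captureButton.addGestureRecognizer(UILongPressGestureRecognizer(target: self, action: #selector(handleLongPress(_:))))
    }

    @objc func handleTap() {
        camera.takePicture()
    }

    @objc func handleLongPress(_ gesture: UILongPressGestureRecognizer) {
        switch gesture.state {
        case .began:
            camera.startRecording()    // press and hold starts the video
        case .ended, .cancelled:
            camera.stopRecording()     // releasing the button stops it
        default:
            break
        }
    }
}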

Here is a piece of my code:

import UIKit 
import AVFoundation 

protocol RecordCameraDelegate { 
    func didSavedOutputFile(url: URL!, error: Error?) 
    func didSavedImage(image: UIImage?) 
} 

// MARK: - Camera 
class RecordCamera : NSObject { 

    var videoLayer : AVCaptureVideoPreviewLayer! 
    var delegate : RecordCameraDelegate! 
    var capturedPhoto : UIImage? 

    fileprivate var captureSession = AVCaptureSession() 
    fileprivate var photoSession = AVCaptureSession() 

    fileprivate var movieOutput = AVCaptureMovieFileOutput() 
    fileprivate var cameraDevice : AVCaptureDevicePosition! 
    fileprivate let stillImageOutput = AVCaptureStillImageOutput() 

    // Devices 
    fileprivate lazy var frontCameraDevice: AVCaptureDevice? = { 
     let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice] 
     return devices.filter { $0.position == .front }.first 
    }() 

    fileprivate lazy var backCameraDevice: AVCaptureDevice? = { 
     let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) as! [AVCaptureDevice] 
     return devices.filter { $0.position == .back }.first 
    }() 

    fileprivate lazy var micDevice: AVCaptureDevice? = { 
     return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 
    }() 

    fileprivate var tempFilePath: URL = {
     let tempURL = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("bighug").appendingPathExtension("mp4")
     // Remove any leftover file from a previous recording (fileExists expects a plain path, not a file:// string).
     if FileManager.default.fileExists(atPath: tempURL.path) {
      do {
       try FileManager.default.removeItem(at: tempURL)
      } catch let error { print("Can't remove existing temp file: \(String(describing: error))") }
     }
     return tempURL
    }()

    // MARK: - Initialization 
    init(view: UIView, cameraPosition: AVCaptureDevicePosition = .front) { 
     super.init() 

     cameraDevice = cameraPosition 

     // Video 
     self.configureToRecord(view: view) 
     // Photo 
     self.configureToCapturePhoto() 
    } 

    func configureToRecord(view: UIView? = nil) { 

     captureSession.beginConfiguration() 
     defer { 
      // commit & stop session 
      captureSession.commitConfiguration() 
      if !captureSession.isRunning { captureSession.startRunning() } 
     } 

     captureSession.sessionPreset = AVCaptureSessionPresetHigh 

     // Start configuration 
     if !captureSession.isRunning { 

      // layer 
      if let validView = view { 
       videoLayer = AVCaptureVideoPreviewLayer(session: captureSession) 
       videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill 
       videoLayer.frame = validView.bounds 
       validView.layer.addSublayer(videoLayer) 
      } 

      // add device inputs (front camera and mic) 
      if cameraDevice == .front { 
       captureSession.addInput(deviceInputFrom(device: frontCameraDevice)) 
      } else { 
       captureSession.addInput(deviceInputFrom(device: backCameraDevice)) 
      } 
     } 

     captureSession.addInput(deviceInputFrom(device: micDevice)) 

     // Output 
     movieOutput.movieFragmentInterval = kCMTimeInvalid 

     // Remove previous output 
     if let existingOutput = captureSession.outputs.first as? AVCaptureOutput { 
      captureSession.removeOutput(existingOutput) 
     } 
     // Add Movie Output 
     if captureSession.canAddOutput(movieOutput) { 
      captureSession.addOutput(movieOutput) 
     } 
    } 

    func configureToCapturePhoto() { 

     photoSession.beginConfiguration() 
     defer { photoSession.commitConfiguration() } 

     photoSession.sessionPreset = AVCaptureSessionPresetPhoto 
     stillImageOutput.outputSettings = [AVVideoCodecKey:AVVideoCodecJPEG] 

     if #available(iOS 10.0, *) { 
      let cameraOutput = AVCapturePhotoOutput() 
      // Add Photo Output 
      if photoSession.canAddOutput(cameraOutput) { 
       photoSession.addOutput(cameraOutput) 
      } 
     } 
     else { 
      // Add Photo Output 
      if photoSession.canAddOutput(stillImageOutput) { 
       photoSession.addOutput(stillImageOutput) 
      } 
     } 
    } 

    func takePicture() { 
     if #available(iOS 10.0, *) { 
      let cameraOutput = photoSession.outputs.first as! AVCapturePhotoOutput 
      // Capture Picture 
      let settings = AVCapturePhotoSettings() 
      let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first! 
      let previewFormat = [ 
       kCVPixelBufferPixelFormatTypeKey as String: previewPixelType, 
       kCVPixelBufferWidthKey as String: 828, 
       kCVPixelBufferHeightKey as String: 828 
      ] 
      settings.previewPhotoFormat = previewFormat 
      cameraOutput.capturePhoto(with: settings, delegate: self) 
     } 
     else { 
      if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) { 
       stillImageOutput.captureStillImageAsynchronously(from: videoConnection) { (imageDataSampleBuffer, error) -> Void in 
        let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(imageDataSampleBuffer) 
        //UIImageWriteToSavedPhotosAlbum(UIImage(data: imageData!)!, nil, nil, nil) 
        guard let validData = imageData else { self.delegate?.didSavedImage(image: nil); return } 
        self.capturedPhoto = UIImage(data: validData) 
       } 
      } 
     } 
    } 

    // MARK: - Record Methods 
    func startRecording() { 
     // Take picture 
     print("Camera started recording") 
     self.takePicture() 
     // Start recording 
     movieOutput.startRecording(
      toOutputFileURL: tempFilePath, 
      recordingDelegate: self 
     ) 
    } 

    func stopRecording() { 
     print("Camera stopped recording") 
     movieOutput.stopRecording() 
    } 

    // MARK: - Modes 
    func cameraMode() { 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // From 
     if cameraDevice == .front { 
      if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) { 
       if !inputs.contains(validFrontDevice) { 
        captureSession.addInput(validFrontDevice) 
       } 
      } 
     } 
     // Back 
     if cameraDevice == .back { 
      if let validBackDevice = deviceInputFrom(device: backCameraDevice) { 
       if !inputs.contains(validBackDevice) { 
        captureSession.addInput(validBackDevice) 
       } 
      } 
     } 

     print("Record Camera --> Set VIDEO Mode") 
    } 

    func audioMode() { 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // Remove.. 
     for input in inputs { 
      if let deviceInput = input as? AVCaptureDeviceInput { 
       if deviceInput.device == backCameraDevice 
       || deviceInput.device == frontCameraDevice { 
        captureSession.removeInput(deviceInput) 
       } 
      } 
     } 

     print("Record Camera --> Set AUDIO Mode") 
    } 

    // MARK: - Util methods 
    fileprivate func deviceInputFrom(device: AVCaptureDevice?) -> AVCaptureDeviceInput? { 
     guard let validDevice = device else { return nil } 
     do { 
      return try AVCaptureDeviceInput(device: validDevice) 
     } catch let outError { 
      print("Device setup error occured: \(String(describing: outError))") 
      return nil 
     } 
    } 

    func swipeCamera() { 

     cameraDevice = cameraDevice == .front ? .back : .front 

     captureSession.beginConfiguration() 
     defer { captureSession.commitConfiguration() } 

     let inputs: [AVCaptureInput] = captureSession.inputs?.flatMap { $0 as? AVCaptureInput } ?? [] 

     // Remove... 
     for input in inputs { 
      if let deviceInput = input as? AVCaptureDeviceInput { 
       if deviceInput.device == backCameraDevice && cameraDevice == .front { 
        captureSession.removeInput(deviceInput) 
        photoSession.removeInput(deviceInput) 
        break; 
       } else if deviceInput.device == frontCameraDevice && cameraDevice == .back { 
        captureSession.removeInput(deviceInput) 
        photoSession.removeInput(deviceInput) 
        break; 
       } 
      } 
     } 

     // From 
     if cameraDevice == .front { 
      if let validFrontDevice = deviceInputFrom(device: frontCameraDevice) { 
       if !inputs.contains(validFrontDevice) { 
        captureSession.addInput(validFrontDevice) 
        photoSession.addInput(validFrontDevice) 
        print("Record Camera --> Swipe to Front Camera") 
       } 
      } 
     } 
     // Back 
     if cameraDevice == .back { 
      if let validBackDevice = deviceInputFrom(device: backCameraDevice) { 
       if !inputs.contains(validBackDevice) { 
        captureSession.addInput(validBackDevice) 
        photoSession.addInput(validBackDevice) 
        print("Record Camera --> Swipe to Back Camera") 
       } 
      } 
     } 
    } 
} 

// MARK: - Capture Output 
extension RecordCamera : AVCaptureFileOutputRecordingDelegate { 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { 
     // Not implemented 
    } 

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
     guard error == nil else { 
      if let photo = capturedPhoto { 
       delegate?.didSavedImage(image: photo) 
      } 
      return 
     } 
     delegate?.didSavedOutputFile(url: outputFileURL, error: error) 
    } 
} 

@available(iOS 10.0, *) 
extension RecordCamera : AVCapturePhotoCaptureDelegate { 

    func capture(_ captureOutput: AVCapturePhotoOutput, didCapturePhotoForResolvedSettings resolvedSettings: AVCaptureResolvedPhotoSettings) { 
     print("Picture taken") 
    } 

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { 

     guard error == nil else { 
      print("Failed Capturing Picture: \(String(describing: error!.localizedDescription))") 
      capturedPhoto = nil 
      //self.delegate.didSavedImage(image: nil) 
      return 
     } 

     if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, 
      let imageData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) { 
      print("Photo Saved!") 
      capturedPhoto = UIImage(data: imageData) 
      //self.delegate.didSavedImage(image: image) 
     } 

    } 
} 

Answer


I have almost exactly the functionality you need. I created and configured a single capture session. For the video output I used the AVCaptureVideoDataOutput class, for audio AVCaptureAudioDataOutput, and for photos AVCaptureStillImageOutput.
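
A rough sketch of that single-session setup (illustrative names and Swift 3-era API like the rest of this post, not my exact code):

import AVFoundation

class CaptureController: NSObject {

    let session = AVCaptureSession()
    let videoOutput = AVCaptureVideoDataOutput()
    let audioOutput = AVCaptureAudioDataOutput()
    let stillImageOutput = AVCaptureStillImageOutput()
    let sampleQueue = DispatchQueue(label: "capture.samples")

    func configure() {
        session.beginConfiguration()
        session.sessionPreset = AVCaptureSessionPresetHigh

        // Inputs: camera and microphone on the same session.
        if let camera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo),
           let cameraInput = try? AVCaptureDeviceInput(device: camera),
           session.canAddInput(cameraInput) {
            session.addInput(cameraInput)
        }
        if let mic = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio),
           let micInput = try? AVCaptureDeviceInput(device: mic),
           session.canAddInput(micInput) {
            session.addInput(micInput)
        }

        // Outputs: raw video frames, raw audio buffers and stills, all on one session.
        videoOutput.setSampleBufferDelegate(self, queue: sampleQueue)
        audioOutput.setSampleBufferDelegate(self, queue: sampleQueue)
        stillImageOutput.outputSettings = [AVVideoCodecKey: AVVideoCodecJPEG]

        for output in [videoOutput, audioOutput, stillImageOutput] as [AVCaptureOutput] {
            if session.canAddOutput(output) { session.addOutput(output) }
        }

        session.commitConfiguration()
        session.startRunning()
    }
}

extension CaptureController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        // Forward the buffers to an AVAssetWriter here (see the delegate method below).
    }
}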

I used an AVAssetWriter to record the video and audio because I had to do some custom video processing. The recording happens in the AVCaptureVideoDataOutputSampleBufferDelegate methods. The delegate method looks like this:

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // Ignore buffers unless the user is currently recording.
    if !isRecordingVideo {
        return
    }

    if captureOutput == self.videoOutput {
        assetVideoWriterQueue.async {
            // The first video frame starts the writer session at its presentation time.
            if self.shouldStartWritingSession {
                self.assetWriter.startSession(atSourceTime: CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
                self.shouldStartWritingSession = false
            }

            if self.assetWriterInputCamera.isReadyForMoreMediaData {
                self.assetWriterInputCamera.append(sampleBuffer)
            }
        }
    }

    if captureOutput == self.audioOutput {
        assetAudioWriterQueue.async {
            // Audio is only appended once a video frame has started the writer session.
            let shouldStartWritingSession = self.shouldStartWritingSession
            if self.assetWriterInputMicrofone.isReadyForMoreMediaData && shouldStartWritingSession == false {
                self.assetWriterInputMicrofone.append(sampleBuffer)
            }

            if shouldStartWritingSession {
                print("In audioOutput and CANNOT Record")
            }
        }
    }
}
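
The asset writer and its inputs used above are created elsewhere; a minimal setup sketch, assuming H.264 video and AAC audio (the settings are illustrative, not my exact configuration):

import AVFoundation

// Illustrative writer setup for the delegate method above.
func makeAssetWriter(outputURL: URL) throws -> (AVAssetWriter, AVAssetWriterInput, AVAssetWriterInput) {
    let writer = try AVAssetWriter(outputURL: outputURL, fileType: AVFileTypeMPEG4)

    let videoSettings: [String: Any] = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: 1280,
        AVVideoHeightKey: 720
    ]
    let audioSettings: [String: Any] = [
        AVFormatIDKey: kAudioFormatMPEG4AAC,
        AVNumberOfChannelsKey: 1,
        AVSampleRateKey: 44100
    ]

    let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
    videoInput.expectsMediaDataInRealTime = true   // live capture, do not wait for buffered data
    audioInput.expectsMediaDataInRealTime = true

    if writer.canAdd(videoInput) { writer.add(videoInput) }
    if writer.canAdd(audioInput) { writer.add(audioInput) }

    // startSession(atSourceTime:) is called later, on the first video buffer.
    writer.startWriting()
    return (writer, videoInput, audioInput)
}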

My still image capture looks like this:

func captureStillImage(_ completion: @escaping ((Bool, UIImage?) -> Void)) {
    guard self.state == .running else {
        completion(false, nil)
        return
    }

    backgroundQueue.async {
        let connection = self.stillImageOutput.connection(withMediaType: AVMediaTypeVideo)

        self.stillImageOutput.captureStillImageAsynchronously(from: connection, completionHandler: { (buffer, error) in
            defer {
                self.state = .running
            }

            // Convert the still-image sample buffer into JPEG data; report failure otherwise.
            guard let buffer = buffer, let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(buffer) else {
                DispatchQueue.main.async {
                    completion(false, nil)
                }

                return
            }

            let image = UIImage(data: imageData)

            // Deliver the image on the main queue.
            DispatchQueue.main.async {
                completion(true, image)
            }
        })
    }
}
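
To finish a clip when the user releases the button, the writer has to be finalized on the writer queue. A hedged sketch reusing the property names from the delegate method above:

func stopRecordingVideo(completion: @escaping (URL?) -> Void) {
    isRecordingVideo = false    // stop appending buffers in the capture callback
    assetVideoWriterQueue.async {
        self.assetWriterInputCamera.markAsFinished()
        self.assetWriterInputMicrofone.markAsFinished()
        self.assetWriter.finishWriting {
            // On success the status is .completed and outputURL points to the finished movie file.
            let url: URL? = self.assetWriter.status == .completed ? self.assetWriter.outputURL : nil
            DispatchQueue.main.async { completion(url) }
        }
    }
}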

You can find plenty of examples of how to use asset writers on Stack Overflow. For example, you can get familiar with this one.


Thanks @Xaoc1024! I will check this out!
