2017-03-28 5 views
0

Ich versuche derzeit herauszufinden, wie die AVCapturePhotoOutput-Methode von iOS 10 verwendet wird, und habe dabei Probleme. Ich fühle mich wie ich bin es aber einen Fehler fortsetzen Recht bekommen Empfang:Verwenden von AVCapturePhotoOutput in iOS10 - NSGenericException

* Terminating app due to uncaught exception 'NSGenericException', reason: '* -[AVCapturePhotoOutput capturePhotoWithSettings:delegate:] No active and enabled video connection'

habe ich versucht, in diese Codezeile setzen entweder die AVCapturePhotoCaptureDelegate oder meine didPressTakePhoto Funktion:

if let videoConnection = stillImageOutput.connection(withMediaType: AVMediaTypeVideo) { 
    videoConnection.videoOrientation = AVCaptureVideoOrientation.portrait; 
    ... 
} 

Hier der Code, den ich bis jetzt habe:

import AVFoundation 
import UIKit 

class Camera: UIViewController, UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCapturePhotoCaptureDelegate { 

    @IBOutlet weak var cameraView: UIView! 
    @IBOutlet weak var imageView: UIImageView! 

    var captureSession : AVCaptureSession? 
    var stillImageOutput : AVCapturePhotoOutput? 
    var stillImageOutputSettings : AVCapturePhotoSettings? 
    var previewLayer : AVCaptureVideoPreviewLayer? 

    var didTakePhoto = Bool(); 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     // Do any additional setup after loading the view. 
    } 

    override func didReceiveMemoryWarning() { 
     super.didReceiveMemoryWarning() 
     // Dispose of any resources that can be recreated. 
    } 

    override func viewDidAppear(_ animated: Bool) { 
     super.viewDidAppear(animated); 

     previewLayer?.frame = cameraView.bounds; 
    } 

    override func viewWillAppear(_ animated: Bool) { 
     super.viewWillAppear(animated); 

     captureSession = AVCaptureSession(); 
     captureSession?.sessionPreset = AVCaptureSessionPreset1920x1080; 

     stillImageOutput = AVCapturePhotoOutput(); 

     let backCamera = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo); 

     do { 
      let input = try AVCaptureDeviceInput(device: backCamera) 

      if (captureSession?.canAddInput(input))! { 
       captureSession?.addInput(input); 

       if (captureSession?.canAddOutput(stillImageOutput))! { 
        captureSession?.canAddOutput(stillImageOutput); 

        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession); 
        previewLayer?.videoGravity = AVLayerVideoGravityResizeAspect; 
        previewLayer?.connection.videoOrientation = AVCaptureVideoOrientation.portrait; 
        cameraView.layer.addSublayer(previewLayer!); 
        captureSession?.startRunning(); 
       } 
      } 
     } catch { 
      print(error); 
     } 
    } 

    func capture(_ captureOutput: AVCapturePhotoOutput, didFinishProcessingPhotoSampleBuffer photoSampleBuffer: CMSampleBuffer?, previewPhotoSampleBuffer: CMSampleBuffer?, resolvedSettings: AVCaptureResolvedPhotoSettings, bracketSettings: AVCaptureBracketedStillImageSettings?, error: Error?) { 
     if let error = error { 
      print(error.localizedDescription); 
     } 

     if let sampleBuffer = photoSampleBuffer, let previewBuffer = previewPhotoSampleBuffer, let dataImage = AVCapturePhotoOutput.jpegPhotoDataRepresentation(forJPEGSampleBuffer: sampleBuffer, previewPhotoSampleBuffer: previewBuffer) { 
      print(UIImage(data: dataImage)?.size as Any); 

      let dataProvider = CGDataProvider(data: dataImage as CFData); 
      let cgImageRef: CGImage! = CGImage(jpegDataProviderSource: dataProvider!, decode: nil, shouldInterpolate: true, intent: .defaultIntent); 
      let image = UIImage(cgImage: cgImageRef, scale: 1.0, orientation: UIImageOrientation.right); 

      self.imageView.image = image; 
      self.imageView.isHidden = false; 
     } 
    } 

    func didPressTakePhoto() { 
      stillImageOutputSettings = AVCapturePhotoSettings(); 

      let previewPixelType = stillImageOutputSettings?.availablePreviewPhotoPixelFormatTypes.first!; 
      let previewFormat = [kCVPixelBufferPixelFormatTypeKey as String: previewPixelType, 
           kCVPixelBufferWidthKey as String: 160, 
           kCVPixelBufferHeightKey as String: 160]; 
      stillImageOutputSettings?.previewPhotoFormat = previewFormat; 

      stillImageOutput.capturePhoto(with: stillImageOutputSettings!, delegate: self); 
    } 

    func didPressTakeAnother() { 
     if (didTakePhoto == true) { 
      imageView.isHidden = true; 
      didTakePhoto = false; 
     } else { 
      captureSession?.startRunning(); 
      didTakePhoto = true; 
      didPressTakePhoto(); 
     } 
    } 

    override func touchesBegan(_ touches: Set<UITouch>, with event: UIEvent?) { 
     didPressTakeAnother(); 
    } 
} 

Irgendwelche Vorschläge?

Vielen Dank im Voraus!

+0

laufen Sie diese auf dem Simulator? –

+0

Nein, es läuft von meinem iPhone 6S + –

Antwort

0

Änderung AVCaptureSessionPreset1920x1080 zu AVCaptureSessionPresetHigh

versuchen es

Verwandte Themen