
I'm having trouble recording video with the code below. I'm working from sample code built for video capture, "Video capture with AVFoundation in Swift for iOS".

Specifically, I'm unable to compile this line without getting the error: "Cannot convert value of type 'ViewController' to specified type 'AVCaptureFileOutputRecordingDelegate'"

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

This line sits inside an IBAction function:

@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

The rest of the code is here:

import UIKit 
import AVFoundation 
import Darwin 




class ViewController: UIViewController { 



@IBOutlet weak var CameraView: UIImageView! 

@IBOutlet weak var RecordButton: UIButton! 

@IBOutlet weak var SelectFrButton: UIButton! 

@IBOutlet weak var ISOslider: UISlider! 

@IBOutlet weak var SSslider: UISlider! 

@IBOutlet weak var ISOtextfield: UITextField! 

@IBOutlet weak var SStextfield: UITextField! 

@IBOutlet weak var TorchSlider: UISlider! 

@IBOutlet weak var Torchtextfield: UITextField! 

var captureSession = AVCaptureSession(); 
var DisplaySessionOutput = AVCaptureVideoDataOutput(); 
var SaveSessionOutput = AVCaptureMovieFileOutput(); 
var previewLayer = AVCaptureVideoPreviewLayer(); 
var CaptureDevice:AVCaptureDevice? = nil; 
var CurrentTorchLevel:Float = 0.5; 


override func viewDidLoad() { 
    super.viewDidLoad() 

    captureSession.sessionPreset = AVCaptureSessionPresetHigh 
    // Loop through all the capture devices on this phone 

    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified) 

    for device in (deviceDiscoverySession?.devices)! { 
     if(device.position == AVCaptureDevicePosition.back){ 
      do{ 

       try device.lockForConfiguration() 


       device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in 

        // Set text and sliders to correct levels 
        self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!; 
        self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!; 

        self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!); 
        self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!); 

        self.ISOtextfield.text = device.iso.description; 
        self.ISOslider.setValue(device.iso, animated: false) 

        self.SStextfield.text = device.exposureDuration.seconds.description; 
        self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false); 

        self.TorchSlider.minimumValue = 0.01; 
        self.TorchSlider.maximumValue = 1; 
        self.TorchSlider.value = 0.5; 
        self.Torchtextfield.text = "0.5"; 
       }) 




       //Turn torch on 

       if (device.torchMode == AVCaptureTorchMode.on) { 
        device.torchMode = AVCaptureTorchMode.off 
       } else { 
        try device.setTorchModeOnWithLevel(1.0) 

       } 

       device.unlockForConfiguration(); 

       CaptureDevice = device; 

       let input = try AVCaptureDeviceInput(device: CaptureDevice) 
       if(captureSession.canAddInput(input)){ 
        captureSession.addInput(input); 

        if(captureSession.canAddOutput(DisplaySessionOutput)){ 
         captureSession.addOutput(DisplaySessionOutput); 
         previewLayer = AVCaptureVideoPreviewLayer(session: captureSession); 
         previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill; 
         previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait; 
         CameraView.layer.addSublayer(previewLayer); 
        } 
       } 
      } 
      catch{ 
       print("exception!"); 
      } 
     } 
    } 

    CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1); 

    captureSession.startRunning() 


} 



override func viewDidLayoutSubviews() { 

    previewLayer.frame = CameraView.bounds 

} 


override func didReceiveMemoryWarning() { 
    super.didReceiveMemoryWarning() 
    // Dispose of any resources that can be recreated. 
} 


@IBAction func RecordButtonPressed(_ sender: Any) { 

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 

    var videoFileOutput = AVCaptureMovieFileOutput() 
    self.captureSession.addOutput(videoFileOutput) 

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filePath = documentsURL.appendingPathComponent("temp") 

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate) 

    RecordButton.setTitle("Stop", for: .normal); 

} 

@IBAction func ISOvaluechanged(_ sender: Any) { 

    SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func SSvaluechanged(_ sender: Any) { 

    let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000); 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel) 
} 

@IBAction func ISOtextchanged(_ sender: Any) { 

} 

@IBAction func SStextchanged(_ sender: Any) { 

    //let time = CMTimeMake(Int64(exposurelevel * 100000),100000); 

} 


@IBAction func ChooseButtonPressed(_ sender: Any) { 
} 

func ShowAlert(AlertMessage: String) { 

    let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert) 

    // Add the OK action before presenting; adding it after present(_:animated:completion:) 
    // can leave the alert without a button. 
    let OKAction = UIAlertAction(title: "OK", style: .default) { (action: UIAlertAction) in 
    } 

    alertController.addAction(OKAction) 

    self.present(alertController, animated: true, completion: nil) 

} 

@IBAction func TorchSliderChanged(_ sender: Any) { 

    CurrentTorchLevel = self.TorchSlider.value; 
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel); 
} 

func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) { 

    var newISOval = isolevel; 
    var newSSval = exposurelevel; 
    let newTorchVal = TorchLevel; 

    if(newISOval == FLT_MAX){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.maxISO)!; 
    } 

    else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) { 

     newISOval = (self.CaptureDevice?.activeFormat.minISO)!; 
    } 

    if(newSSval.timescale == 0){ 
     // Pass through 0,0 for maintaining current SS. 
    } 

    else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!) > 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!; 
    } 

    else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) { 

     newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!; 
    } 



     do { 

     try self.CaptureDevice?.lockForConfiguration(); 

     try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal); 

     CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in 

      // Set text and sliders to correct levels 
      self.ISOtextfield.text = self.CaptureDevice?.iso.description; 
      self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false) 

      self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description; 
      self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false); 

      self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false); 
      self.Torchtextfield.text = self.CurrentTorchLevel.description; 

     }) 

     self.CaptureDevice?.unlockForConfiguration(); 

    } 

    catch { 
     ShowAlert(AlertMessage: "Unable to set camera settings"); 
     self.CaptureDevice?.unlockForConfiguration(); 


    } 

} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) { 
    return 
} 

func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) { 
    return 
} 

} 

Thanks for your help!

Answer


Create an extension of your UIViewController that conforms to AVCaptureFileOutputRecordingDelegate. Remove the final two methods from your ViewController class and add them to the extension.

class ViewController: UIViewController { 
    // your methods as usual, but remove the final two methods and add them to 
    // the extension that follows. Those methods are what will make you conform 
    // to AVCaptureFileOutputRecordingDelegate. 
} 

extension ViewController: AVCaptureFileOutputRecordingDelegate { 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { 

    } 

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 

    } 
} 

You can do the same thing by extending your UIViewController directly, as below, but I thought I would give you the cleaner solution above. The choice is yours.

class ViewController: UIViewController, AVCaptureFileOutputRecordingDelegate { 
    // your methods as usual, but this time you keep your final two methods 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) { 

    } 

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 

    } 
} 

Hi, thanks for the help. I tried the second solution but got the error: "Type 'ViewController' does not conform to protocol 'AVCaptureFileOutputRecordingDelegate'" – aforward


To conform to AVCaptureFileOutputRecordingDelegate, you need the two captureOutput methods in ViewController. I've updated my answer to include them more clearly. Do you still have them in there? The first error you got was because ViewController did not adopt AVCaptureFileOutputRecordingDelegate, so when you used 'self' in your @IBAction, Xcode tried to convert a UIViewController into an AVCaptureFileOutputRecordingDelegate. – gwinyai
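
Below is a minimal sketch of what this comment describes, using a hypothetical class name: the assignment of self only type-checks once the class declares the conformance and implements the two required methods (Swift 3 signatures, as in the answer above).

import UIKit
import AVFoundation

// Does NOT compile: a plain UIViewController subclass is not an
// AVCaptureFileOutputRecordingDelegate, so assigning 'self' fails with
// "Cannot convert value of type ... to specified type ...":
//
//     class RecorderViewController: UIViewController {
//         func record() {
//             let delegate: AVCaptureFileOutputRecordingDelegate? = self // error
//         }
//     }

// Compiles: the conformance is declared and the required methods exist.
class RecorderViewController: UIViewController {

    func record() {
        let delegate: AVCaptureFileOutputRecordingDelegate? = self // OK
        _ = delegate
    }
}

extension RecorderViewController: AVCaptureFileOutputRecordingDelegate {

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {
    }

    func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {
    }
}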


I figured it out. The methods changed in Swift 3.0: https://developer.apple.com/reference/avfoundation/avcapturefileoutputrecordingdelegate (I would suggest updating your answer for future troubleshooting.) Thanks again for your help! – aforward
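
With the conformance in place, the @IBAction from the question compiles as written. Here is a trimmed sketch of how the pieces fit together, assuming the outlets and captureSession from the question's class; the "temp.mov" file name and the canAddOutput guard are assumptions not in the original code, which wrote to a bare "temp" path and added the output unconditionally.

@IBAction func RecordButtonPressed(_ sender: Any) {
    // 'self' now satisfies AVCaptureFileOutputRecordingDelegate, so the
    // assignment that previously failed to compile is accepted.
    let recordingDelegate: AVCaptureFileOutputRecordingDelegate? = self

    let videoFileOutput = AVCaptureMovieFileOutput()
    // Assumption: guard the call; the original added the output unconditionally.
    if captureSession.canAddOutput(videoFileOutput) {
        captureSession.addOutput(videoFileOutput)
    }

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    // Assumption: "temp.mov"; the original appended "temp" with no extension.
    let filePath = documentsURL.appendingPathComponent("temp.mov")

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

    RecordButton.setTitle("Stop", for: .normal)
}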
