Custom camera: video does not play with audio in Swift

I'm new to Swift and to Stack Overflow, so thanks in advance for your attention. I'm trying to build a custom camera that records video with audio, so that the recorded video plays back with sound. I've spent the last few days on this and followed a tutorial, but something is still missing: my custom camera apparently records only video, not audio, and I don't understand why. I searched for an answer but couldn't find one that fits my case.

Here is what I did:

import UIKit 
import AVFoundation 
import SVProgressHUD 
import MediaPlayer 
import MobileCoreServices 
import AVKit 
var videoUrl = [AnyObject]() 


class TestViewController: UIViewController { 

@IBOutlet var viewVidioPlayer: UIView! 
@IBOutlet weak var myView: UIView! 

var session: AVCaptureSession? 
var userreponsevideoData = NSData() 
var userreponsethumbimageData = NSData() 

override func viewDidLoad() { 
    super.viewDidLoad() 

} 

override func viewDidAppear(_ animated: Bool) { 
    super.viewDidAppear(animated) 
} 

// here I create the session
func createSession() {

var input: AVCaptureDeviceInput? 
    let movieFileOutput = AVCaptureMovieFileOutput() 
    var prevLayer: AVCaptureVideoPreviewLayer? 
    prevLayer?.frame.size = myView.frame.size 
    session = AVCaptureSession() 
    let error: NSError? = nil 
    do {
     input = try AVCaptureDeviceInput(device: self.cameraWithPosition(position: .front)!)
    } catch {
     return
    }
    if error == nil { 
     session?.addInput(input) 
    } else { 
     print("camera input error: \(String(describing: error))") 
    } 
    prevLayer = AVCaptureVideoPreviewLayer(session: session) 
    prevLayer?.frame.size = myView.frame.size 
    prevLayer?.videoGravity = AVLayerVideoGravityResizeAspectFill 
    prevLayer?.connection.videoOrientation = .portrait 
    myView.layer.addSublayer(prevLayer!) 
    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0] 
    let filemainurl = NSURL(string: ("\(documentsURL.appendingPathComponent("temp"))" + ".mp4")) 


    let maxDuration: CMTime = CMTimeMake(600, 10) 
    movieFileOutput.maxRecordedDuration = maxDuration 
    movieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024 
    if self.session!.canAddOutput(movieFileOutput) { 
     self.session!.addOutput(movieFileOutput) 
    } 
    session?.startRunning() 
    movieFileOutput.startRecording(toOutputFileURL: filemainurl! as URL, recordingDelegate: self) 

} 
func cameraWithPosition(position: AVCaptureDevicePosition) -> AVCaptureDevice? { 
    let devices = AVCaptureDevice.devices(withMediaType: AVMediaTypeVideo) 
    for device in devices! { 
     if (device as AnyObject).position == position { 
      return device as? AVCaptureDevice 
     } 
    } 
    return nil 
} 
@IBAction func pressbackbutton(sender: AnyObject) { 
    session?.stopRunning() 

} 

@IBAction func Record(_ sender: Any) { 
    createSession() 
} 
@IBAction func play(_ sender: Any) { 
    self.videoPlay() 
} 
func videoPlay() 
{ 

    let documentsUrl = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! 

    do { 
     // Get the directory contents urls (including subfolders urls) 
     let directoryContents = try FileManager.default.contentsOfDirectory(at: documentsUrl, includingPropertiesForKeys: nil, options: []) 
     print(directoryContents) 

     // if you want to filter the directory contents you can do like this: 
     videoUrl = directoryContents.filter{ $0.pathExtension == "mp4" } as [AnyObject] 
     print("mp3 urls:",videoUrl) 


     let playerController = AVPlayerViewController() 
     playerController.delegate = self as? AVPlayerViewControllerDelegate 
     let movieURL = videoUrl[0] 


     print(movieURL) 

     let player = AVPlayer(url: movieURL as! URL) 
     playerController.player = player 
     self.addChildViewController(playerController) 
     self.view.addSubview(playerController.view) 
     playerController.view.frame = self.view.frame 

     player.play() 
     player.volume = 1.0 
     player.rate = 1.0 



    } catch let error as NSError { 
     print(error.localizedDescription) 
    } 




    } 
    } 

extension TestViewController: AVCaptureFileOutputRecordingDelegate {

@available(iOS 4.0, *) 
private func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: URL!, fromConnections connections: [AnyObject]!) { 
    print(fileURL) 
} 
func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) { 
    let filemainurl = outputFileURL 

    do 
    { 
     let asset = AVURLAsset(url: filemainurl! as URL, options: nil) 
     //AVURLAsset(URL: filemainurl as! URL, options: nil) 
     print(asset) 
     let imgGenerator = AVAssetImageGenerator(asset: asset) 
     imgGenerator.appliesPreferredTrackTransform = true 
     let cgImage = try imgGenerator.copyCGImage(at: CMTimeMake(0, 1), actualTime: nil) 

     let uiImage = UIImage(cgImage: cgImage) 

     userreponsethumbimageData = try NSData(contentsOf: filemainurl! as URL) 

     print(userreponsethumbimageData.length) 
     print(uiImage) 
     // imageData = UIImageJPEGRepresentation(uiImage, 0.1) 
    } 
    catch let error as NSError 
    { 
     print(error) 
     return 
    } 

    SVProgressHUD.show(with: SVProgressHUDMaskType.clear) 
    let VideoFilePath = NSURL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("mergeVideo\(arc4random()%1000)d")!.appendingPathExtension("mp4").absoluteString 
    if FileManager.default.fileExists(atPath: VideoFilePath) 

    { 
     do 

     { 
      try FileManager.default.removeItem(atPath: VideoFilePath) 
     } 
     catch { } 

    } 
    let tempfilemainurl = NSURL(string: VideoFilePath)! 
    let sourceAsset = AVURLAsset(url: filemainurl! as URL, options: nil) 
    let assetExport: AVAssetExportSession = AVAssetExportSession(asset: sourceAsset, presetName: AVAssetExportPresetMediumQuality)! 
    assetExport.outputFileType = AVFileTypeQuickTimeMovie 
    assetExport.outputURL = tempfilemainurl as URL 

    assetExport.exportAsynchronously {() -> Void in 
     switch assetExport.status 
     { 
     case AVAssetExportSessionStatus.completed: 
      DispatchQueue.main.async(execute: { 
       do 
       { 
        SVProgressHUD .dismiss() 
        self.userreponsevideoData = try NSData(contentsOf: tempfilemainurl as URL, options: NSData.ReadingOptions()) 
        print("MB - \(self.userreponsevideoData.length) byte") 


       } 
       catch 
       { 
        SVProgressHUD .dismiss() 
        print(error) 
       } 
      }) 
     case AVAssetExportSessionStatus.failed: 
      print("failed \(assetExport.error)") 
     case AVAssetExportSessionStatus.cancelled: 
      print("cancelled \(assetExport.error)") 
     default: 
      print("complete") 
      SVProgressHUD .dismiss() 
     } 

    } 



} 

}

That is everything I did, so I don't understand what is missing in this code: why the audio isn't played back with the video, or why the audio isn't recorded with the video in the first place.

Answer


Use this CocoaPod in your project. It makes your work much easier. It comes with full instructions on what to do and also includes a demo project so you can test how it works.

SwiftyCam
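
For reference, the most likely reason the recordings in the question are silent is that createSession() only ever adds the camera as an input: the session gets an AVCaptureDeviceInput for the front camera and an AVCaptureMovieFileOutput, but no input for the microphone, so the movie file output has no audio track to write. A minimal sketch of the missing step, written against the same Swift 3 era AVFoundation API used in the question (the helper name addAudioInput(to:) is just for illustration):

import AVFoundation

// Sketch: attach the microphone to an existing capture session.
// Without an audio AVCaptureDeviceInput, AVCaptureMovieFileOutput records video only.
func addAudioInput(to session: AVCaptureSession) {
    guard let audioDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) else {
        print("no audio device available")
        return
    }
    do {
        let audioInput = try AVCaptureDeviceInput(device: audioDevice)
        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
        }
    } catch {
        print("audio input error: \(error)")
    }
}

Calling something like this in createSession() before session?.startRunning() should make the recorded file contain sound. Note also that on iOS 10 and later the app needs the NSMicrophoneUsageDescription key in Info.plist (in addition to NSCameraUsageDescription), otherwise audio capture will not work. SwiftyCam takes care of this kind of session setup for you.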