Record video with AVCaptureSession, apply a CIFilter, and save it to the photo album

I want to build a custom video recorder in my app. I can already record and save the video, but I want to apply a filter to the video while it is being recorded and then save the filtered video to the photo album. This is my code for recording and saving the video:
let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }
    // NOTE: this local session shadows the captureSession property above;
    // it stays alive only because the preview layer retains it.
    let session = AVCaptureSession()
    session.beginConfiguration()
    session.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: session)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        session.addInput(input)
        session.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }
    session.addOutput(fileOutput)
    session.commitConfiguration()
    session.startRunning()
}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // Save the recorded video to the photo album.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}
I tried using AVCaptureVideoDataOutput instead, and in its delegate I use this code:
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait
    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)
    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}
With this code the filter is only shown in the live preview; nothing is recorded, because AVCaptureVideoDataOutput just hands you raw sample buffers. To get the filtered frames into a file you also need an AVAssetWriter, which is what the solution below adds.
======================= This is the solution to my question =======================
Please note that this code uses Swift 2 and Xcode 7.3.
let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()
var adapter: AVAssetWriterInputPixelBufferAdaptor!
var record = false
var videoWriter: AVAssetWriter!
var writerInput: AVAssetWriterInput!
var audioWriterInput: AVAssetWriterInput!
var lastPath = ""
var starTime = kCMTimeZero
var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    video()
}
func video() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }
    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    //videoLayer.frame = myImage.bounds
    //myImage.layer.addSublayer(videoLayer)
    view.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    audioOutput.setSampleBufferDelegate(self, queue: queue)
    captureSession.addOutput(videoOutput)
    captureSession.addOutput(audioOutput)
    captureSession.commitConfiguration()
    captureSession.startRunning()
}
@IBAction func recordFunc() {
    if record {
        myButton.setTitle("record", forState: .Normal)
        record = false
        self.writerInput.markAsFinished()
        audioWriterInput.markAsFinished()
        self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
            print("FINISHED!!!!!")
            UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
        }
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")
        lastPath = fileUrl.path!
        videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

        let outputSettings = [AVVideoCodecKey: AVVideoCodecH264,
                              AVVideoWidthKey: NSNumber(float: Float(outputSize.width)),
                              AVVideoHeightKey: NSNumber(float: Float(outputSize.height))]
        writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        writerInput.expectsMediaDataInRealTime = true
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String: AnyObject])
        videoWriter.addInput(writerInput)
        videoWriter.addInput(audioWriterInput)

        adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String: AnyObject])
        videoWriter.startWriting()
        // NOTE: starTime here is whatever the delegate last stored;
        // see the duration note at the end of the post.
        videoWriter.startSessionAtSourceTime(starTime)

        record = true
        myButton.setTitle("stop", forState: .Normal)
    }
}
func getCurrentDate() -> String {
    let format = NSDateFormatter()
    format.dateFormat = "dd-MM-yyyy hh:mm:ss"
    format.locale = NSLocale(localeIdentifier: "en")
    let date = format.stringFromDate(NSDate())
    return date
}
extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait
            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)
            let comicEffect = CIFilter(name: "CIHexagonalPixellate")
            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)
            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
            //let filteredImage = UIImage(CIImage: cameraImage)
            if self.record == true {
                // NOTE: dispatch_sync onto a queue created fresh for every frame
                // just runs the block inline; reuse one stored serial queue if
                // cross-frame serialization is the goal.
                dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            // NOTE: the Objective-C listing below declares pixelBufferFromCGImage:size:;
                            // the single-argument call here implies a variant that takes the size from the image.
                            let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)).takeRetainedValue() as CVPixelBufferRef, withPresentationTime: self.starTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }
            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }
    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        let context: CIContext? = CIContext(options: nil)
        if context != nil {
            return context!.createCGImage(inputImage, fromRect: inputImage.extent)
        }
        return nil
    }
    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"
        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }
        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}
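A side note on convertCIImageToCGImage above: it builds a new CIContext on every frame, which is expensive. As a rough sketch, assuming a stored ciContext property and drawing buffers from the adapter's pixelBufferPool (which is non-nil only after startWriting has run), the whole CGImage detour can be skipped by rendering the filtered CIImage straight into a pixel buffer:

// Assumed stored property; creating a CIContext per frame is costly.
let ciContext = CIContext(options: nil)

// Hypothetical helper: render a filtered CIImage directly into a pixel
// buffer taken from the adaptor's pool, avoiding the CGImage round trip.
func renderToPixelBuffer(image: CIImage, pool: CVPixelBufferPool) -> CVPixelBuffer? {
    var buffer: CVPixelBuffer?
    let status = CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &buffer)
    guard let pixelBuffer = buffer where status == kCVReturnSuccess else { return nil }
    ciContext.render(image, toCVPixelBuffer: pixelBuffer)
    return pixelBuffer
}

The append call would then pass that buffer to adapter.appendPixelBuffer(_:withPresentationTime:) directly.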
These methods live in DejalActivityView; they are Objective-C and I could not convert them to Swift, so if anyone can convert them, please edit my code. (A tentative Swift version is sketched after the listing.)
+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height,
                                          kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width,
                                                 rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    return pxbuffer;
}
+ (NSDictionary *)getAdapterDictionary {
    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
    return sourcePixelBufferAttributesDictionary;
}
+ (NSDictionary *)getAudioDictionary {
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;

    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kAudioFormatMPEG4AAC], AVFormatIDKey,
                                         //[NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                                         [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                         [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                         [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                         nil];
    // Alternative: the same settings with kAudioFormatMPEG4AAC_HE_V2 as AVFormatIDKey,
    // omitting AVNumberOfChannelsKey.
    return audioOutputSettings;
}
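Since the question asks for a Swift version of these helpers, here is a rough, untested Swift 2 translation. It derives the size from the image itself, because the Swift call site above passes only the image; and since a native Swift method returns a managed CVPixelBuffer?, the .takeRetainedValue() at the append site would be dropped.

class func pixelBufferFromCGImage(image: CGImage) -> CVPixelBuffer? {
    let width = CGImageGetWidth(image)
    let height = CGImageGetHeight(image)
    let options: [String: AnyObject] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]
    var pxbuffer: CVPixelBuffer?
    let status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                     kCVPixelFormatType_32ARGB, options, &pxbuffer)
    guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

    CVPixelBufferLockBaseAddress(buffer, 0)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    // Use the buffer's own bytes-per-row; CoreVideo may pad rows beyond 4 * width.
    let context = CGBitmapContextCreate(CVPixelBufferGetBaseAddress(buffer),
                                        width, height, 8,
                                        CVPixelBufferGetBytesPerRow(buffer),
                                        rgbColorSpace,
                                        CGImageAlphaInfo.PremultipliedFirst.rawValue)
    CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(width), CGFloat(height)), image)
    CVPixelBufferUnlockBaseAddress(buffer, 0)
    return buffer
}

class func getAdapterDictionary() -> [String: AnyObject] {
    return [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
}

class func getAudioDictionary() -> [String: AnyObject] {
    var acl = AudioChannelLayout()
    memset(&acl, 0, sizeof(AudioChannelLayout))
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono
    return [
        AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100.0,
        AVNumberOfChannelsKey: 1,
        AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))
    ]
}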
When I used this approach, the duration of the saved video came out in hours.
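That duration symptom fits how the writer session is started: startSessionAtSourceTime(starTime) runs in recordFunc before recording begins, so the session start can lag far behind the capture-clock timestamps the appended frames actually carry, and the file's timeline stretches accordingly. A common fix, sketched here against the same Swift 2 code with a hypothetical sessionStarted flag (and with the startSessionAtSourceTime call removed from recordFunc), is to start the session from the first buffer that arrives while recording:

var sessionStarted = false  // hypothetical flag; reset to false when recording stops

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    if record && !sessionStarted {
        // Anchor the writer's timeline to the first recorded frame so the
        // file starts at zero rather than at the raw capture-clock value.
        videoWriter.startSessionAtSourceTime(pts)
        sessionStarted = true
    }
    starTime = pts
    // ... then append video/audio exactly as in the delegate above ...
}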