2017-08-10 1 views
10

Was verursacht, dass AVMutableComposition die Größe des Videos drastisch erhöht? – iOS, Swift, AVFoundation

Angenommen, wir haben zwei Video-Assets (AVAsset-Objekte), nennen wir sie „blank" und „main". „main" ist ein Video von begrenzter, zufälliger Länge (sagen wir 2–5 Minuten), „blank" ist immer ein 4-Sekunden-Video. Wir möchten die Videos in der folgenden Reihenfolge zusammenfügen:

blank - main - blank

// Create AVMutableComposition Object. This object will hold our multiple AVMutableCompositionTrack.

    let mixComposition = AVMutableComposition()

    // Concatenation order: blank - main - blank.
    let assets = [blank, main, blank]
    // Running insertion cursor / accumulated duration of everything inserted so far.
    // NOTE(review): CMTimeMake(0, 0) has timescale 0, which is an invalid CMTime;
    // both values are overwritten before first use, but kCMTimeZero would be the
    // safe initializer.
    var totalTime : CMTime = CMTimeMake(0, 0)
    var atTimeM: CMTime = CMTimeMake(0, 0)

    Utils.log([blank.duration, main.duration])

    // VIDEO TRACK
    // A single mutable video track; all three assets are appended into it back to back.
    let videoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: Int32(kCMPersistentTrackID_Invalid))

    for (index,asset) in assets.enumerated() {

     do {

      if index == 0 {
       atTimeM = kCMTimeZero
      } else {
       atTimeM = totalTime // <-- Use the total time for all the videos seen so far.
      }

      // Insert the asset's full first video track at the current cursor position.
      try videoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: asset.tracks(withMediaType: AVMediaTypeVideo)[0], at: atTimeM)

     } catch let error as NSError {
      // Insertion failures are logged but not propagated; the loop keeps going,
      // so a failed segment leaves a gap rather than aborting the merge.
      Utils.log("error: \(error)")
     }

     totalTime = CMTimeAdd(totalTime, asset.duration)
    }

    // AUDIO TRACK
    // Only the main asset's audio is used; it is offset by blank.duration so it
    // lines up with the main video segment (the blanks stay silent).
    let audioTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: kCMPersistentTrackID_Invalid)
    do {
     try audioTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, main.duration), of: main.tracks(withMediaType: AVMediaTypeAudio)[0], at: blank.duration)
    } catch _ {
     completionHandler(nil, ErrorType(rawValue: "Unable to add audio in composition."))
     return
    }

    // NOTE(review): if a file already exists at this URL the export will fail;
    // consider removing any stale file first.
    let outputURL = mainVideoObject.getDirectoryURL()?.appendingPathComponent("video-with-blank.mp4")

    guard let exporter = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPreset1280x720) else {
     completionHandler(nil, ErrorType(rawValue: "Unable to create export session."))
     return
    }

    // One instruction covering the whole timeline (blank + main + blank).
    let mainInstruction = AVMutableVideoCompositionInstruction()

    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(blank.duration, CMTimeAdd(main.duration, blank.duration)))

    // Fixing orientation
    // NOTE(review): all three layer instructions reference the SAME composition
    // track (videoTrack). Video-composition instructions are matched by track ID,
    // so stacking three instructions on one track may not behave as intended —
    // the usual pattern is one composition track per layer instruction. Verify.
    let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let firstAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    firstLayerInstruction.setTransform(firstAssetTrack.preferredTransform, at: kCMTimeZero)
    firstLayerInstruction.setOpacity(0.0, at: blank.duration)

    let secondLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let secondAssetTrack = main.tracks(withMediaType: AVMediaTypeVideo)[0]
    // Detect a 90°/-90° rotation in the main asset's preferred transform
    // (a == d == 0 with b/c == ±1 is the portrait rotation matrix).
    var isSecondAssetPortrait = false
    let secondTransform = secondAssetTrack.preferredTransform
    if (secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0) {
     isSecondAssetPortrait = true
    }
    if (secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0) {
     isSecondAssetPortrait = true
    }
    secondLayerInstruction.setTransform(secondAssetTrack.preferredTransform, at: blank.duration)
    secondLayerInstruction.setOpacity(0.0, at: CMTimeAdd(blank.duration, main.duration))

    let thirdLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let thirdAssetTrack = blank.tracks(withMediaType: AVMediaTypeVideo)[0]
    thirdLayerInstruction.setTransform(thirdAssetTrack.preferredTransform, at: CMTimeAdd(blank.duration, main.duration))

    mainInstruction.layerInstructions = [firstLayerInstruction, secondLayerInstruction, thirdLayerInstruction]

    // Swap width/height when the main asset is portrait so renderSize matches
    // the displayed orientation.
    var naturalSize = CGSize()
    if(isSecondAssetPortrait) {
     naturalSize = CGSize(width: secondAssetTrack.naturalSize.height, height: secondAssetTrack.naturalSize.width)
    } else {
     naturalSize = secondAssetTrack.naturalSize
    }

    let renderWidth = naturalSize.width
    let renderHeight = naturalSize.height

    let mainCompositionInst = AVMutableVideoComposition()
    mainCompositionInst.instructions = [mainInstruction]
    mainCompositionInst.frameDuration = CMTimeMake(1, 30)
    mainCompositionInst.renderSize = CGSize(width: renderWidth, height: renderHeight)

    exporter.outputURL = outputURL
    exporter.outputFileType = AVFileTypeMPEG4
    // NOTE(review): assigning a videoComposition forces AVAssetExportSession to
    // decode and re-encode every frame at the preset's bitrate instead of
    // pass-through copying — presumably this re-encode is what inflates the
    // output size and processing time. Confirm by exporting once without a
    // videoComposition.
    exporter.videoComposition = mainCompositionInst
    //exporter.shouldOptimizeForNetworkUse = true

    exporter.exportAsynchronously {
     if exporter.status == .completed {
      completionHandler(AVAsset(url: outputURL!), nil)
     } else {
      completionHandler(nil, ErrorType(rawValue: "Unable to export video."))
      if let error = exporter.error {
       Utils.log("Unable to export video. \(error)")
      }
     }
    }

Da das Originalvideo für 5 Minuten in 720p-Qualität ca. 200 MB Speicherplatz benötigt, sollte das Hinzufügen des 4-sekündigen leeren Videos am Anfang und am Ende des Hauptvideos die Größe nicht drastisch ändern, und die Verarbeitung sollte sehr schnell sein.

Das Ergebnis ist jedoch ein Video, das 2- bis 2,5-mal so groß ist wie das ursprüngliche Video (also 400–500 MB), und die Verarbeitung dauert zu lange.

Bitte um Rat.

Danke

+0

Können Sie mir bitte die leere Videodatei senden, die Sie zum Anhängen verwenden? – MinuMaster

Antwort

1

Hier habe ich eine benutzerdefinierte Klasse vorbereitet, der Sie nur die Namen Ihrer Videos übergeben müssen; die Videodateien müssen im Bundle liegen. Sobald Sie Ihre App ausführen, wird eine neue Videodatei gemäß Ihren Anforderungen erstellt und im Dokumentenverzeichnis der Anwendung abgelegt.

Mit Swift 4 habe ich diese Demo vorbereitet:

// 
// ViewController.swift 
// SOVideoMergingDemo 
// 
// Created by iOS Test User on 03/01/18. 
// Copyright © 2018 Test User. Ltd. All rights reserved. 
// 

import UIKit 
import AVFoundation 
import MediaPlayer 
import Photos 
import AssetsLibrary 
import AVKit 


/// Demo view controller that concatenates three bundled clips
/// (video1 - video2 - video1) into a single MP4 in the Documents directory.
class ViewController : UIViewController {

    //--------------------------------------------------
    //MARK:
    //MARK: - IBOutlets
    //--------------------------------------------------




    //--------------------------------------------------
    //MARK:
    //MARK: - Properties
    //--------------------------------------------------

    // Source clip URLs (not used by the visible code; kept for compatibility).
    var videoUrls : [URL]  = []
    // Assets to merge, in playback order: [video1, video2, video1].
    var arrVideoAsset : [AVAsset] = []
    // Bundle resource names (without extension) of the two source clips.
    let video1 = "1"
    let video2 = "2"
    // File name of the merged output inside the Documents directory.
    let outPutVideo = "MergedVideo.mp4"

    // NOTE(review): never used in this class; kept so any external references
    // keep compiling.
    let semaphore = DispatchSemaphore(value: 1)


    //--------------------------------------------------
    //MARK:
    //MARK: - Custom Methods
    //--------------------------------------------------

    /// Returns the file URL of an mp4 resource in the main bundle.
    /// - Parameter forVideo: resource name without the ".mp4" extension.
    ///   The resource must exist; a missing file is a programmer error and
    ///   crashes via the force-unwrap.
    func getVideoURL(forVideo : String) -> URL {
        let videoPath = Bundle.main.path(forResource: forVideo, ofType:"mp4")
        let vidURL = URL(fileURLWithPath: videoPath!)
        return vidURL
    }

    //--------------------------------------------------

    /// Merges the three assets back to back into one composition and exports
    /// it asynchronously to Documents/<outPutVideo>.
    /// - Parameter arrVideoAsset: exactly three assets in playback order.
    func mergeVideos(arrVideoAsset : [AVAsset]) {

        let mixComposition = AVMutableComposition()

        // One composition video track per clip so each can get its own
        // layer instruction (transform + opacity ramp).
        let firstTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let secondTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        let thirdTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)

        // Segment boundaries on the composition timeline.
        let firstDuration = arrVideoAsset[0].duration
        let secondStart = firstDuration
        // FIX: the third clip must start after clip 1 AND clip 2, not at
        // arrVideoAsset[1].duration (which overlapped the main clip).
        let thirdStart = CMTimeAdd(firstDuration, arrVideoAsset[1].duration)

        do {
            try firstTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[0].duration), of: arrVideoAsset[0].tracks(withMediaType: .video)[0], at: kCMTimeZero)
        } catch _ {
            print("Failed to load first track")
        }

        do {
            try secondTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[1].duration), of: arrVideoAsset[1].tracks(withMediaType: .video)[0], at: secondStart)
        } catch _ {
            print("Failed to load second track")
        }

        do {
            try thirdTrack?.insertTimeRange(CMTimeRangeMake(kCMTimeZero, arrVideoAsset[2].duration), of: arrVideoAsset[2].tracks(withMediaType: .video)[0], at: thirdStart)
        } catch _ {
            // FIX: message said "second track".
            print("Failed to load third track")
        }

        // Single instruction spanning the whole merged timeline.
        let compositionInstruction = AVMutableVideoCompositionInstruction()
        compositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(thirdStart, arrVideoAsset[2].duration))

        // Hide each layer as soon as its clip ends so the next track shows.
        let firstInstruction = videoCompositionInstructionForTrack(firstTrack!, asset: arrVideoAsset[0])
        firstInstruction.setOpacity(0.0, at: firstDuration)
        let secondInstruction = videoCompositionInstructionForTrack(secondTrack!, asset: arrVideoAsset[1])
        // FIX: the second clip ends at firstDuration + its own duration;
        // fading it at arrVideoAsset[1].duration hid it mid-playback.
        secondInstruction.setOpacity(0.0, at: thirdStart)
        let thirdInstruction = videoCompositionInstructionForTrack(thirdTrack!, asset: arrVideoAsset[2])

        compositionInstruction.layerInstructions = [firstInstruction, secondInstruction, thirdInstruction]

        // Render size: the smaller of the first two tracks' dimensions.
        // Changing these values directly affects the merged video's size.
        let height = (Float((firstTrack?.naturalSize.height)!) < Float((secondTrack?.naturalSize.height)!)) ? firstTrack?.naturalSize.height : secondTrack?.naturalSize.height

        let width = (Float((firstTrack?.naturalSize.width)!) < Float((secondTrack?.naturalSize.width)!)) ? firstTrack?.naturalSize.width : secondTrack?.naturalSize.width

        let mainComposition = AVMutableVideoComposition()
        mainComposition.instructions = [compositionInstruction]
        mainComposition.frameDuration = CMTimeMake(1, 30)
        mainComposition.renderSize = CGSize(width:width!,height: height!)

        let exporter = AVAssetExportSession(asset:mixComposition, presetName: AVAssetExportPresetHighestQuality)
        exporter?.outputURL = URL(fileURLWithPath: getDocumentDirectoryPath() + "/" + outPutVideo)
        exporter?.outputFileType = AVFileType.mp4
        exporter?.shouldOptimizeForNetworkUse = true
        exporter?.videoComposition = mainComposition
        print(self.getDocumentDirectoryPath())

        exporter?.exportAsynchronously(completionHandler: {
            DispatchQueue.main.async {
                if exporter?.status == AVAssetExportSessionStatus.completed {
                    // FIX: the exporter already wrote the file to outputURL;
                    // the previous code re-read the entire video into memory
                    // and rewrote it to the same path (a memory-heavy no-op
                    // that could fail for large videos).
                    print("Merged video saved at \(String(describing: exporter?.outputURL))")

                    //Uncomment This If you want to save video in Photos Library
//     PHPhotoLibrary.shared().performChanges({
//      PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: (exporter?.outputURL)!)
//     }, completionHandler: { (success, error) in
//      if success {
//       let fetchOptions = PHFetchOptions()
//       fetchOptions.sortDescriptors = [NSSortDescriptor.init(key:"creationDate", ascending: false)]
//       _ = PHAsset.fetchAssets(with: .video, options:fetchOptions).firstObject
//      } else {
//       print("Error in Saving File in Photo Libaray -> \(String(describing: error?.localizedDescription))")
//      }
//     })
                } else {
                    print("Error -> \(String(describing: exporter?.error?.localizedDescription))")
                }
            }
        })

    }

    //--------------------------------------------------

    /// Builds a layer instruction for `track`, applying the source asset's
    /// preferred transform so recorded orientation is preserved.
    func videoCompositionInstructionForTrack(_ track: AVCompositionTrack, asset: AVAsset) -> AVMutableVideoCompositionLayerInstruction {
        let instruction = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
        let assetTrack = asset.tracks(withMediaType: AVMediaType.video)[0]
        // Identity scale kept as an extension point for custom scaling.
        let scale : CGAffineTransform = CGAffineTransform(scaleX: 1, y:1)
        instruction.setTransform(assetTrack.preferredTransform.concatenating(scale), at: kCMTimeZero)
        return instruction
    }

    //--------------------------------------------------

    /// Path of the app's Documents directory.
    func getDocumentDirectoryPath() -> String {
        let arrPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        return arrPaths[0]
    }

    //--------------------------------------------------
    //MARK:
    //MARK: - View Life Cycle Methods
    //--------------------------------------------------

    override func viewDidLoad() {
        super.viewDidLoad()

        //Prepare Video Assets in playback order: blank - main - blank.
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video2)))
        arrVideoAsset.append(AVAsset(url:getVideoURL(forVideo:video1)))

        //Merge this Videos
        mergeVideos(arrVideoAsset:arrVideoAsset)
    }
}
Verwandte Themen