
Overlay video in AVFoundation

AVFoundation is not adding my overlay to the video, and I am not sure what I am doing wrong. I tried making the overlay completely white, but it never shows up on the video. When the video plays back, it must be playing the AVMutableComposition track rather than the export.videoComposition I am adding. I am not experienced enough with AVFoundation to know what is going wrong.

AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init]; 

// 3 - Video track 
AVMutableCompositionTrack *videoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo 
                    preferredTrackID:kCMPersistentTrackID_Invalid]; 
// [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero,self.videoAsset.duration) 
//      ofTrack:[[self.videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] 
//       atTime:kCMTimeZero error:nil]; 
CMTime insertTime = kCMTimeZero;
for (AVURLAsset *videoAsset in self.videoArray) {
    [videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                        ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                         atTime:insertTime
                          error:nil];

    // Advance the insertion point for the next clip
    insertTime = CMTimeAdd(insertTime, videoAsset.duration);
}
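
// Note: only the video tracks of the source assets are inserted here. If the
// clips contain audio, a parallel AVMutableCompositionTrack of type
// AVMediaTypeAudio would be needed as well.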

// 3.1 - Create AVMutableVideoCompositionInstruction 
AVMutableVideoCompositionInstruction *mainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 
mainInstruction.timeRange = videoTrack.timeRange; 

// 3.2 - Create an AVMutableVideoCompositionLayerInstruction for the video track and fix the orientation. 
AVMutableVideoCompositionLayerInstruction *videolayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack]; 
AVAssetTrack *videoAssetTrack = [[videoTrack.asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
UIImageOrientation videoAssetOrientation_ = UIImageOrientationUp; 
BOOL isVideoAssetPortrait_ = NO; 
CGAffineTransform videoTransform = videoAssetTrack.preferredTransform; 
if (videoTransform.a == 0 && videoTransform.b == 1.0 && videoTransform.c == -1.0 && videoTransform.d == 0) { 
    videoAssetOrientation_ = UIImageOrientationRight; 
    isVideoAssetPortrait_ = YES; 
} 
if (videoTransform.a == 0 && videoTransform.b == -1.0 && videoTransform.c == 1.0 && videoTransform.d == 0) { 
    videoAssetOrientation_ = UIImageOrientationLeft; 
    isVideoAssetPortrait_ = YES; 
} 
if (videoTransform.a == 1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == 1.0) { 
    videoAssetOrientation_ = UIImageOrientationUp; 
} 
if (videoTransform.a == -1.0 && videoTransform.b == 0 && videoTransform.c == 0 && videoTransform.d == -1.0) { 
    videoAssetOrientation_ = UIImageOrientationDown; 
} 
[videolayerInstruction setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero]; 
[videolayerInstruction setOpacity:0.0 atTime:videoTrack.timeRange.duration]; 
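// Setting opacity to 0.0 at the end of the track's time range is the usual
// pattern when stitching clips: it hides this track once its segment ends.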

// 3.3 - Add instructions 
mainInstruction.layerInstructions = [NSArray arrayWithObjects:videolayerInstruction,nil]; 

AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition]; 

CGSize naturalSize; 
if(isVideoAssetPortrait_){ 
    naturalSize = CGSizeMake(videoAssetTrack.naturalSize.height, videoAssetTrack.naturalSize.width); 
} else { 
    naturalSize = videoAssetTrack.naturalSize; 
} 

float renderWidth, renderHeight; 
renderWidth = naturalSize.width; 
renderHeight = naturalSize.height; 
mainCompositionInst.renderSize = CGSizeMake(renderWidth, renderHeight); 
mainCompositionInst.instructions = [NSArray arrayWithObject:mainInstruction]; 
mainCompositionInst.frameDuration = CMTimeMake(1, 30); 

    // [self applyVideoEffectsToComposition:mainCompositionInst size:naturalSize]; 
    // 1 - set up the overlay 
    CALayer *overlayLayer = [CALayer layer]; 
    UIImage *overlayImage = [UIImage imageNamed:@"overlayImage.png"];
    //overlayLayer.backgroundColor = [UIColor whiteColor].CGColor;


    [overlayLayer setContents:(id)[overlayImage CGImage]]; 
    overlayLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height); 
    [overlayLayer setMasksToBounds:YES]; 

    // 2 - set up the parent layer 
    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 
    parentLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height); 
    videoLayer.frame = CGRectMake(0, 0, videoTrack.naturalSize.width, videoTrack.naturalSize.height); 
    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:overlayLayer]; 

    // 3 - apply magic 
    mainCompositionInst.animationTool = [AVVideoCompositionCoreAnimationTool 
           videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 
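
    // Two things worth checking here:
    // 1) AVVideoCompositionCoreAnimationTool is only honored during offline
    //    rendering (e.g. AVAssetExportSession with videoComposition set). An
    //    AVPlayer playing the bare AVMutableComposition will never show the
    //    overlay; for a live preview you would set the video composition on
    //    the AVPlayerItem (without the animation tool) and host the overlay
    //    in an AVSynchronizedLayer instead.
    // 2) videoTrack here is a composition track; it is worth logging its
    //    naturalSize, because an empty size would give the overlay and
    //    parent layers a zero frame.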


// 4 - Get path 
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 
NSString *documentsDirectory = [paths objectAtIndex:0]; 
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent: 
         [NSString stringWithFormat:@"FinalVideo-%d.mov",arc4random() % 1000]]; 
NSURL *url = [NSURL fileURLWithPath:myPathDocs]; 

// 5 - Create exporter 
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition 
                    presetName:AVAssetExportPresetHighestQuality]; 
exporter.outputURL=url; 
exporter.outputFileType = AVFileTypeQuickTimeMovie; 
exporter.shouldOptimizeForNetworkUse = YES; 
exporter.videoComposition = mainCompositionInst; 
[exporter exportAsynchronouslyWithCompletionHandler:^{ 
    dispatch_async(dispatch_get_main_queue(), ^{ 
     //[self exportDidFinish:exporter]; 
     //do stuff 

    }); 
}]; 

@VijayKachhadiya Hey, I tried the tutorial you posted, but it still doesn't work. This is my code. Thought you might be able to help. – csweeney285

Answer


I am not an expert in Objective-C. I am currently working on an iOS project (in Swift 3) that needs a text overlay on a video for a specific time range. The code below works perfectly in my project; you can try it. The video is written to the Documents directory, so you have to delete the existing temporary video file when exporting. At the end, the code exports the video to my photo library, so you need to import the Photos framework.

    let composition = AVMutableComposition()
    let asset = AVURLAsset(url: urlVideo!, options: nil)

    let track = asset.tracks(withMediaType: AVMediaTypeVideo) 
    let videoTrack:AVAssetTrack = track[0] as AVAssetTrack 
    let timerange = CMTimeRangeMake(kCMTimeZero, asset.duration) 

    let compositionVideoTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: CMPersistentTrackID()) 

    do { 
     try compositionVideoTrack.insertTimeRange(timerange, of: videoTrack, at: kCMTimeZero) 
     compositionVideoTrack.preferredTransform = videoTrack.preferredTransform 
    } catch { 
     print(error) 
    } 

    let compositionAudioTrack:AVMutableCompositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeAudio, preferredTrackID: CMPersistentTrackID()) 

    for audioTrack in asset.tracks(withMediaType: AVMediaTypeAudio) { 
     do { 
      try compositionAudioTrack.insertTimeRange(audioTrack.timeRange, of: audioTrack, at: kCMTimeZero) 
     } catch { 
      print(error) 
     } 

    } 

    let size = videoTrack.naturalSize 

    //print("Size of vieo: \(size)") 

    let textLayer = CATextLayer() 
    textLayer.string = "Text Overlay Test" 
    textLayer.fontSize = 35 
    textLayer.font = "Baskerville-BoldItalic" as CFTypeRef 
    textLayer.foregroundColor = UIColor.red.cgColor 

    textLayer.opacity = 0 
    textLayer.alignmentMode = kCAAlignmentCenter 


    // Layer coordinates in this rendering context have a bottom-left origin,
    // so y: 100 places the text 100 points above the bottom edge of the video.
    textLayer.frame = CGRect(x: 0, y: 100, width: size.width, height: size.height / 6)

    // Show the text only during a time window: the layer's base opacity is 0,
    // and this animation holds opacity at 1.0 from t = 3 s for 10 s. Despite
    // the name, it does not actually fade (fromValue == toValue).
    let fadeInAnimation = CABasicAnimation(keyPath: "opacity")
    fadeInAnimation.duration = 10
    fadeInAnimation.fromValue = 1.0
    fadeInAnimation.toValue = 1.0
    fadeInAnimation.beginTime = 3
    fadeInAnimation.isRemovedOnCompletion = false

    textLayer.add(fadeInAnimation, forKey: "opacity")


    // video layer making 

    let videolayer = CALayer() 
    videolayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height) 

    let parentlayer = CALayer() 
    parentlayer.frame = CGRect(x: 0, y: 0, width: size.width, height: size.height) 
    parentlayer.addSublayer(videolayer) 
    parentlayer.addSublayer(textLayer) 

    // adding the magic 
    let layercomposition = AVMutableVideoComposition() 
    layercomposition.frameDuration = CMTimeMake(1, 30) 
    layercomposition.renderSize = size 
    layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videolayer, in: parentlayer) 

    let instruction = AVMutableVideoCompositionInstruction() 
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, composition.duration) 
    let videotrack = composition.tracks(withMediaType: AVMediaTypeVideo)[0] as AVAssetTrack 
    let layerinstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videotrack) 
    instruction.layerInstructions = [layerinstruction] 
    layercomposition.instructions = [instruction] 
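
    // Note: the layer instruction above applies no transform, so a portrait
    // source may come out rotated in the export. If that happens, applying
    // videoTrack.preferredTransform via layerinstruction.setTransform(_:at:)
    // is the usual fix.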

    // create new file to receive data 
    let mergedVideoURL = NSURL(fileURLWithPath: NSHomeDirectory() + "/Documents/mergedVideo.mp4") 

    // must delete existing temporary file from file directory, also use try catch 
    do {
        try FileManager.default.removeItem(at: mergedVideoURL as URL)
        print("Removed existing temporary file")
    } catch {
        print("No existing temporary file to remove: \(error)")
    }

    // use AVAssetExportSession to export video 
    guard let assetExport = AVAssetExportSession(asset: composition, presetName:AVAssetExportPresetMediumQuality) else {return} 
    assetExport.videoComposition = layercomposition 
    assetExport.outputFileType = AVFileTypeMPEG4 
    assetExport.outputURL = mergedVideoURL as URL 
    assetExport.exportAsynchronously(completionHandler: { 
     switch assetExport.status{ 
     case AVAssetExportSessionStatus.failed: 
      print("failed") 
     case AVAssetExportSessionStatus.cancelled: 
      print("cancelled") 
     default: 
      print("Exported") 
      PHPhotoLibrary.shared().performChanges({ 
       PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: mergedVideoURL as URL) 
      }) { saved, error in 
       if saved { 
        let alertController = UIAlertController(title: "Your video was successfully saved", message: nil, preferredStyle: .alert) 
        let defaultAction = UIAlertAction(title: "OK", style: .default, handler: nil) 
        alertController.addAction(defaultAction) 
        self.present(alertController, animated: true, completion: nil) 
       } 
      } 


     } 
    }) 
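
A couple of small hardening points for the export callback, as a sketch rather than a drop-in: AVAssetExportSession reports the failure reason in its error property, the photo-library completion handler is not guaranteed to run on the main thread (so the alert should be dispatched there), and saving requires photo-library authorization plus an NSPhotoLibraryUsageDescription entry in Info.plist. Something along these lines, assuming the same assetExport and mergedVideoURL names from above:

    assetExport.exportAsynchronously {
        switch assetExport.status {
        case .failed, .cancelled:
            // The session's error explains why the export did not complete
            print("Export failed: \(String(describing: assetExport.error))")
        default:
            PHPhotoLibrary.requestAuthorization { status in
                guard status == .authorized else { return }
                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: mergedVideoURL as URL)
                }) { saved, error in
                    // This completion handler may run off the main thread,
                    // so hop to the main queue before touching UIKit
                    DispatchQueue.main.async {
                        guard saved else { return }
                        let alert = UIAlertController(title: "Your video was successfully saved",
                                                      message: nil, preferredStyle: .alert)
                        alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
                        self.present(alert, animated: true, completion: nil)
                    }
                }
            }
        }
    }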