Chroma-keying video with GPUImage on an SCNPlane in ARKit

I am trying to play a video with transparency in an ARSCNView. An SCNPlane is used as the projection surface for the video, and I am trying to chroma-key that video with GPUImage.

I followed this example. Unfortunately, I have found no way to project the filtered video back onto my videoSpriteKitNode, because the filter renders into a GPUImageView, while the SKVideoNode takes an AVPlayer.

I am not sure whether what I am attempting is even possible, so if anyone could share their insights I would be very grateful! Edit: I have since managed to chroma-key and play a now-transparent video in the ARSCNView, but it is still a somewhat patchy solution.

import UIKit
import ARKit
import GPUImage

class ARTransVC: UIViewController {

    @IBOutlet weak var sceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()
    var movie: GPUImageMovie!
    var filter: GPUImageChromaKeyBlendFilter!
    var sourcePicture: GPUImagePicture!
    var player = AVPlayer()
    var gpuImageView: GPUImageView!

    override func viewDidLoad() {
        super.viewDidLoad()
        self.sceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
        self.sceneView.session.run(configuration)

        self.gpuImageView = GPUImageView()
        self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false

        // A delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.sceneView.frame.width, height: self.sceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit

            // Create a video player, which will be responsible for the playback of the video material
            guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { return }
            let playerItem = AVPlayerItem(url: url)
            self.player.replaceCurrentItem(with: playerItem)

            // Chroma-key filter: replaces the given color, blended over a second input
            self.filter = GPUImageChromaKeyBlendFilter()
            self.filter.thresholdSensitivity = 0.15
            self.filter.smoothing = 0.3
            self.filter.setColorToReplaceRed(0.322, green: 0.455, blue: 0.831)

            self.movie = GPUImageMovie(playerItem: playerItem)
            self.movie.playAtActualSpeed = true
            self.movie.addTarget(self.filter)
            self.movie.startProcessing()

            // A transparent image as the blend background, so keyed pixels stay transparent
            let backgroundImage = UIImage(named: "transparent.png")
            self.sourcePicture = GPUImagePicture(image: backgroundImage, smoothlyScaleOutput: true)!
            self.sourcePicture.addTarget(self.filter)
            self.sourcePicture.processImage()

            // HERE: DON'T KNOW HOW TO CONTINUE?
            // The filter can only render into a GPUImageView, not into the SKVideoNode below.
            self.filter.addTarget(self.gpuImageView)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.sceneView.scene = scene
            self.sceneView.isPlaying = true

            // Create a SceneKit plane and add the SpriteKit scene as its material
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene
            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry?.firstMaterial?.isDoubleSided = true

            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}

Answers

Try clearing the background and setting the scale mode:

backgroundColor = .clear 
scaleMode = .aspectFit 
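
Applied to the code above, that means configuring the SKScene right after it is created, so keyed-out pixels are not composited over an opaque scene background. A minimal sketch, reusing the names from the question:

let spriteKitScene = SKScene(size: CGSize(width: sceneView.frame.width,
                                          height: sceneView.frame.height))
spriteKitScene.scaleMode = .aspectFit
spriteKitScene.backgroundColor = .clear   // transparent pixels show through the material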

That put me on the right track! Thanks – Felix


I stepped away from my earlier approach and implemented ChromaKeyMaterial by Lësha Turkowski!

Here it is, adjusted to the color I wanted to key out:

import SceneKit

public class ChromaKeyMaterial: SCNMaterial {

    public var backgroundColor: UIColor {
        didSet { didSetBackgroundColor() }
    }

    public var thresholdSensitivity: Float {
        didSet { didSetThresholdSensitivity() }
    }

    public var smoothing: Float {
        didSet { didSetSmoothing() }
    }

    public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.15, smoothing: Float = 0.0) {

        self.backgroundColor = backgroundColor
        self.thresholdSensitivity = thresholdSensitivity
        self.smoothing = smoothing

        super.init()

        didSetBackgroundColor()
        didSetThresholdSensitivity()
        didSetSmoothing()

        // chroma key shader is based on GPUImage
        // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m

        let surfaceShader =
        """
        uniform vec3 c_colorToReplace;
        uniform float c_thresholdSensitivity;
        uniform float c_smoothing;

        #pragma transparent
        #pragma body

        vec3 textureColor = _surface.diffuse.rgb;

        // Convert both the key color and the texel to YCrCb and compare only the
        // chroma components, so keying is robust against brightness variation.
        float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
        float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
        float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

        float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
        float Cr = 0.7132 * (textureColor.r - Y);
        float Cb = 0.5647 * (textureColor.b - Y);

        // 0 inside the threshold (fully keyed out), ramping up to 1 over `smoothing`
        float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));

        float a = blendValue;
        _surface.transparent.a = a;
        """

        shaderModifiers = [
            .surface: surfaceShader,
        ]
    }

    required public init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    // Setting the background color to be keyed out
    private func didSetBackgroundColor() {
        // Getting the pixel values from the background color is commented out;
        // the key color is hardcoded for now.
        //let rgb = backgroundColor.cgColor.components!.map { Float($0) }
        //let vector = SCNVector3(x: rgb[0], y: rgb[1], z: rgb[2])
        let vector = SCNVector3(x: 0.216, y: 0.357, z: 0.663)
        setValue(vector, forKey: "c_colorToReplace")
    }

    private func didSetSmoothing() {
        setValue(smoothing, forKey: "c_smoothing")
    }

    private func didSetThresholdSensitivity() {
        setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
    }
}
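
If the key color should actually follow the backgroundColor property instead of the hardcoded vector, didSetBackgroundColor could derive the uniform from the color's components. A sketch, assuming the UIColor converts to RGBA:

private func didSetBackgroundColor() {
    // Sketch: feed the shader the actual backgroundColor instead of a constant
    var red: CGFloat = 0, green: CGFloat = 0, blue: CGFloat = 0, alpha: CGFloat = 0
    guard backgroundColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha) else { return }
    let vector = SCNVector3(x: Float(red), y: Float(green), z: Float(blue))
    setValue(vector, forKey: "c_colorToReplace")
}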

And here is the code that plays the keyed video on an SCNPlane in ARKit:

import UIKit
import ARKit

class ARTransVC: UIViewController {

    @IBOutlet weak var arSceneView: ARSCNView!
    let configuration = ARWorldTrackingConfiguration()

    private var player: AVPlayer = {
        guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
        return AVPlayer(url: url)
    }()

    override func viewDidLoad() {
        super.viewDidLoad()
        self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
        self.arSceneView.session.run(configuration)

        // A delay for ARKit to capture the surroundings
        DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

            // A SpriteKit scene to contain the SpriteKit video node
            let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height))
            spriteKitScene.scaleMode = .aspectFit
            spriteKitScene.backgroundColor = .clear

            // Create the SpriteKit video node, containing the video player
            let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
            videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
            videoSpriteKitNode.size = spriteKitScene.size
            videoSpriteKitNode.yScale = -1.0
            videoSpriteKitNode.play()
            spriteKitScene.addChild(videoSpriteKitNode)

            // To make the video loop
            self.player.actionAtItemEnd = .none
            NotificationCenter.default.addObserver(
                self,
                selector: #selector(ARTransVC.playerItemDidReachEnd),
                name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
                object: self.player.currentItem)

            // Create the SceneKit scene
            let scene = SCNScene()
            self.arSceneView.scene = scene

            // Create a SceneKit plane; the chroma key material samples the player directly
            // and replaces the default material (with the SpriteKit scene) set just below
            let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
            background.firstMaterial?.diffuse.contents = spriteKitScene
            let chromaKeyMaterial = ChromaKeyMaterial()
            chromaKeyMaterial.diffuse.contents = self.player
            chromaKeyMaterial.isDoubleSided = true

            let backgroundNode = SCNNode(geometry: background)
            backgroundNode.geometry!.materials = [chromaKeyMaterial]

            backgroundNode.position = SCNVector3(0, 0, -2.0)
            scene.rootNode.addChildNode(backgroundNode)

            // The video does not start without delaying the player;
            // playing it right away just results in
            // [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
            DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
                self.player.seek(to: CMTimeMakeWithSeconds(1, 1000))
                self.player.play()
            }
        }
    }

    @objc func playerItemDidReachEnd(notification: NSNotification) {
        if let playerItem = notification.object as? AVPlayerItem {
            playerItem.seek(to: kCMTimeZero, completionHandler: nil)
        }
    }
}
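
Since ChromaKeyMaterial takes the AVPlayer directly as its diffuse contents, the SpriteKit scene and video node above look redundant in this version. A stripped-down sketch of the same plane setup, under the untested assumption that nothing else depends on the SKScene:

// Sketch: same plane, same position, without the intermediate SpriteKit scene
let plane = SCNPlane(width: 1, height: 1)
let chromaKeyMaterial = ChromaKeyMaterial()
chromaKeyMaterial.diffuse.contents = player   // the same AVPlayer as above
chromaKeyMaterial.isDoubleSided = true
plane.materials = [chromaKeyMaterial]

let planeNode = SCNNode(geometry: plane)
planeNode.position = SCNVector3(0, 0, -2.0)
scene.rootNode.addChildNode(planeNode)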

I kept getting a [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef), which was apparently fixed in iOS 11.2. For now I settled on a rather patchy workaround: restarting the video after a one-second delay. A better approach to that would be much appreciated.
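
One alternative to the fixed delay might be to wait until the AVPlayerItem reports that it is ready, using block-based key-value observation. A sketch; whether this actually sidesteps the pixel-buffer error on pre-11.2 systems is untested:

// Sketch: start playback once the item is ready instead of after a fixed delay.
// Keep a strong reference to the observation, e.g. as a property on the view controller.
var statusObservation: NSKeyValueObservation?

statusObservation = player.currentItem?.observe(\.status, options: [.initial, .new]) { [weak self] item, _ in
    if item.status == .readyToPlay {
        self?.player.play()
    }
}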