Tags: ios, swift, sprite-kit, scenekit, arkit

ChromaKey video in ARKit


I am trying to chroma key a video in ARKit, and I have done very similar to what @Felix has done here: GPUImageView inside SKScene as SKNode material - Playing transparent video on ARKit

But, when the video is supposed to show up (in this case, when an AR reference image is detected) I get a [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef) error and the video no longer plays. It did play before I implemented the chromaKeyMaterial. Here is my code, starting from just after the AR reference image is detected:

// Question code: runs on the main queue after an AR reference image is detected.
// NOTE(review): this is the version that produces
// "[SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)" — see the fixes below.
DispatchQueue.main.async {
// NOTE(review): force-unwrapping filePath! crashes if "wigz.mp4" is missing from the bundle.
let filePath = Bundle.main.path(forResource: "wigz", ofType: "mp4")
let videoURL = NSURL(fileURLWithPath: filePath!)
let player = AVPlayer(url: videoURL as URL)

// SpriteKit scene that hosts the video node; intended as the plane's diffuse contents.
let spriteKitScene = SKScene(size: CGSize(width: 640, height: 480))
let videoSpriteKitNode = SKVideoNode(avPlayer: player)
let videoNode = SCNNode()
// Plane sized to the detected reference image, rotated to lie flat on it.
videoNode.geometry = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width,
              height: imageAnchor.referenceImage.physicalSize.height)
videoNode.eulerAngles = SCNVector3(-Float.pi/2, 0, 0)

// Use spritekit with videonode inside
spriteKitScene.scaleMode = .aspectFit
videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2,
                      y: spriteKitScene.size.height / 2)
videoSpriteKitNode.size = spriteKitScene.size
// SpriteKit's y-axis is flipped relative to the SceneKit texture; invert vertically.
videoSpriteKitNode.yScale = -1.0
// NOTE(review): playing here, before the chroma-key material is attached, is what
// triggers the CVPixelBufferRef error — the accepted answer delays play() instead.
videoSpriteKitNode.play()

// Loop video
NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
    player.seek(to: kCMTimeZero)
    player.play()
}

spriteKitScene.addChild(videoSpriteKitNode)

// NOTE(review): these two assignments are dead — the materials array is replaced
// three lines down, discarding both the SKScene contents and isDoubleSided.
videoNode.geometry?.firstMaterial?.diffuse.contents = spriteKitScene
videoNode.geometry?.firstMaterial?.isDoubleSided = true
// The chroma-key material supplies the player directly; the SKScene route above is unused.
let chromaKeyMaterial = ChromaKeyMaterial()
chromaKeyMaterial.diffuse.contents = player
videoNode.geometry!.materials = [chromaKeyMaterial]

node.addChildNode(videoNode)

self.imageDetectView.scene.rootNode.addChildNode(node)
}

In the ChromaKeyMaterial.swift file, i have changed these lines to:

// NOTE(review): these weights replace the standard RGB->YCrCb luminance weights
// (0.2989/0.5866/0.1145) with 0/1/0 — i.e. Y is taken as the green channel alone.
// The accepted answer below keeps the standard weights and keys on distance in (Cr, Cb).
float maskY = 0.0 * c_colorToReplace.r + 1.0 * c_colorToReplace.g + 0.0 * c_colorToReplace.b;
float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

// Same non-standard weights applied to the sampled texture color.
float Y = 0.0 * textureColor.r + 1.0 * textureColor.g + 0.0 * textureColor.b;
float Cr = 0.7132 * (textureColor.r - Y);
float Cb = 0.5647 * (textureColor.b - Y);

This was an effort to chroma key out pure green, but I am not sure whether it is the correct approach.

Any help would be greatly appreciated!


Solution

  • Figured this out. I was setting my color to key out incorrectly (and even in the wrong place facepalm) and there seems to be a bug that prevents the video from playing unless you delay it a bit. That bug was supposedly fixed but it seems to not be the case.

    Here is my corrected and cleaned up code if anyone is interested (EDITED TO INCLUDE TIP FROM @mnuages) :

    // Get the video URL and create an AVPlayer for it.
    // Avoid force-unwrapping the bundle path: bail out cleanly if the resource is missing.
    guard let videoURL = Bundle.main.url(forResource: "VIDEO_FILE_NAME",
                                         withExtension: "VIDEO_FILE_EXTENSION") else {
        print("Video resource not found in bundle")
        return
    }
    let player = AVPlayer(url: videoURL)

    // Create the SceneKit node that will display the video.
    let videoNode = SCNNode()

    // Set the node's geometry to a plane sized to the detected reference image,
    // and rotate it to lie flat against the image.
    videoNode.geometry = SCNPlane(width: imageAnchor.referenceImage.physicalSize.width,
                  height: imageAnchor.referenceImage.physicalSize.height)
    videoNode.eulerAngles = SCNVector3(-Float.pi/2, 0, 0)

    // Alpha transparency: the chroma-key material takes the AVPlayer as its
    // diffuse contents, so there is no need to also assign the player to the
    // plane's default material — that material is discarded when the materials
    // array is replaced below.
    let chromaKeyMaterial = ChromaKeyMaterial()
    chromaKeyMaterial.diffuse.contents = player
    // isDoubleSided must be set on the chroma-key material itself; setting it on
    // the plane's default firstMaterial would be lost when materials is replaced.
    chromaKeyMaterial.isDoubleSided = true
    videoNode.geometry!.materials = [chromaKeyMaterial]

    // The video does not start without delaying the player slightly.
    // Playing it immediately results in [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef).
    // NOTE(review): seeking to CMTimeMakeWithSeconds(1, 1000) jumps 1 second into
    // the clip — confirm that skipping the first second is intentional.
    DispatchQueue.main.asyncAfter(deadline: .now() + 0.001) {
        player.seek(to: CMTimeMakeWithSeconds(1, 1000))
        player.play()
    }

    // Loop the video when playback reaches the end.
    NotificationCenter.default.addObserver(forName: .AVPlayerItemDidPlayToEndTime, object: player.currentItem, queue: .main) { _ in
        player.seek(to: kCMTimeZero)
        player.play()
    }

    // Add videoNode to the ARAnchor's node.
    node.addChildNode(videoNode)

    // Add the ARAnchor node to the root node of the scene.
    self.imageDetectView.scene.rootNode.addChildNode(node)
    

    And here is the chroma key material:

    import SceneKit
    
    /// SceneKit material that makes one color of its diffuse contents transparent
    /// (chroma keying), implemented as an SCNShadable surface shader modifier.
    public class ChromaKeyMaterial: SCNMaterial {

        /// Color to key out. Updating it re-uploads the shader uniform.
        public var backgroundColor: UIColor {
            didSet { didSetBackgroundColor() }
        }

        /// How far from the key color (in Cr/Cb space) a pixel may be and still be removed.
        public var thresholdSensitivity: Float {
            didSet { didSetThresholdSensitivity() }
        }

        /// Width of the soft edge between fully keyed and fully opaque pixels.
        public var smoothing: Float  {
            didSet { didSetSmoothing() }
        }

        public init(backgroundColor: UIColor = .green, thresholdSensitivity: Float = 0.50, smoothing: Float = 0.001) {

            self.backgroundColor = backgroundColor
            self.thresholdSensitivity = thresholdSensitivity
            self.smoothing = smoothing

            super.init()

            // didSet is not called during init, so push the initial uniform values explicitly.
            didSetBackgroundColor()
            didSetThresholdSensitivity()
            didSetSmoothing()

            // chroma key shader is based on GPUImage
            // https://github.com/BradLarson/GPUImage/blob/master/framework/Source/GPUImageChromaKeyFilter.m

            let surfaceShader =
            """
            uniform vec3 c_colorToReplace;
            uniform float c_thresholdSensitivity;
            uniform float c_smoothing;

            #pragma transparent
            #pragma body

            vec3 textureColor = _surface.diffuse.rgb;

            float maskY = 0.2989 * c_colorToReplace.r + 0.5866 * c_colorToReplace.g + 0.1145 * c_colorToReplace.b;
            float maskCr = 0.7132 * (c_colorToReplace.r - maskY);
            float maskCb = 0.5647 * (c_colorToReplace.b - maskY);

            float Y = 0.2989 * textureColor.r + 0.5866 * textureColor.g + 0.1145 * textureColor.b;
            float Cr = 0.7132 * (textureColor.r - Y);
            float Cb = 0.5647 * (textureColor.b - Y);

            float blendValue = smoothstep(c_thresholdSensitivity, c_thresholdSensitivity + c_smoothing, distance(vec2(Cr, Cb), vec2(maskCr, maskCb)));

            float a = blendValue;
            _surface.transparent.a = a;
            """

            shaderModifiers = [
                .surface: surfaceShader,
            ]
        }

        required public init?(coder aDecoder: NSCoder) {
            fatalError("init(coder:) has not been implemented")
        }

        /// Pushes `backgroundColor` to the shader as an RGB vector.
        /// Fixed: the previous version hardcoded pure green, which made setting
        /// `backgroundColor` a silent no-op.
        private func didSetBackgroundColor() {
            // Convert through getRed(_:green:blue:alpha:) so colors declared in
            // non-RGB color spaces (e.g. grayscale) are converted correctly,
            // unlike reading cgColor.components directly.
            var red: CGFloat = 0
            var green: CGFloat = 0
            var blue: CGFloat = 0
            var alpha: CGFloat = 0
            backgroundColor.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
            let vector = SCNVector3(x: Float(red), y: Float(green), z: Float(blue))
            setValue(vector, forKey: "c_colorToReplace")
        }

        /// Pushes `smoothing` to the shader uniform.
        private func didSetSmoothing() {
            setValue(smoothing, forKey: "c_smoothing")
        }

        /// Pushes `thresholdSensitivity` to the shader uniform.
        private func didSetThresholdSensitivity() {
            setValue(thresholdSensitivity, forKey: "c_thresholdSensitivity")
        }
    }