SKScene内部的GPUImageView作为SKNode材质 - 在ARKit上播放透明视频

时间:2017-11-13 13:28:57

标签: gpuimage arkit skscene

在项目 A 中,我使用 GPUImageView 显示具有透明度的视频(在绿屏上录制),使用了 GPUImageChromaKeyBlendFilter 等滤镜,并且工作得非常好。

基于ARKIT的另一个项目-B-在空间中向我显示了VIDEO的平面,并且使用SKVideoNode和AVPlayer也可以正常工作。

现在的问题是将它们组合在一起 :) 也就是说,我想在空间中显示带有透明度的视频……不幸的是,我无法将 GPUImageView 渲染到任何 SpriteKit 元素上,再将其添加到 SKScene 中作为 SCNPlane 的动画纹理。这有可能实现吗?或者是否还有其他方法可以用 ARKit 渲染带透明度的视频?

欢迎任何建议或答案。

1 个答案:

答案 0 :(得分:0)

我有同样的任务!请在此处(here)查看我的解决方案。

我基本上实现了 Lësha Turkowski 的 ChromaKeyMaterial,并编写了下面的代码来定位和播放视频。

import UIKit
import ARKit

/// View controller that renders a chroma-keyed (green-screen) video on a
/// plane floating in the AR scene.
class ARTransVC: UIViewController{

// The AR scene view wired up in the storyboard (implicitly unwrapped
// outlet — nil here is a programmer/storyboard error and should crash).
@IBOutlet weak var arSceneView: ARSCNView!
// World-tracking configuration used when the AR session is started.
let configuration = ARWorldTrackingConfiguration()

// Player for the bundled green-screen clip. A missing resource is a
// packaging error, so failing fast with fatalError() is intentional.
private var player: AVPlayer = {
    guard let url = Bundle.main.url(forResource: "FY3A4278", withExtension: "mp4") else { fatalError() }
    return AVPlayer(url: url)
}()

/// Starts the AR session and, after a short delay, places a plane in front
/// of the camera whose material chroma-keys the bundled video.
override func viewDidLoad() {
    super.viewDidLoad()
    self.arSceneView.debugOptions = [ARSCNDebugOptions.showWorldOrigin, ARSCNDebugOptions.showFeaturePoints]
    self.arSceneView.session.run(configuration)

    // A delay so ARKit has time to capture the surroundings before we
    // place content. (NOTE(review): `self` is captured strongly here,
    // which keeps the VC alive for these 3 seconds — acceptable for a demo.)
    DispatchQueue.main.asyncAfter(deadline: .now() + 3) {

        // A SpriteKit scene containing the SpriteKit video node.
        // NOTE(review): this scene is never attached to the rendered
        // material below (the chroma-key material samples the AVPlayer
        // directly), so it only serves to host the SKVideoNode; confirm
        // whether it can be removed entirely.
        let spriteKitScene = SKScene(size: CGSize(width: self.arSceneView.frame.width, height: self.arSceneView.frame.height))
        // Fixed: `.aspectFit` was assigned twice in the original.
        spriteKitScene.scaleMode = .aspectFit
        spriteKitScene.backgroundColor = .clear

        // Create the SpriteKit video node, containing the video player.
        let videoSpriteKitNode = SKVideoNode(avPlayer: self.player)
        videoSpriteKitNode.position = CGPoint(x: spriteKitScene.size.width / 2.0, y: spriteKitScene.size.height / 2.0)
        videoSpriteKitNode.size = spriteKitScene.size
        // Flip vertically — SpriteKit content renders upside-down when
        // used as a SceneKit texture.
        videoSpriteKitNode.yScale = -1.0
        videoSpriteKitNode.play()
        spriteKitScene.addChild(videoSpriteKitNode)

        // Loop the video: keep the player alive at the end of the item and
        // rewind it when the end-of-item notification fires.
        self.player.actionAtItemEnd = .none
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(ARTransVC.playerItemDidReachEnd),
            name: NSNotification.Name.AVPlayerItemDidPlayToEndTime,
            object: self.player.currentItem)

        // Create the SceneKit scene.
        let scene = SCNScene()
        self.arSceneView.scene = scene

        // A 1×1 m plane textured by the chroma-key material, which samples
        // the AVPlayer directly.
        let background = SCNPlane(width: CGFloat(1), height: CGFloat(1))
        let chromaKeyMaterial = ChromaKeyMaterial()
        chromaKeyMaterial.diffuse.contents = self.player
        // Fixed: the original set `isDoubleSided` on the plane's default
        // material and then replaced the materials array with
        // `chromaKeyMaterial`, silently losing the double-sided setting.
        // Set it on the material that is actually rendered.
        chromaKeyMaterial.isDoubleSided = true
        background.materials = [chromaKeyMaterial]

        let backgroundNode = SCNNode(geometry: background)
        // Two metres in front of the world origin.
        backgroundNode.position = SCNVector3(0, 0, -2.0)
        scene.rootNode.addChildNode(backgroundNode)

        // The video does not start without delaying the player; playing it
        // earlier results in:
        // [SceneKit] Error: Cannot get pixel buffer (CVPixelBufferRef)
        DispatchQueue.main.asyncAfter(deadline: .now() + 1) {
            self.player.seek(to: CMTimeMakeWithSeconds(1, 1000))
            self.player.play()
        }
    }
}

/// Rewinds the finished player item to the start so playback loops.
/// Invoked by the `AVPlayerItemDidPlayToEndTime` notification.
@objc func playerItemDidReachEnd(notification: NSNotification) {
    guard let endedItem = notification.object as? AVPlayerItem else { return }
    endedItem.seek(to: kCMTimeZero, completionHandler: nil)
}