How do I record a video from ARKit?

Asked: 2017-06-28 02:40:45

Tags: ios opengl-es scenekit arkit

I'm currently testing an ARKit/SceneKit implementation. Basic rendering to the screen more or less works, so next I'd like to try recording what I see on screen to a video file.

Just for recording SceneKit (no camera feed yet), I found this Gist:

//
//  ViewController.swift
//  SceneKitToVideo
//
//  Created by Lacy Rhoades on 11/29/16.
//  Copyright © 2016 Lacy Rhoades. All rights reserved.
//

import SceneKit
import GPUImage
import Photos

class ViewController: UIViewController {

    // Renders a scene (and shows it on the screen)
    var scnView: SCNView!

    // Another renderer
    var secondaryRenderer: SCNRenderer?

    // Abducts image data via an OpenGL texture
    var textureInput: GPUImageTextureInput?

    // Receives image data from textureInput, shows it on screen
    var gpuImageView: GPUImageView!

    // Receives image data from the textureInput, writes to a file
    var movieWriter: GPUImageMovieWriter?

    // Where to write the output file
    let path = NSTemporaryDirectory().appending("tmp.mp4")

    // Output file dimensions
    let videoSize = CGSize(width: 800.0, height: 600.0)

    // EAGLContext in the sharegroup with GPUImage
    var eaglContext: EAGLContext!

    override func viewDidLoad() {
        super.viewDidLoad()

        let group = GPUImageContext.sharedImageProcessing().context.sharegroup
        self.eaglContext = EAGLContext(api: .openGLES2, sharegroup: group )
        let options = ["preferredRenderingAPI": SCNRenderingAPI.openGLES2]

        // Main view with 3d in it
        self.scnView = SCNView(frame: CGRect.zero, options: options)
        self.scnView.preferredFramesPerSecond = 60
        self.scnView.eaglContext = eaglContext
        self.scnView.translatesAutoresizingMaskIntoConstraints = false
        self.view.addSubview(self.scnView)

        // Secondary renderer for rendering to an OpenGL framebuffer
        self.secondaryRenderer = SCNRenderer(context: eaglContext, options: options)

        // Output of the GPUImage pipeline
        self.gpuImageView = GPUImageView()
        self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false
        self.view.addSubview(self.gpuImageView)

        self.setupConstraints()

        self.setupScene()

        self.setupMovieWriter()

        DispatchQueue.main.async {
            self.setupOpenGL()
        }
    }

    func setupConstraints() {
        let relativeWidth: CGFloat = 0.8

        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .width, relatedBy: .equal, toItem: self.view, attribute: .width, multiplier: relativeWidth, constant: 0))
        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .centerX, relatedBy: .equal, toItem: self.view, attribute: .centerX, multiplier: 1, constant: 0))

        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .width, relatedBy: .equal, toItem: self.view, attribute: .width, multiplier: relativeWidth, constant: 0))
        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .centerX, relatedBy: .equal, toItem: self.view, attribute: .centerX, multiplier: 1, constant: 0))

        self.view.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "V:|-(==30.0)-[scnView]-(==30.0)-[gpuImageView]", options: [], metrics: [:], views: ["gpuImageView": gpuImageView, "scnView": scnView]))

        let videoRatio = self.videoSize.width / self.videoSize.height
        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .width, relatedBy: .equal, toItem: self.scnView, attribute: .height, multiplier: videoRatio, constant: 1))
        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .width, relatedBy: .equal, toItem: self.gpuImageView, attribute: .height, multiplier: videoRatio, constant: 1))
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        self.cameraBoxNode.runAction(
            SCNAction.repeatForever(
                SCNAction.rotateBy(x: 0.0, y: -2 * CGFloat.pi, z: 0.0, duration: 8.0)
            )
        )

        self.scnView.isPlaying = true

        Timer.scheduledTimer(withTimeInterval: 5.0, repeats: false, block: {
            timer in
            self.startRecording()
        })
    }

    var scene: SCNScene!
    var geometryNode: SCNNode!
    var cameraNode: SCNNode!
    var cameraBoxNode: SCNNode!
    var imageMaterial: SCNMaterial!
    func setupScene() {
        self.imageMaterial = SCNMaterial()
        self.imageMaterial.isDoubleSided = true
        self.imageMaterial.diffuse.contentsTransform = SCNMatrix4MakeScale(-1, 1, 1)
        self.imageMaterial.diffuse.wrapS = .repeat
        self.imageMaterial.diffuse.contents = UIImage(named: "pano_01")

        self.scene = SCNScene()

        let sphere = SCNSphere(radius: 100.0)
        sphere.materials = [imageMaterial!]
        self.geometryNode = SCNNode(geometry: sphere)
        self.geometryNode.position = SCNVector3Make(0.0, 0.0, 0.0)
        scene.rootNode.addChildNode(self.geometryNode)

        self.cameraNode = SCNNode()
        self.cameraNode.camera = SCNCamera()
        self.cameraNode.camera?.yFov = 72.0
        self.cameraNode.position = SCNVector3Make(0, 0, 0)
        self.cameraNode.eulerAngles = SCNVector3Make(0.0, 0.0, 0.0)

        self.cameraBoxNode = SCNNode()
        self.cameraBoxNode.addChildNode(self.cameraNode)
        scene.rootNode.addChildNode(self.cameraBoxNode)

        self.scnView.scene = scene
        self.scnView.backgroundColor = UIColor.darkGray
        self.scnView.autoenablesDefaultLighting = true
    }

    func setupMovieWriter() {
        let _ = FileUtil.mkdirUsingFile(path)
        let _ = FileUtil.unlink(path)
        let url = URL(fileURLWithPath: path)
        self.movieWriter = GPUImageMovieWriter(movieURL: url, size: self.videoSize)
    }

    let glRenderQueue = GPUImageContext.sharedContextQueue()!
    var outputTexture: GLuint = 0
    var outputFramebuffer: GLuint = 0
    func setupOpenGL() {
        self.glRenderQueue.sync {
            let context = EAGLContext.current()
            if context != self.eaglContext {
                EAGLContext.setCurrent(self.eaglContext)
            }

            glGenFramebuffers(1, &self.outputFramebuffer)
            glBindFramebuffer(GLenum(GL_FRAMEBUFFER), self.outputFramebuffer)

            glGenTextures(1, &self.outputTexture)
            glBindTexture(GLenum(GL_TEXTURE_2D), self.outputTexture)
        }

        // Pipe the texture into GPUImage-land
        self.textureInput = GPUImageTextureInput(texture: self.outputTexture, size: self.videoSize)

        let rotate = GPUImageFilter()
        rotate.setInputRotation(kGPUImageFlipVertical, at: 0)
        self.textureInput?.addTarget(rotate)
        rotate.addTarget(self.gpuImageView)

        if let writer = self.movieWriter {
            rotate.addTarget(writer)
        }

        // Call me back on every render
        self.scnView.delegate = self
    }

    func renderToFramebuffer(atTime time: TimeInterval) {
        self.glRenderQueue.sync {
            let context = EAGLContext.current()
            if context != self.eaglContext {
                EAGLContext.setCurrent(self.eaglContext)
            }

            objc_sync_enter(self.eaglContext)

            let width = GLsizei(self.videoSize.width)
            let height = GLsizei(self.videoSize.height)

            glBindFramebuffer(GLenum(GL_FRAMEBUFFER), self.outputFramebuffer)
            glBindTexture(GLenum(GL_TEXTURE_2D), self.outputTexture)

            glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, width, height, 0, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), nil)

            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)

            glFramebufferTexture2D(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_TEXTURE_2D), self.outputTexture, 0)

            glViewport(0, 0, width, height)

            glClear(GLbitfield(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT))

            self.secondaryRenderer?.render(atTime: time)

            self.videoBuildingQueue.sync {
                self.textureInput?.processTexture(withFrameTime: CMTime(seconds: time, preferredTimescale: 100000))
            }

            objc_sync_exit(self.eaglContext)
        }

    }

    func startRecording() {
        self.startRecord()
        Timer.scheduledTimer(withTimeInterval: 24.0, repeats: false, block: {
            timer in
            self.stopRecord()
        })
    }

    let videoBuildingQueue = DispatchQueue.global(qos: .default)

    func startRecord() {
        self.videoBuildingQueue.sync {
            //inOrientation: CGAffineTransform(scaleX: 1.0, y: -1.0)
            self.movieWriter?.startRecording()
        }
    }

    var renderStartTime: TimeInterval = 0

    func stopRecord() {
        self.videoBuildingQueue.sync {
            self.movieWriter?.finishRecording(completionHandler: {
                self.saveFileToCameraRoll()
            })
        }
    }

    func saveFileToCameraRoll() {
        assert(FileUtil.fileExists(self.path), "Check for file output")

        DispatchQueue.global(qos: .utility).async {
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: self.path))
            }) { (done, err) in
                if err != nil {
                    print("Error creating video file in library")
                    print(err.debugDescription)
                } else {
                    print("Done writing asset to the user's photo library")
                }
            }
        }
    }

}

extension ViewController: SCNSceneRendererDelegate {
    func renderer(_ renderer: SCNSceneRenderer, didRenderScene scene: SCNScene, atTime time: TimeInterval) {
        self.secondaryRenderer?.scene = scene
        self.secondaryRenderer?.pointOfView = (renderer as! SCNView).pointOfView
        self.renderToFramebuffer(atTime: time)
    }
}

But this doesn't render the image from the device camera.

So I started looking for a way to do that. So far I've found how to get the captured image as a CVImageBufferRef by accessing the ARFrame. Apple's GLCameraRipple sample seems to help me get an OpenGL texture from it.

But my question is how to draw it in the rendering loop. This may be obvious to someone experienced with OpenGL, but I know very little about OpenGL, so I can't figure out how to add it to the code above. A sketch of that first step follows.
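For reference, here is a minimal sketch (not part of the original question) of the ARFrame-to-texture step described above, using a CVOpenGLESTextureCache the way GLCameraRipple does. lumaTexture(from:context:) is a hypothetical helper name; the camera image is biplanar YCbCr, so a complete version would also extract the chroma plane and run a YCbCr-to-RGB conversion shader.

import ARKit
import CoreVideo
import OpenGLES

// Sketch: wrap the ARFrame's camera image in a GL_TEXTURE_2D without copying.
// Only the luma (Y) plane is pulled out here, as a GL_LUMINANCE texture.
func lumaTexture(from frame: ARFrame, context: EAGLContext) -> CVOpenGLESTexture? {
    // In a real app, create the texture cache once and reuse it every frame.
    var cache: CVOpenGLESTextureCache?
    CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, context, nil, &cache)
    guard let textureCache = cache else { return nil }

    let pixelBuffer = frame.capturedImage  // CVPixelBuffer from the device camera
    let width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)
    let height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)

    var texture: CVOpenGLESTexture?
    CVOpenGLESTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, textureCache, pixelBuffer, nil,
        GLenum(GL_TEXTURE_2D), GL_LUMINANCE,
        GLsizei(width), GLsizei(height),
        GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE),
        0, &texture)
    return texture
}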

7 Answers:

Answer 0 (score: 4)

You can record everything shown on screen, ARKit and SceneKit content included, with ReplayKit (or live-stream it to a service like Twitch).

(As Apple pointed out at WWDC, ReplayKit is in fact the foundation of the Control Center screen-recording feature in iOS 11.)
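A minimal sketch of what that looks like (startCapture and stopCapture are illustrative names, not part of the answer):

import ReplayKit
import UIKit

// Sketch: record the full screen contents with ReplayKit.
// The system asks the user for permission when recording starts.
func startCapture() {
    RPScreenRecorder.shared().startRecording { error in
        if let error = error {
            print("Failed to start ReplayKit recording: \(error)")
        }
    }
}

func stopCapture(presentingFrom viewController: UIViewController) {
    RPScreenRecorder.shared().stopRecording { previewController, error in
        // ReplayKit hands back a preview controller the user can trim, save, or share from.
        if let previewController = previewController {
            viewController.present(previewController, animated: true)
        }
    }
}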

Answer 1 (score: 1)

I don't know whether you've managed to answer this by now, but lacyrhoades, the author of the class you referenced, has published another project on GitHub that seems to do what you're asking. I've used it, and it manages to record the scene view with AR objects and the camera feed. You can find it at the link below:

https://github.com/lacyrhoades/SCNKit2Video

If you want to use it with AR, you'll have to wire an ARSCNView into the project he made, since he only runs a plain SceneView, not AR.

Hope it helps.

Answer 2 (score: 1)

I just found this framework called ARVideoKit, and it seems easy to implement. It also has more features, such as capturing GIFs and Live Photos.

The framework's official repo is: https://github.com/AFathi/ARVideoKit/

To install it, clone the repository and drag the .framework file into your project's embedded binaries.

Then the implementation is pretty simple (a consolidated sketch follows the list):

  1. import ARVideoKit in your UIViewController class

  2. Create a RecordAR? variable

    var videoRec: RecordAR?

  3. Initialize your variable in viewDidLoad

    videoRec = RecordAR(ARSceneKit: sceneView)

  4. Prepare RecordAR in viewWillAppear

    videoRec.prepare(configuration)

  5. Start recording a video

    videoRec.record()

  6. Stop and export to the camera roll!

    videoRec.stopAndExport()

  7. Check the framework's documentation; it supports many more features!

You can find their documentation here: https://github.com/AFathi/ARVideoKit/wiki
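Here is a minimal consolidated sketch of those steps, under a few assumptions not stated in the answer: an ARSCNView outlet named sceneView, a world-tracking configuration, and a button wired to toggleRecording. The isRecording flag is my own bookkeeping, not part of ARVideoKit.

import UIKit
import ARKit
import ARVideoKit

class RecordingViewController: UIViewController {
    @IBOutlet var sceneView: ARSCNView!  // assumed to be set up in the storyboard
    var videoRec: RecordAR?
    var isRecording = false

    override func viewDidLoad() {
        super.viewDidLoad()
        // Attach the recorder to the AR scene view
        videoRec = RecordAR(ARSceneKit: sceneView)
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        let configuration = ARWorldTrackingConfiguration()
        sceneView.session.run(configuration)
        videoRec?.prepare(configuration)
    }

    /// Toggles recording; stopAndExport() saves the result to the camera roll.
    @IBAction func toggleRecording(_ sender: UIButton) {
        if isRecording {
            videoRec?.stopAndExport()
        } else {
            videoRec?.record()
        }
        isRecording.toggle()
    }
}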

Hope that helps!

Answer 3 (score: 1)

ReplayKit is not a great solution: the user is shown an ugly permission dialog, you have to work around it recording your UI elements, and you have less control over the video resolution.

Instead, you should take the captured-frame CVPixelBuffer that ARKit returns and process it the same way you would frames captured from the camera. Assuming you need to process the video frames, you'll probably also want a framework such as Metal to handle the graphics work. This is not simple. See the answer provided here: How to record video in RealityKit?
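To make the camera-frame half of that concrete, here is a minimal sketch (not from the answer; FrameRecorder is an illustrative name) that writes each ARFrame's capturedImage to an .mp4 with AVAssetWriter. Note that this records only the raw camera frames; compositing your rendered AR content on top is the part that needs Metal or similar. To use it, create one and set it as the ARSession's delegate.

import ARKit
import AVFoundation

// Sketch: append each camera frame to an AVAssetWriter,
// stamped with the frame's own capture timestamp.
final class FrameRecorder: NSObject, ARSessionDelegate {
    private let writer: AVAssetWriter
    private let input: AVAssetWriterInput
    private let adaptor: AVAssetWriterInputPixelBufferAdaptor
    private var started = false

    // size should match the camera resolution, e.g. 1920x1080.
    init(outputURL: URL, size: CGSize) throws {
        writer = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
        input = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height,
        ])
        input.expectsMediaDataInRealTime = true
        adaptor = AVAssetWriterInputPixelBufferAdaptor(
            assetWriterInput: input, sourcePixelBufferAttributes: nil)
        writer.add(input)
        super.init()
    }

    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        let time = CMTime(seconds: frame.timestamp, preferredTimescale: 600)
        if !started {
            writer.startWriting()
            writer.startSession(atSourceTime: time)
            started = true
        }
        if input.isReadyForMoreMediaData {
            // capturedImage is the YCbCr CVPixelBuffer from the device camera.
            adaptor.append(frame.capturedImage, withPresentationTime: time)
        }
    }

    func finish(completion: @escaping () -> Void) {
        input.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}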

Answer 4 (score: 0)

Swift 5

You can record video from an ARKit view using this ARCapture framework:

private var capture: ARCapture?
...

override func viewDidLoad() {
    super.viewDidLoad()

    // Create a new scene
    let scene = SCNScene()
    ...
    // TODO Setup ARSCNView with the scene
    // sceneView.scene = scene
    
    // Setup ARCapture
    capture = ARCapture(view: sceneView)

}

/// "Record" button action handler
@IBAction func recordAction(_ sender: UIButton) {
    capture?.start()
}

/// "Stop" button action handler
@IBAction func stopAction(_ sender: UIButton) {
    capture?.stop({ (status) in
        print("Video exported: \(status)")
    })
}

After ARCapture.stop is called, the video will appear in the Photos app.

Answer 5 (score: -2)

If you can connect your device to a Mac, you can just use QuickTime Player to record the screen (and sound) from the iOS device.

In QuickTime, choose New Movie Recording from the File menu. In the recording dialog, there's a small drop-down arrow near the big red record button where you can choose the audio and video inputs. Select your i-device there and you're good to go.

Answer 6 (score: -4)

If you're using ARKit, you're on iOS 11.

iOS 11 has a built-in screen-recording feature (with microphone support).

However, as of iOS 11 Beta 2 it's a little buggy, but it does work.

In summary:

  • In Settings > Control Center > Customize Controls, add the Screen Recording widget
  • In Control Center (double-click the home button, or swipe up from the bottom of the screen), force-touch (or long-press) the screen-recording button to choose whether to record the microphone
  • Tap the screen-recording button to start recording
  • Run your app
  • Return to Control Center and tap the screen-recording button to stop
  • The video is in your camera roll.

The buggy part is that it doesn't work with some landscape apps, and it doesn't always record.

Here's a tutorial I wrote about using it in more detail:

http://talesfromtherift.com/how-to-screen-record-arkit/