15

Now I'm testing an ARKit/SceneKit implementation. The basic rendering to the screen works, so next I want to record what I see on the screen into a video.

Just for recording plain SceneKit content, I found this Gist:

//
//  ViewController.swift
//  SceneKitToVideo
//
//  Created by Lacy Rhoades on 11/29/16.
//  Copyright © 2016 Lacy Rhoades. All rights reserved.
//

import SceneKit
import GPUImage
import Photos

class ViewController: UIViewController {

    // Renders a scene (and shows it on the screen)
    var scnView: SCNView!

    // Another renderer
    var secondaryRenderer: SCNRenderer?

    // Abducts image data via an OpenGL texture
    var textureInput: GPUImageTextureInput?

    // Receives image data from textureInput, shows it on screen
    var gpuImageView: GPUImageView!

    // Receives image data from the textureInput, writes to a file
    var movieWriter: GPUImageMovieWriter?

    // Where to write the output file
    let path = NSTemporaryDirectory().appending("tmp.mp4")

    // Output file dimensions
    let videoSize = CGSize(width: 800.0, height: 600.0)

    // EAGLContext in the sharegroup with GPUImage
    var eaglContext: EAGLContext!

    override func viewDidLoad() {
        super.viewDidLoad()

        let group = GPUImageContext.sharedImageProcessing().context.sharegroup
        self.eaglContext = EAGLContext(api: .openGLES2, sharegroup: group )
        let options = ["preferredRenderingAPI": SCNRenderingAPI.openGLES2]

        // Main view with 3d in it
        self.scnView = SCNView(frame: CGRect.zero, options: options)
        self.scnView.preferredFramesPerSecond = 60
        self.scnView.eaglContext = eaglContext
        self.scnView.translatesAutoresizingMaskIntoConstraints = false
        self.view.addSubview(self.scnView)

        // Secondary renderer for rendering to an OpenGL framebuffer
        self.secondaryRenderer = SCNRenderer(context: eaglContext, options: options)

        // Output of the GPUImage pipeline
        self.gpuImageView = GPUImageView()
        self.gpuImageView.translatesAutoresizingMaskIntoConstraints = false
        self.view.addSubview(self.gpuImageView)

        self.setupConstraints()

        self.setupScene()

        self.setupMovieWriter()

        DispatchQueue.main.async {
            self.setupOpenGL()
        }
    }

    func setupConstraints() {
        let relativeWidth: CGFloat = 0.8

        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .width, relatedBy: .equal, toItem: self.view, attribute: .width, multiplier: relativeWidth, constant: 0))
        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .centerX, relatedBy: .equal, toItem: self.view, attribute: .centerX, multiplier: 1, constant: 0))

        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .width, relatedBy: .equal, toItem: self.view, attribute: .width, multiplier: relativeWidth, constant: 0))
        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .centerX, relatedBy: .equal, toItem: self.view, attribute: .centerX, multiplier: 1, constant: 0))

        self.view.addConstraints(NSLayoutConstraint.constraints(withVisualFormat: "V:|-(==30.0)-[scnView]-(==30.0)-[gpuImageView]", options: [], metrics: [:], views: ["gpuImageView": gpuImageView, "scnView": scnView]))

        let videoRatio = self.videoSize.width / self.videoSize.height
        self.view.addConstraint(NSLayoutConstraint(item: self.scnView, attribute: .width, relatedBy: .equal, toItem: self.scnView, attribute: .height, multiplier: videoRatio, constant: 1))
        self.view.addConstraint(NSLayoutConstraint(item: self.gpuImageView, attribute: .width, relatedBy: .equal, toItem: self.gpuImageView, attribute: .height, multiplier: videoRatio, constant: 1))
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        self.cameraBoxNode.runAction(
            SCNAction.repeatForever(
                SCNAction.rotateBy(x: 0.0, y: -2 * CGFloat.pi, z: 0.0, duration: 8.0)
            )
        )

        self.scnView.isPlaying = true

        Timer.scheduledTimer(withTimeInterval: 5.0, repeats: false, block: {
            timer in
            self.startRecording()
        })
    }

    var scene: SCNScene!
    var geometryNode: SCNNode!
    var cameraNode: SCNNode!
    var cameraBoxNode: SCNNode!
    var imageMaterial: SCNMaterial!
    func setupScene() {
        self.imageMaterial = SCNMaterial()
        self.imageMaterial.isDoubleSided = true
        self.imageMaterial.diffuse.contentsTransform = SCNMatrix4MakeScale(-1, 1, 1)
        self.imageMaterial.diffuse.wrapS = .repeat
        self.imageMaterial.diffuse.contents = UIImage(named: "pano_01")

        self.scene = SCNScene()

        let sphere = SCNSphere(radius: 100.0)
        sphere.materials = [imageMaterial!]
        self.geometryNode = SCNNode(geometry: sphere)
        self.geometryNode.position = SCNVector3Make(0.0, 0.0, 0.0)
        scene.rootNode.addChildNode(self.geometryNode)

        self.cameraNode = SCNNode()
        self.cameraNode.camera = SCNCamera()
        self.cameraNode.camera?.yFov = 72.0
        self.cameraNode.position = SCNVector3Make(0, 0, 0)
        self.cameraNode.eulerAngles = SCNVector3Make(0.0, 0.0, 0.0)

        self.cameraBoxNode = SCNNode()
        self.cameraBoxNode.addChildNode(self.cameraNode)
        scene.rootNode.addChildNode(self.cameraBoxNode)

        self.scnView.scene = scene
        self.scnView.backgroundColor = UIColor.darkGray
        self.scnView.autoenablesDefaultLighting = true
    }

    func setupMovieWriter() {
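        // FileUtil is a small file-system helper from the original gist (not shown here)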
        let _ = FileUtil.mkdirUsingFile(path)
        let _ = FileUtil.unlink(path)
        let url = URL(fileURLWithPath: path)
        self.movieWriter = GPUImageMovieWriter(movieURL: url, size: self.videoSize)
    }

    let glRenderQueue = GPUImageContext.sharedContextQueue()!
    var outputTexture: GLuint = 0
    var outputFramebuffer: GLuint = 0
    func setupOpenGL() {
        self.glRenderQueue.sync {
            let context = EAGLContext.current()
            if context != self.eaglContext {
                EAGLContext.setCurrent(self.eaglContext)
            }

            glGenFramebuffers(1, &self.outputFramebuffer)
            glBindFramebuffer(GLenum(GL_FRAMEBUFFER), self.outputFramebuffer)

            glGenTextures(1, &self.outputTexture)
            glBindTexture(GLenum(GL_TEXTURE_2D), self.outputTexture)
        }

        // Pipe the texture into GPUImage-land
        self.textureInput = GPUImageTextureInput(texture: self.outputTexture, size: self.videoSize)

        let rotate = GPUImageFilter()
        rotate.setInputRotation(kGPUImageFlipVertical, at: 0)
        self.textureInput?.addTarget(rotate)
        rotate.addTarget(self.gpuImageView)

        if let writer = self.movieWriter {
            rotate.addTarget(writer)
        }

        // Call me back on every render
        self.scnView.delegate = self
    }

    func renderToFramebuffer(atTime time: TimeInterval) {
        self.glRenderQueue.sync {
            let context = EAGLContext.current()
            if context != self.eaglContext {
                EAGLContext.setCurrent(self.eaglContext)
            }

            objc_sync_enter(self.eaglContext)

            let width = GLsizei(self.videoSize.width)
            let height = GLsizei(self.videoSize.height)

            glBindFramebuffer(GLenum(GL_FRAMEBUFFER), self.outputFramebuffer)
            glBindTexture(GLenum(GL_TEXTURE_2D), self.outputTexture)

            glTexImage2D(GLenum(GL_TEXTURE_2D), 0, GL_RGBA, width, height, 0, GLenum(GL_RGBA), GLenum(GL_UNSIGNED_BYTE), nil)

            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MAG_FILTER), GL_LINEAR)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_MIN_FILTER), GL_LINEAR)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_S), GL_CLAMP_TO_EDGE)
            glTexParameteri(GLenum(GL_TEXTURE_2D), GLenum(GL_TEXTURE_WRAP_T), GL_CLAMP_TO_EDGE)

            glFramebufferTexture2D(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_TEXTURE_2D), self.outputTexture, 0)

            glViewport(0, 0, width, height)

            glClear(GLbitfield(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT))

            self.secondaryRenderer?.render(atTime: time)

            self.videoBuildingQueue.sync {
                self.textureInput?.processTexture(withFrameTime: CMTime(seconds: time, preferredTimescale: 100000))
            }

            objc_sync_exit(self.eaglContext)
        }

    }

    func startRecording() {
        self.startRecord()
        Timer.scheduledTimer(withTimeInterval: 24.0, repeats: false, block: {
            timer in
            self.stopRecord()
        })
    }

    let videoBuildingQueue = DispatchQueue.global(qos: .default)

    func startRecord() {
        self.videoBuildingQueue.sync {
            //inOrientation: CGAffineTransform(scaleX: 1.0, y: -1.0)
            self.movieWriter?.startRecording()
        }
    }

    var renderStartTime: TimeInterval = 0

    func stopRecord() {
        self.videoBuildingQueue.sync {
            self.movieWriter?.finishRecording(completionHandler: {
                self.saveFileToCameraRoll()
            })
        }
    }

    func saveFileToCameraRoll() {
        assert(FileUtil.fileExists(self.path), "Check for file output")

        DispatchQueue.global(qos: .utility).async {
            PHPhotoLibrary.shared().performChanges({
                PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: URL(fileURLWithPath: self.path))
            }) { (done, err) in
                if err != nil {
                    print("Error creating video file in library")
                    print(err.debugDescription)
                } else {
                    print("Done writing asset to the user's photo library")
                }
            }
        }
    }

}

extension ViewController: SCNSceneRendererDelegate {
    func renderer(_ renderer: SCNSceneRenderer, didRenderScene scene: SCNScene, atTime time: TimeInterval) {
        self.secondaryRenderer?.scene = scene
        self.secondaryRenderer?.pointOfView = (renderer as! SCNView).pointOfView
        self.renderToFramebuffer(atTime: time)
    }
}

but this doesn't render the image from the device camera.

So I started searching for a way to do that as well. So far I've found that I can grab the captured image as a CVImageBufferRef by accessing ARFrame, and Apple's GLCameraRipple example seems to show how to get an OpenGL texture out of it.
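
As far as I can tell, the core of the GLCameraRipple approach is a CVOpenGLESTextureCache that wraps the ARFrame's pixel buffer in OpenGL ES textures. A minimal sketch of that part (the names are illustrative, and an EAGLContext is assumed to be set up):

import ARKit
import OpenGLES

var cameraTextureCache: CVOpenGLESTextureCache?

func makeTextureCache(context: EAGLContext) {
    // One cache per GL context; reuse it for every frame.
    CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, nil, context, nil, &cameraTextureCache)
}

func lumaTexture(from frame: ARFrame) -> CVOpenGLESTexture? {
    // frame.capturedImage is a bi-planar YCbCr CVPixelBuffer;
    // plane 0 is the full-resolution luminance (Y) plane.
    guard let cache = cameraTextureCache else { return nil }
    let pixelBuffer = frame.capturedImage
    var texture: CVOpenGLESTexture?
    CVOpenGLESTextureCacheCreateTextureFromImage(
        kCFAllocatorDefault, cache, pixelBuffer, nil,
        GLenum(GL_TEXTURE_2D), GL_LUMINANCE,
        GLsizei(CVPixelBufferGetWidthOfPlane(pixelBuffer, 0)),
        GLsizei(CVPixelBufferGetHeightOfPlane(pixelBuffer, 0)),
        GLenum(GL_LUMINANCE), GLenum(GL_UNSIGNED_BYTE), 0, &texture)
    return texture
}

Drawing the camera image would then mean binding this texture (plus a matching CbCr one) and rendering a full-screen quad with a YCbCr-to-RGB shader before the scene render, which is the part GLCameraRipple demonstrates.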

But my question is how to draw it in the rendering loop. This might be obvious to those experienced with OpenGL, but I have only very little OpenGL knowledge, so I can't figure out how to add that to the code above.

genpfault
Takeshi Yokemura
  • Somewhat related, but you can record your screen on iOS 11 natively. Access the option from within Control Center. You can add a shortcut to it from Settings / Control Center. – Tomislav Markovski Jun 28 '17 at 03:49
  • Thanks for the info. But I need to control the recording behavior, and Control Center doesn't give us that control. – Takeshi Yokemura Jun 28 '17 at 05:41

6 Answers

5

You can record everything seen on the screen (or live stream it to services like Twitch, for that matter) using ReplayKit, ARKit and SceneKit content included.

(As Apple pointed out at WWDC, ReplayKit is actually the basis for the Control Center screen recording feature in iOS 11.)
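
A minimal sketch of the ReplayKit calls involved (the wrapper functions here are illustrative; RPScreenRecorder and RPPreviewViewController are the actual API):

import ReplayKit
import UIKit

let recorder = RPScreenRecorder.shared()

func startCapture() {
    // Starts capturing everything rendered to the screen.
    recorder.startRecording { error in
        if let error = error {
            print("ReplayKit failed to start: \(error)")
        }
    }
}

func stopCapture(presentingFrom viewController: UIViewController) {
    recorder.stopRecording { previewController, error in
        // Present the preview modally so the user can trim, save, or share.
        if let preview = previewController {
            viewController.present(preview, animated: true)
        }
    }
}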

rickster
  • Thanks for the suggestion. Did you make sure that it works? I've just added a simple RPScreenRecorder implementation onto this ARKit project http://viewar.com/apple-augmented-reality-arkit-for-ios/ but it only reports error -5804 (failed) and gives me nothing. Do you have any idea? Are any additional settings needed? I believe my privacy settings are OK, because the app would crash if we didn't provide enough privacy permissions. – Takeshi Yokemura Jun 28 '17 at 05:50
  • I thought about this idea again and found that it's not a good idea to grab everything on the screen, because it includes other UI elements like the rec button and progress bar, which are still important to show for the user experience. On top of that, if I wanted it to be non-full-screen in the future, it would be super hard to make the change. So I still want a straightforward way to render it programmatically. – Takeshi Yokemura Jun 29 '17 at 01:52
  • @TakeshiYokemura I found [ScreenRecord](https://github.com/giridharvc7/ScreenRecord) to be a good solution, and [this article](https://medium.com/ymedialabs-innovation/screen-recording-your-app-using-ios-replaykit-for-bug-reporting-70f998c8c1b8) to be a good read. As implemented in those referenced works, you can try putting your rec button and progress bar in a separate UIWindow, since that one won't be recorded. – Tulio Troncoso Jul 11 '17 at 18:36
  • Thanks for your comment. Have you tried this in combination with ARKit? It seems pretty likely to hit the same error I mentioned above. If you aren't sure, I'll just give it a try. – Takeshi Yokemura Jul 12 '17 at 04:52
  • @TakeshiYokemura I have confirmed that ReplayKit works with ARKit/SceneKit, although I haven't tried to exclude UI elements such as the rec button, since this is not a goal of mine. My methodology was to begin with a single-view application, then add the ARSCNView, then add native UIKit elements on top. This is just my preferred method. If presenting a preview view controller, be sure to present it modally. – Roger Pingleton Jul 16 '17 at 12:34
  • I've been too busy to test ScreenRecord for a while, but now I've found it works! Maybe I need some tweaks like hiding my UI while recording or so, but it's good news to know it works. Thanks, Tulio! – Takeshi Yokemura Jul 24 '17 at 02:21
  • And I also found that it seems you can keep specific UI elements out of the recording by putting them in a separate UIWindow. https://code.tutsplus.com/tutorials/ios-9-an-introduction-to-replaykit--cms-25458 – Takeshi Yokemura Jul 24 '17 at 07:56
  • Good info @TakeshiYokemura ! Yes, it appears as if Apple fixed some things in ARKit Beta 4 – Roger Pingleton Jul 25 '17 at 15:21
  • @TakeshiYokemura How did you make it work? I have the same issue, I can't record, I get error code `5804` and I don't understand why. – Marie Dm Sep 01 '17 at 09:38
  • I don't like the idea of using ReplayKit: users are presented with an ugly permission dialog to record the screen each time. A way to encode the CVPixelBuffer to an MP4 container would be a much better solution (... I'm still trying to find a good example). – JCutting8 Jun 08 '20 at 10:10
  • @JCutting8 did you find the solution? – Jun Jul 15 '20 at 04:57
  • ARKit actually returns a captured frame as a CVPixelBuffer. If you are familiar with recording video/audio from the camera/mic using AVAssetWriter, then you can do this for ARKit videos too, but you will likely need to use OpenGL to draw onto your video frames. See more info in my answer here: https://stackoverflow.com/questions/59533059/how-to-record-video-in-realitykit?noredirect=1#comment111245470_59533059 – JCutting8 Jul 17 '20 at 08:28
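
As a few of the comments above note, UI you don't want captured (the rec button, a progress bar) can be hosted in a second UIWindow, which ReplayKit leaves out of the recording. A minimal sketch, where RecordingControlsViewController is a hypothetical container for those controls:

// Host recording controls in a separate UIWindow so ReplayKit
// does not capture them. RecordingControlsViewController is
// hypothetical; any view controller holding your controls works.
let overlayWindow = UIWindow(frame: UIScreen.main.bounds)
overlayWindow.windowLevel = UIWindowLevelAlert   // sits above the main window
overlayWindow.backgroundColor = .clear
overlayWindow.rootViewController = RecordingControlsViewController()
overlayWindow.isHidden = false                   // setting this shows the window
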
2

Swift 5

You can use this ARCapture framework to record video from an ARKit view:

private var capture: ARCapture?
...

override func viewDidLoad() {
    super.viewDidLoad()

    // Create a new scene
    let scene = SCNScene()
    ...
    // TODO Setup ARSCNView with the scene
    // sceneView.scene = scene
    
    // Setup ARCapture
    capture = ARCapture(view: sceneView)

}

/// "Record" button action handler
@IBAction func recordAction(_ sender: UIButton) {
    capture?.start()
}

/// "Stop" button action handler
@IBAction func stopAction(_ sender: UIButton) {
    capture?.stop({ (status) in
        print("Video exported: \(status)")
    })
}

After you call ARCapture.stop, the video will appear in the Photos app.

Alexander Volkov
1

I don't know if you've managed to solve this by now, but lacyrhoades, the person who wrote the class you referenced, has released another project on GitHub that seems to do what you're asking for. I've used it, and it records the scene view with AR objects as well as the camera input. You can find it through this link:

https://github.com/lacyrhoades/SCNKit2Video

If you want to use it with AR, though, you have to add an ARSCNView to the project he's made, as his runs a plain scene view, not an AR one.
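
The gist of that change is swapping the plain scene view for an ARSCNView and running a world-tracking session; a minimal sketch inside a view controller, assuming the shipping iOS 11 API names:

import ARKit

// Replace the project's SCNView with an ARSCNView and start an AR
// session; the video-recording pipeline itself can stay unchanged.
let sceneView = ARSCNView(frame: view.bounds)
view.addSubview(sceneView)

let configuration = ARWorldTrackingConfiguration()
sceneView.session.run(configuration)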

Hope it helps.

  • I tried the GitHub project and succeeded in recording a video with static objects. Did you manage to "configure the ARSceneView" like you said? – Marie Dm Aug 30 '17 at 14:50
  • @MarieDm check out my answer below. I hope it will help you in your awesome AR project :) – Ömer Karışman Sep 10 '17 at 09:52
  • Thanks @ÖmerKarışman. I found a solution in the project that inspired yours. Still, I gave yours a try. I might have misconfigured it, but there is no audio, and there's an issue with the image that is really weird. – Marie Dm Sep 10 '17 at 13:49
  • I'd be glad if you could open an issue and send over some details. – Ömer Karışman Sep 10 '17 at 14:04
  • @ÖmerKarışman I just did, on the open issue #3: https://github.com/svtek/SceneKitVideoRecorder/issues/3 – Marie Dm Sep 10 '17 at 14:30
  • Here's a tutorial on how to do it (the code is open source): https://medium.com/ar-tips-and-tricks/arkit-how-to-make-your-own-audio-video-recorder-3d72016d1087 – odlh Dec 03 '18 at 17:04
1

I just found this framework, called ARVideoKit, and it seems easy to implement; plus, it has more features such as capturing GIFs and Live Photos.

The framework official repo is: https://github.com/AFathi/ARVideoKit/

To install it, you have to clone the repo and drag the .framework file into your project's embedded binaries.

Then the implementation is pretty simple:

  1. import ARVideoKit in your UIViewController class

  2. Create a RecordAR? variable

    var videoRec:RecordAR?

  3. Initialize your variable in viewDidLoad

    videoRec = RecordAR(ARSceneKit: sceneView)

  4. Prepare RecordAR in viewWillAppear

    videoRec?.prepare(configuration)

  5. Begin recording a video

    videoRec?.record()

  6. Stop and export to camera roll!

    videoRec?.stopAndExport()

Take a look at the framework's documentation; it supports more features you can use!

You can find their documentation here: https://github.com/AFathi/ARVideoKit/wiki
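
Put together, a minimal view controller might look like this (a sketch assuming the RecordAR API described in that wiki; the outlet and button wiring are illustrative):

import UIKit
import ARKit
import ARVideoKit

class RecorderViewController: UIViewController {
    @IBOutlet var sceneView: ARSCNView!
    var videoRec: RecordAR?
    let configuration = ARWorldTrackingConfiguration()

    override func viewDidLoad() {
        super.viewDidLoad()
        // Step 3: initialize the recorder with the AR view.
        videoRec = RecordAR(ARSceneKit: sceneView)
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        sceneView.session.run(configuration)
        // Step 4: prepare the recorder with the same AR configuration.
        videoRec?.prepare(configuration)
    }

    @IBAction func startRecording(_ sender: UIButton) {
        videoRec?.record()           // Step 5
    }

    @IBAction func stopRecording(_ sender: UIButton) {
        videoRec?.stopAndExport()    // Step 6: stop and save to the camera roll
    }
}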

Hope that helped!

Swift Learner
  • Consider expanding your answer to include how the framework can solve the questioner's problem. Link-only answers are discouraged. – Tom Aranda Nov 06 '17 at 04:47
  • Unfortunately ARVideoKit doesn't work for RealityKit (i.e. ARKit 3 body tracking etc.) – JCutting8 Jun 08 '20 at 10:13
1

ReplayKit is not a good solution, because your users are presented with an ugly permission dialog and you have to work around it recording your UI elements. You also have less control over your video resolution.

Instead, you should use the captured-frame CVPixelBuffer returned by ARKit and handle it as you would frames recorded from the camera. If you need to process your video frames, you may also need to use a framework like Metal to handle the drawing. It is not simple. See the answer provided here: How to record video in RealityKit?
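
A sketch of that approach: feed each ARFrame's capturedImage into an AVAssetWriter, just as you would camera frames. Note this writes only the raw camera feed; compositing your rendered content on top is the extra Metal/OpenGL step mentioned above. The class and its settings here are illustrative:

import ARKit
import AVFoundation

final class ARFrameRecorder: NSObject, ARSessionDelegate {
    private let writer: AVAssetWriter
    private let input: AVAssetWriterInput
    private let adaptor: AVAssetWriterInputPixelBufferAdaptor
    private var started = false

    init(outputURL: URL, size: CGSize) throws {
        writer = try AVAssetWriter(outputURL: outputURL, fileType: .mp4)
        input = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey: AVVideoCodecH264,
            AVVideoWidthKey: size.width,
            AVVideoHeightKey: size.height,
        ])
        input.expectsMediaDataInRealTime = true
        adaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input,
                                                       sourcePixelBufferAttributes: nil)
        writer.add(input)
        super.init()
    }

    // Set an instance as the ARSession's delegate to receive every frame.
    func session(_ session: ARSession, didUpdate frame: ARFrame) {
        let time = CMTime(seconds: frame.timestamp, preferredTimescale: 600)
        if !started {
            writer.startWriting()
            writer.startSession(atSourceTime: time)
            started = true
        }
        if input.isReadyForMoreMediaData {
            // capturedImage is the camera's CVPixelBuffer for this frame.
            adaptor.append(frame.capturedImage, withPresentationTime: time)
        }
    }

    func finish(completion: @escaping () -> Void) {
        input.markAsFinished()
        writer.finishWriting(completionHandler: completion)
    }
}

Usage would be along the lines of session.delegate = recorder before running the AR configuration, then recorder.finish { ... } when done.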

JCutting8
-6

If you can keep your device connected to your Mac, it is really easy to use QuickTime Player to record the screen (and sound) from your iOS device.

In QuickTime, choose New Movie Recording from the File menu. In the recording window, near the big red record button, there is a little dropdown arrow where you can pick the audio and video input. Choose your iOS device there and you're set to go.

bjarnel
  • Thanks for the comment. The purpose is not recording video just for myself, but providing a way to record in my app that users can use easily. We could just tell users "Hey, tap the iOS screen-recording button!", but that's not the experience we want to offer. The ideal behavior is having a rec button inside my app that starts recording the AR scene when tapped. – Takeshi Yokemura Jun 28 '17 at 09:18