
I'm (desperately) trying to use a video as a texture in an SCNScene with some fancy shader modifiers. I'd like to use an SCNProgram for that part. I've just taken the one from here:

#include <metal_stdlib>

using namespace metal;

#include <SceneKit/scn_metal>

struct MyNodeBuffer {
    float4x4 modelTransform;
    float4x4 modelViewTransform;
    float4x4 normalTransform;
    float4x4 modelViewProjectionTransform;
};

typedef struct {
    float3 position [[ attribute(SCNVertexSemanticPosition) ]];
    float2 texCoords [[ attribute(SCNVertexSemanticTexcoord0) ]];
} MyVertexInput;

struct SimpleVertex
{
    float4 position [[position]];
    float2 texCoords;
};

vertex SimpleVertex myVertex(MyVertexInput in [[ stage_in ]],
                             constant SCNSceneBuffer& scn_frame [[buffer(0)]],
                             constant MyNodeBuffer& scn_node [[buffer(1)]])
{
    SimpleVertex vert;
    vert.position = scn_node.modelViewProjectionTransform * float4(in.position, 1.0);
    vert.texCoords = in.texCoords;

    return vert;
}

fragment half4 myFragment(SimpleVertex in [[stage_in]],
                          texture2d<float, access::sample> diffuseTexture [[texture(0)]])
{
    constexpr sampler sampler2d(coord::normalized, filter::linear, address::repeat);
    float4 color = diffuseTexture.sample(sampler2d, in.texCoords);
    return half4(color);
}

It simply displays a texture on a geometry. I'm now creating a SceneKit scene, adding a simple cube that uses the SCNProgram, creating a SpriteKit scene with a simple square in it, and passing this SpriteKit scene as a texture to my cube. It works fine:

class ViewController: UIViewController {
  @IBOutlet weak var scnView: SCNView!

  let fragmentModifier = try! String(contentsOfFile: Bundle.main.path(forResource: "fragment", ofType: "shader")!)
  var videoLayer: AVPlayerLayer!
  var videoPlayer: AVPlayer!

  override func viewDidLoad() {
    super.viewDidLoad()

    setupVideo()

    // Do any additional setup after loading the view, typically from a nib.
    scnView.showsStatistics = true
    scnView.allowsCameraControl = true

    let scnScene = SCNScene()
    scnView.scene = scnScene

    // setup SceneKit scene
    let cameraNode = SCNNode()
    cameraNode.camera = SCNCamera()
    cameraNode.position = SCNVector3(x: 0, y: 0, z: 25)
    scnScene.rootNode.addChildNode(cameraNode)

    let cubeNode = SCNNode()
    cubeNode.geometry = SCNBox(width: 5, height: 5, length: 5, chamferRadius: 0)
    scnScene.rootNode.addChildNode(cubeNode)

    // setup SpriteKit Scene
    let skScene = SKScene()
    skScene.backgroundColor = UIColor.black
    skScene.size = CGSize(width: 100, height: 100)

    // a simple green square
    let skNode = SKShapeNode(rect: CGRect(x: 0, y: 0, width: 20, height: 20))
    skNode.fillColor = UIColor.green
    skNode.position = CGPoint(x: 5, y: 5)

    // a node to hold a video
    let skVideoNode = SKVideoNode(avPlayer: videoPlayer)
    skVideoNode.size = CGSize(width: 100, height: 100)
    skVideoNode.position = CGPoint(x: 5, y: 5)

    skScene.addChild(skNode)
    // skScene.addChild(skVideoNode)

    let material = cubeNode.geometry!.firstMaterial!
    createProgram(for: material)

    let textureProperty = SCNMaterialProperty(contents: skScene)
    material.setValue(textureProperty, forKey: "diffuseTexture")
  }

  func setupVideo() {
    let item = AVPlayerItem(url: URL(fileURLWithPath: Bundle.main.path(forResource: "test", ofType: "mp4")!))
    let player = AVPlayer(playerItem: item)
    let layer = AVPlayerLayer(player: player)
    videoLayer = layer
    videoPlayer = player
    player.play()
  }

  func createProgram(for material: SCNMaterial) {
    let program = SCNProgram()
    program.fragmentFunctionName = "myFragment"
    program.vertexFunctionName = "myVertex"

    material.program = program
  }
}

Now, if I replace the square with the video node in the SpriteKit scene, i.e. if I instead do:

// skScene.addChild(skNode)
skScene.addChild(skVideoNode)

the output is just solid black. The video is indeed playing and outputs sound. If that's relevant, I'm on iOS 10.0.1.

  • Did you initialize the video node with an AVPlayer? Are you streaming the video or using a local file? – schmittsfn Mar 18 '17 at 10:10
  • Yes, I used an AVPlayer (what else is there?). I tried both streaming and playing a local file. Neither worked. – Guig Mar 19 '17 at 17:29
  • There is one alternative I know of: https://github.com/Bilibili/ijkplayer – schmittsfn Mar 20 '17 at 10:11
  • At first, when I read "black output", it looked like your content might be protected, but now I read that the audio is playing and audible, so I guess your content is not encrypted. I know of one method that works for unencrypted content, which is reading the pixel buffer and creating an OpenGL texture with it. You'd use `copyPixelBufferForItemTime` on an `AVPlayerItemVideoOutput` that is attached to your `AVPlayerItem` to get a ref to the pixel buffer, and then use its base address to create an OpenGL texture. – schmittsfn Mar 20 '17 at 10:22
  • I have no experience using SceneKit but once you have a GL texture maybe there is a way to pass it to a SceneKit node? – schmittsfn Mar 20 '17 at 10:24
  • Yeah it's the way I ended up going. I reported the issue to Apple and they told me they knew about it and closed my bug as a duplicate. Thanks – Guig Mar 20 '17 at 16:29
  • Would you be able to share your current workaround/solution? I am also trying to run a SCNShadable or SCNProgram on a video texture. –  Jun 15 '17 at 23:04
  • I've not found a direct workaround. It was a key problem for my product, so I ditched SceneKit and created my own OpenGL renderer to replace it: https://github.com/team-pie/DDDKit – Guig Jun 15 '17 at 23:16
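As a follow-up to the pixel-buffer workaround schmittsfn outlines in the comments above: a rough sketch of that approach, adapted to Metal (the shader in the question is Metal, so a CVMetalTextureCache stands in for the OpenGL-texture step), might look like the code below. The `VideoTextureSource` class name, the BGRA pixel-format assumption, and the per-frame update hook are all illustrative assumptions, not something from the original question; treat it as a sketch, not a verified fix.

import AVFoundation
import CoreVideo
import Metal
import SceneKit

// Hypothetical helper (not from the question): turns the player item's
// current video frame into an MTLTexture via AVPlayerItemVideoOutput.
final class VideoTextureSource {
    // Ask the item for BGRA frames so they map directly to .bgra8Unorm.
    private let output = AVPlayerItemVideoOutput(pixelBufferAttributes: [
        kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
    ])
    private var textureCache: CVMetalTextureCache?

    init(item: AVPlayerItem, device: MTLDevice) {
        item.add(output)
        CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, device, nil, &textureCache)
    }

    // Call once per frame, e.g. from SCNSceneRendererDelegate's renderer(_:updateAtTime:).
    func copyTexture(forHostTime hostTime: CFTimeInterval) -> MTLTexture? {
        let itemTime = output.itemTime(forHostTime: hostTime)
        guard output.hasNewPixelBuffer(forItemTime: itemTime),
              let pixelBuffer = output.copyPixelBuffer(forItemTime: itemTime, itemTimeForDisplay: nil),
              let cache = textureCache else { return nil }

        // Wrap the CVPixelBuffer in a Metal texture without copying the pixels.
        var cvTexture: CVMetalTexture?
        CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, cache, pixelBuffer, nil,
                                                  .bgra8Unorm,
                                                  CVPixelBufferGetWidth(pixelBuffer),
                                                  CVPixelBufferGetHeight(pixelBuffer),
                                                  0, &cvTexture)
        return cvTexture.flatMap { CVMetalTextureGetTexture($0) }
    }
}

Each frame, the resulting MTLTexture could then be handed to the material the same way the SKScene is bound above, for example by updating the contents of the SCNMaterialProperty set for the "diffuseTexture" key. Whether that performs well enough, and whether it fully sidesteps the SKVideoNode issue on iOS 10, would need to be verified on a device.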

0 Answers