
OK, I'm going to keep this as simple as possible. I want to pass data between shader programs. I'm currently using readPixels to do that, but I suspect it's slowing operations down, and I'm exploring faster options.
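
For context, the part I'm worried about is this kind of per-frame readback (simplified; gl, width, and height are placeholder names): program2's output gets pulled back into JS with readPixels before I can hand it to program1.

// The per-frame readback I'm currently doing, roughly (placeholder names):
// pull program2's output back into a JS typed array so program1 can use it.
const pixels = new Uint8Array(width * height * 4);
gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, pixels);
// ...pixels then gets handed over to program1 (upload step omitted here)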

what my program does:

  1. program1 does my rendering to the canvas.
  2. program2 does some wonderful operations in its shaders that I want to pass to program1.

MY QUESTIONS:

  1. Is it possible to use the VBO from program2 and pass that to program1 for rendering? From what it sounds like in the link I give below, you can't share data across contexts, meaning a buffer from one context can't be used in another. But maybe I'm missing something.
  2. I believe the method mentioned in this article would do what I'm looking for, by rendering to a canvas and then using texImage2D to update program1 (Copy framebuffer data from one WebGLRenderingContext to another?). Am I correct? If so, would this be faster than using readPixels? (I ask because if texImage2D is about the same speed I won't bother.)
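
To be concrete, the texImage2D approach I mean in question 2 would look roughly like this (a sketch only; glMain, tex, and program2Canvas are placeholder names for program1's context, a texture that is assumed to already be created and configured, and the canvas program2 renders to):

// Sketch of the texImage2D approach from question 2 (placeholder names).
// texImage2D accepts a canvas as its source, so program2's canvas could be
// uploaded directly as a texture in program1's context instead of calling readPixels.
glMain.bindTexture(glMain.TEXTURE_2D, tex);
glMain.texImage2D(glMain.TEXTURE_2D, 0, glMain.RGBA,
                  glMain.RGBA, glMain.UNSIGNED_BYTE, program2Canvas);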

thanks in advance to anyone who answers.

  • You mention multiple contexts. Multiple contexts have nothing to do with passing data from one shader to another. Most WebGL apps don't use multiple contexts and would have no need even if they use multiple shaders so it might be better to describe your goal. – gman May 04 '20 at 10:05
  • To describe my need: I'm rendering my scene with program1 and I'm moving all the vertices for this scene using program2 (since the number of vertices is large, I'm using this separate program, program2, to quickly do all this vector math rather than using JS). I believe this justifies my need for a second program. Updating very large numbers of vertices in JS per frame would be incredibly slow. – Tony Thedea May 04 '20 at 11:23
  • A single WebGL context can have 1000s of programs. I'm not saying you don't need a second program. What you don't need is a second WebGL context – gman May 04 '20 at 11:46

2 Answers


The normal way to pass data from one shader to the next is to render to a texture (by attaching that texture to a framebuffer). Then pass that texture to the second shader.

function main() {
  const gl = document.querySelector('canvas').getContext('webgl2');
  if (!gl) {
    return alert('need webgl2');
  }

  const vs1 = `#version 300 es
  void main () {
    gl_Position = vec4(0, 0, 0, 1);
    gl_PointSize = 64.0;
  }
  `;
  
  const fs1 = `#version 300 es
  precision highp float;
  out vec4 myOutColor;
  void main() {
    myOutColor = vec4(fract(gl_PointCoord * 4.), 0, 1);
  }
  `;
  
  const vs2 = `#version 300 es
  in vec4 position;
  void main () {
    gl_Position = position;
    gl_PointSize = 32.0;
  }
  `;
  
  const fs2 = `#version 300 es
  precision highp float;
  uniform sampler2D tex;
  out vec4 myOutColor;
  void main() {
    myOutColor = texture(tex, gl_PointCoord);
  }
  `;

  // make 2 programs
  const prg1 = twgl.createProgram(gl, [vs1, fs1]);
  const prg2 = twgl.createProgram(gl, [vs2, fs2]);

  // make a texture
  const tex = gl.createTexture();
  const texWidth = 64;
  const texHeight = 64;
  gl.bindTexture(gl.TEXTURE_2D, tex);
  gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA8, texWidth, texHeight, 0,
                gl.RGBA, gl.UNSIGNED_BYTE, null);
  gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

  // attach texture to framebuffer
  const fb = gl.createFramebuffer();
  gl.bindFramebuffer(gl.FRAMEBUFFER, fb);
  gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0,
                          gl.TEXTURE_2D, tex, 0);

  // render to texture
  gl.viewport(0, 0, texWidth, texHeight);
  gl.useProgram(prg1);
  gl.drawArrays(gl.POINTS, 0, 1);
  
  // render texture (output of prg1) to canvas using prg2
  gl.bindFramebuffer(gl.FRAMEBUFFER, null);
  gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
  gl.useProgram(prg2);
  // note: the texture is already bound to texture unit 0
  // and uniforms default to 0 so the texture is already setup
  const posLoc = gl.getAttribLocation(prg2, 'position')
  const numDraws = 12
  for (let i = 0; i < numDraws; ++i) {
    const a = i / numDraws * Math.PI * 2;
    gl.vertexAttrib2f(posLoc, Math.sin(a) * .7, Math.cos(a) * .7);
    gl.drawArrays(gl.POINTS, 0, 1);
  }
}
main();
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

You can also use "transform feedback" to store the outputs of a vertex shader into one or more buffers, and those buffers can of course then be used as input to another shader.

// this example from
// https://webgl2fundamentals.org/webgl/lessons/resources/webgl-state-diagram.html?exampleId=transform-feedback
const canvas = document.querySelector('canvas');
const gl = canvas.getContext('webgl2');

const genPointsVSGLSL = `#version 300 es
uniform int numPoints;
out vec2 position;
out vec4 color;

#define PI radians(180.0)

void main() {
    float u = float(gl_VertexID) / float(numPoints);
    float a = u * PI * 2.0;
    position = vec2(cos(a), sin(a)) * 0.8;
    color = vec4(u, 0, 1.0 - u, 1);
}
`;

const genPointsFSGLSL = `#version 300 es
void main() {
  discard;
}
`;

const drawVSGLSL = `#version 300 es
in vec4 position;
in vec4 color;

out vec4 v_color;

void main() {
  gl_PointSize = 20.0;
  gl_Position = position;
  v_color = color;
}
`;

const drawFSGLSL = `#version 300 es
precision highp float;

in vec4 v_color;

out vec4 outColor;

void main() {
    outColor = v_color;
}
`;

const createShader = function(gl, type, glsl) {
  const shader = gl.createShader(type)
  gl.shaderSource(shader, glsl)
  gl.compileShader(shader)
  if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    throw new Error(gl.getShaderInfoLog(shader))
  }
  return shader
};

const createProgram = function(gl, vsGLSL, fsGLSL, outVaryings) {
  const vs = createShader(gl, gl.VERTEX_SHADER, vsGLSL)
  const fs = createShader(gl, gl.FRAGMENT_SHADER, fsGLSL)
  const prg = gl.createProgram()
  gl.attachShader(prg, vs)
  gl.attachShader(prg, fs)
  if (outVaryings) {
    gl.transformFeedbackVaryings(prg, outVaryings, gl.SEPARATE_ATTRIBS)
  }
  gl.linkProgram(prg)
  if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
    throw new Error(gl.getProgramInfoLog(prg))
  }
  return prg
};

const genProg = createProgram(gl, genPointsVSGLSL, genPointsFSGLSL, ['position', 'color']);
const drawProg = createProgram(gl, drawVSGLSL, drawFSGLSL);

const numPointsLoc = gl.getUniformLocation(genProg, 'numPoints');

const posLoc = gl.getAttribLocation(drawProg, 'position');
const colorLoc = gl.getAttribLocation(drawProg, 'color');

const numPoints = 24;

// make a vertex array and attach 2 buffers
// one for 2D positions, 1 for colors.
const dotVertexArray = gl.createVertexArray();
gl.bindVertexArray(dotVertexArray);

const positionBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
gl.bufferData(gl.ARRAY_BUFFER, numPoints * 2 * 4, gl.DYNAMIC_DRAW);
gl.enableVertexAttribArray(posLoc);
gl.vertexAttribPointer(
    posLoc,       // location
    2,            // size (components per iteration)
    gl.FLOAT,     // type of data to get from buffer
    false,        // normalize
    0,            // stride (bytes to advance each iteration)
    0,            // offset (bytes from start of buffer)
);

const colorBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, colorBuffer);
gl.bufferData(gl.ARRAY_BUFFER, numPoints * 4 * 4, gl.DYNAMIC_DRAW);
gl.enableVertexAttribArray(colorLoc);
gl.vertexAttribPointer(
    colorLoc,   // location
    4,          // size (components per iteration)
    gl.FLOAT,   // type of data to get from buffer
    false,      // normalize
    0,          // stride (bytes to advance each iteration)
    0,          // offset (bytes from start of buffer)
);

// This is not really needed but if we end up binding anything
// to ELEMENT_ARRAY_BUFFER, say we are generating indexed geometry
// we'll change dotVertexArray's ELEMENT_ARRAY_BUFFER. By binding
// null here that won't happen.
gl.bindVertexArray(null);

// setup a transform feedback object to write to
// the position and color buffers
const tf = gl.createTransformFeedback();
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, positionBuffer);
gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 1, colorBuffer);
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);

// above this line is initialization code
// --------------------------------------
// below is rendering code.

// --------------------------------------
// First compute points into buffers

// no need to call the fragment shader
gl.enable(gl.RASTERIZER_DISCARD);

// unbind the buffers so we don't get errors.
gl.bindBuffer(gl.TRANSFORM_FEEDBACK_BUFFER, null);
gl.bindBuffer(gl.ARRAY_BUFFER, null);

gl.useProgram(genProg);

// generate numPoints of positions and colors
// into the buffers
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, tf);
gl.beginTransformFeedback(gl.POINTS);
gl.uniform1i(numPointsLoc, numPoints);
gl.drawArrays(gl.POINTS, 0, numPoints);
gl.endTransformFeedback();
gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, null);

// turn on using fragment shaders again
gl.disable(gl.RASTERIZER_DISCARD);

// --------------------------------------
// Now draw using the buffers we just computed

gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

gl.bindVertexArray(dotVertexArray);
gl.useProgram(drawProg);
gl.drawArrays(gl.POINTS, 0, numPoints);
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
<canvas></canvas>

Also this answer might be useful.

gman
  • So it seems what I was asking about in my first question may be possible, BUT a separate canvas (which would require a second context; please correct me if that statement is wrong) would be unnecessary. **What I mean to say is, it sounds like I can render to a texture in program2 and then take the output in the framebuffer from this program and use it as the vbo input for program1.** I know I'm probably messing up some terms and perhaps confusing framebuffers and renderbuffers here, but does this general idea sound correct? – Tony Thedea May 04 '20 at 12:27
  • I believe that's roughly what you did in your WebGL particle example, in the code snippet you provided in the link above ( https://stackoverflow.com/questions/56780278/how-to-keep-coordination-between-particles-and-which-texture-pixel-contains-each ). – Tony Thedea May 04 '20 at 12:28
  • You can either (a) output to a vertex buffer (VBO) from a vertex shader using transform feedback (the bottom example), or (b) output to a texture from a fragment shader using a framebuffer (the top example). You cannot use a texture as a vertex buffer (VBO), but you can use a texture as vertex data (a short sketch of this follows these comments). [Example](https://webgl2fundamentals.org/webgl/lessons/webgl-pulling-vertices.html) and yes, that S.O. answer is also an example – gman May 04 '20 at 12:56
  • Just clarifying so I don't miss anything: **you can't set attribute values for a shader using a texture's fb output, correct?** In addition, "...you can use a texture as vertex data" means one could use texture data to set gl_Position rather than attribute data (which I believe is what the examples you gave are doing: [example1](https://webgl2fundamentals.org/webgl/lessons/webgl-pulling-vertices.html) and [example2](https://stackoverflow.com/questions/56780278/how-to-keep-coordination-between-particles-and-which-texture-pixel-contains-each)), correct? – Tony Thedea May 04 '20 at 13:31
  • I'm pretty sure the answer is yes to both questions; just wanted to be sure. Transform feedback sounds promising though, and very much like what I stated when I said: "it sounds like I can render to a texture in program2 and then take the output in the framebuffer from this program and use it as the vbo input for program1." EXCEPT, as you said, I CAN'T send my texture to a vbo, but I CAN use the buffer output from one program to feed into another using transform feedback. It seems the only difference is I'm using transform feedback, roughly correct? – Tony Thedea May 04 '20 at 14:02
  • I'm just now learning about transform feedback and this link seems helpful as a sample: https://www.ibiblio.org/e-notes/webgl/gpu/bounce.htm I'll post back here when I've explored it more. thanks @gman – Tony Thedea May 04 '20 at 14:03
  • I gave you a transform feedback demo above. I'm not sure how that link is more or less helpful. But if it helps you great – gman May 04 '20 at 14:16
  • You are correct, I went back and reviewed yours. It's even better. thanks – Tony Thedea May 04 '20 at 14:29
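
To make the "use a texture as vertex data" point from the comments above concrete, here is a minimal vertex-pulling sketch (an assumed illustration in the style of the linked webgl2fundamentals article, not code from either answer): the draw shader declares no position attribute; each vertex fetches its own position from a texture using gl_VertexID.

// Assumed sketch of "pulling vertices": no position attribute at all;
// each vertex fetches its own position from a texture using gl_VertexID.
// The texture layout (one RGBA32F texel per vertex) is an assumption.
const pullVS = `#version 300 es
uniform highp sampler2D positionTex;
void main() {
  int w = textureSize(positionTex, 0).x;
  ivec2 texel = ivec2(gl_VertexID % w, gl_VertexID / w);
  gl_Position = vec4(texelFetch(positionTex, texel, 0).xyz, 1.0);
  gl_PointSize = 4.0;
}
`;

The texture itself can be filled by a fragment shader through a framebuffer, exactly like the first snippet in the answer above.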

OK, so what I was trying to do is something like the following (hopefully this helps someone else in the future). Basically I want one shader doing the movement calculations (program#2) for another shader that does the rendering (program#1), so that I avoid any vector calculations in JS. This example combines @gman's transform feedback sample and the sample I provided above:

   const canvas = document.querySelector('canvas');
   var gl = canvas.getContext('webgl2', {preserveDrawingBuffer: true});
   
   
   // ___________shaders
   
   // ___________vs and fs #1

   const genPointsVSGLSL = `#version 300 es
   in vec4 aPos;
   void main(void) {
      gl_PointSize = 20.0;
      gl_Position = vec4( -0.01 + aPos.x , -0.01+aPos.y , aPos.zw);
   }
   `;

   const genPointsFSGLSL = `#version 300 es
   precision highp float;
   out vec4 color;
   void main() {
    discard;
    //color = vec4(0.5,0.5,0.0,1.0);
   }
   `;
  
  
  
   // ___________vs and fs #2
   
   const drawVSGLSL = `#version 300 es
   in vec4 position;

   void main() {
     gl_PointSize = 20.0;
     gl_Position = position;
   }
   `;

   const drawFSGLSL = `#version 300 es
   precision highp float;

   out vec4 outColor;

   void main() {
    outColor = vec4(1.0, 0.0, 0.0, 1.0); // red (color components are in the 0.0 to 1.0 range)
   }
   `;
   
  
  
  
   // create shaders and programs code

   const createShader = function(gl, type, glsl) {
     const shader = gl.createShader(type)
     gl.shaderSource(shader, glsl)
     gl.compileShader(shader)
     if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
    throw new Error(gl.getShaderInfoLog(shader))
     }
     return shader
   };

   const createProgram = function(gl, vsGLSL, fsGLSL, outVaryings) {
     const vs = createShader(gl, gl.VERTEX_SHADER, vsGLSL)
     const fs = createShader(gl, gl.FRAGMENT_SHADER, fsGLSL)
     const prg = gl.createProgram()
     gl.attachShader(prg, vs)
     gl.attachShader(prg, fs)
     if (outVaryings) {
    gl.transformFeedbackVaryings(prg, outVaryings, gl.SEPARATE_ATTRIBS)
     }
     gl.linkProgram(prg)
     if (!gl.getProgramParameter(prg, gl.LINK_STATUS)) {
    throw new Error(gl.getProgramParameter(prg))
     }
     return prg
   };

   // only genProg's outputs are captured with transform feedback, so only it needs varyings declared
   const genProg = createProgram(gl, genPointsVSGLSL, genPointsFSGLSL, ['gl_Position']);
   const drawProg = createProgram(gl, drawVSGLSL, drawFSGLSL);

   
   
   // program1 location attribute
   const positionLoc = gl.getAttribLocation( drawProg , 'position');
   // program2 location attribute
   const aPosLoc = gl.getAttribLocation( genProg , 'aPos');
  

  
   var vertizes = [0.8,0,0,1, 0.8,0.5,0,1]; // two points, x,y,z,w each
   var indizes = vertizes.length/4;         // number of points

  
   // create buffers and transform feedback
   var bufA = gl.createBuffer()
   gl.bindBuffer(gl.ARRAY_BUFFER, bufA)
   gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( vertizes ), gl.DYNAMIC_COPY)

   var bufB = gl.createBuffer()
   gl.bindBuffer(gl.ARRAY_BUFFER, bufB)
   gl.bufferData(gl.ARRAY_BUFFER, new Float32Array( vertizes ) , gl.DYNAMIC_COPY)

   var transformFeedback = gl.createTransformFeedback()
   gl.bindTransformFeedback(gl.TRANSFORM_FEEDBACK, transformFeedback)
  
  
  
  
   // draw

   function draw(){

     gl.clear(gl.COLOR_BUFFER_BIT);

     // draw the dots with the rendering program (program#1), using the
     // positions currently stored in bufA
     gl.useProgram( drawProg );
     gl.bindBuffer( gl.ARRAY_BUFFER, bufA );
     gl.enableVertexAttribArray( positionLoc );
     gl.vertexAttribPointer( positionLoc, 4, gl.FLOAT, false, 0, 0 );
     gl.drawArrays(gl.POINTS, 0, indizes);

     // now run the movement calculation (program#2): read the current
     // positions from bufA and capture the shifted positions into bufB
     // with transform feedback. RASTERIZER_DISCARD skips rasterization
     // because only the captured vertex output is needed here.
     gl.useProgram( genProg );
     gl.bindBuffer(gl.ARRAY_BUFFER, bufA);
     gl.enableVertexAttribArray( aPosLoc );
     gl.vertexAttribPointer(aPosLoc, 4, gl.FLOAT, false, 0, 0);

     gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, bufB);
     gl.enable(gl.RASTERIZER_DISCARD);
     gl.beginTransformFeedback(gl.POINTS);
     gl.drawArrays(gl.POINTS, 0, indizes);
     gl.endTransformFeedback();
     gl.disable(gl.RASTERIZER_DISCARD);
     gl.bindBufferBase(gl.TRANSFORM_FEEDBACK_BUFFER, 0, null);

     // swap bufA and bufB in preparation for the next draw call
     var t = bufA;
     bufA = bufB;
     bufB = t;

   }
  
  
  
   setInterval( draw , 100 );
<script src="https://twgljs.org/dist/4.x/twgl.min.js"></script>
  <canvas></canvas>