
I need to get the rotation differences between the model and the camera, convert the values to radians/degrees, and pass them to the fragment shader.

For that I need to decompose the model rotation matrix and maybe the camera view matrix as well. I cannot seem to find a decomposition mechanism suitable for use within a shader.

The rotation details go into the fragment shader to calculate the UV offset.

original_rotation + viewing_angles are used to calculate a final sprite-like offset into the texture below, which is shown as billboards.

Ultimately the UV should offset downwards (e.g. H3 to A3) when looking from below, upwards (e.g. A3 to H3) when looking from above, and left to right and vice versa when looking from the sides (e.g. D1 to D8 and vice versa).
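For reference, mapping a grid cell index to a UV offset on the 8x8 sheet is basically this (a minimal sketch; `cell` is just a hypothetical 0..63 index counted row by row, the same math as the vUv line in the shader below):

    // minimal sketch: map a cell index (0..63) of an 8x8 sheet to a UV offset
    vec2 cell_uv(float cell, vec2 rows_cols, vec2 uv) {
        float col = mod(cell, rows_cols.x);    // column within the row
        float row = floor(cell / rows_cols.x); // row within the sheet
        return vec2(col / rows_cols.x, row / rows_cols.y) + (uv / rows_cols);
    }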

texture to offset

    const vertex_shader = `
    precision highp float;
    uniform mat4 modelViewMatrix;
    uniform mat4 projectionMatrix;
    attribute vec3 position;
    attribute vec2 uv;
    attribute mat4 instanceMatrix;
    attribute float index;
    attribute float texture_index;
    uniform vec2 rows_cols;
    uniform vec3 camera_location;

    varying float vTexIndex;
    varying vec2 vUv;
    varying vec4 transformed_normal;


        float normal_to_orbit(vec3 rotation_vector, vec3 view_vector){

            rotation_vector = normalize(rotation_vector);
            view_vector = normalize(view_vector);
            vec3 x_direction = vec3(1.0,0,0);
            vec3 y_direction = vec3(0,1.0,0);
            vec3 z_direction = vec3(0,0,1.0);

            float rotation_x_length = dot(rotation_vector, x_direction);
            float rotation_y_length = dot(rotation_vector, y_direction);
            float rotation_z_length = dot(rotation_vector, z_direction);

            float view_x_length = dot(view_vector, x_direction);
            float view_y_length = dot(view_vector, y_direction);
            float view_z_length = dot(view_vector, z_direction);

            //TOP
            float top_rotation = degrees(atan(rotation_x_length, rotation_z_length));
            float top_view = degrees(atan(view_x_length, view_z_length));
            float top_final = top_view-top_rotation;
            float top_idx = floor(top_final/(360.0/rows_cols.x));
            //FRONT
            float front_rotation = degrees(atan(rotation_x_length, rotation_z_length));
            float front_view = degrees(atan(view_x_length, view_z_length));
            float front_final = front_view-front_rotation;
            float front_idx = floor(front_final/(360.0/rows_cols.y));

            return abs((front_idx*rows_cols.x)+top_idx);
        }

    vec3 extractEulerAngleXYZ(mat4 mat) {
        vec3 rotangles = vec3(0,0,0);
        rotangles.x = atan(mat[2].z, -mat[1].z);
        float cosYangle = sqrt(pow(mat[0].x, 2.0) + pow(mat[0].y, 2.0));
        rotangles.y = atan(cosYangle, mat[0].z);
        float sinXangle = sin(rotangles.x);
        float cosXangle = cos(rotangles.x);
        rotangles.z = atan(cosXangle * mat[1].y + sinXangle * mat[2].y, cosXangle * mat[1].x + sinXangle * mat[2].x);
        return rotangles;
    }

float view_index(vec3 position, mat4 mv_matrix, mat4 rot_matrix){
    vec4 posInView = mv_matrix * vec4(0.0, 0.0, 0.0, 1.0);
    // posInView /= posInView[3];
    vec3 VinView = normalize(-posInView.xyz); // (0, 0, 0) - posInView
    // vec4 NinView = normalize(rot_matrix * vec4(0.0, 0.0, 1.0, 1.0));
    // float NdotV = dot(NinView, VinView);
    vec4 view_normal = rot_matrix * vec4(VinView.xyz, 1.0);
    float view_x_length = dot(view_normal.xyz, vec3(1.0,0,0));
    float view_y_length = dot(view_normal.xyz, vec3(0,1.0,0));
    float view_z_length = dot(view_normal.xyz, vec3(0,0,1.0));
    // float radians = atan(-view_x_length, -view_z_length);
    float radians = atan(view_x_length, view_z_length);
    // float angle = radians/PI*180.0 + 180.0;
    float angle = degrees(radians);
    if (radians < 0.0) { angle += 360.0;  }
    if (0.0<=angle && angle<=360.0){
        return floor(angle/(360.0/rows_cols.x));
    }
    return 0.0;

}

    void main(){
        vec4 original_normal = vec4(0.0, 0.0, 1.0, 1.0);
        // transformed_normal = modelViewMatrix * instanceMatrix * original_normal;
        vec3 rotangles = extractEulerAngleXYZ(modelViewMatrix * instanceMatrix);
        // transformed_normal = vec4(rotangles.xyz, 1.0);
        transformed_normal = vec4(camera_location.xyz, 1.0);


        vec4 v = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 0.0, 1.0)) + vec4(position.x, position.y, 0.0, 0.0) * vec4(1.0, 1.0, 1.0, 1.0);
        vec4 model_center = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 0.0, 1.0));
        vec4 model_normal = (modelViewMatrix* instanceMatrix* vec4(0.0, 0.0, 1.0, 1.0));
        vec4 cam_loc = vec4(camera_location.xyz, 1.0);
        vec4 view_vector = normalize((cam_loc-model_center));
        //float findex = normal_to_orbit(model_normal.xyz, view_vector.xyz);
        // NOTE: base_matrix and combined_rot are not defined in this snippet (see the fiddle linked below)
        float findex = view_index(position, base_matrix, combined_rot);

        vTexIndex = texture_index;
        vUv = vec2(mod(findex,rows_cols.x)/rows_cols.x, floor(findex/rows_cols.x)/rows_cols.y) + (uv / rows_cols);
        //vUv = vec2(mod(index,rows_cols.x)/rows_cols.x, floor(index/rows_cols.x)/rows_cols.y) + (uv / rows_cols);

        gl_Position = projectionMatrix * v;
        // gl_Position = projectionMatrix * modelViewMatrix * instanceMatrix * vec4(position, 1.0);
    }
    `
  const fragment_shader = (texture_count) => {
    var fragShader = `
          precision highp float;
          uniform sampler2D textures[${texture_count}];
          varying float vTexIndex;
          varying vec2 vUv;
          varying vec4 transformed_normal;

          void main() {
              vec4 finalColor;
              `;
    for (var i = 0; i < texture_count; i++) {
      if (i == 0) {
        fragShader += `if (vTexIndex < ${i}.5) {
                  finalColor = texture2D(textures[${i}], vUv);
                  }
                `
      } else {
        fragShader += `else if (vTexIndex < ${i}.5) {
                  finalColor = texture2D(textures[${i}], vUv);
                  }
                `
      }
    }
    //fragShader += `gl_FragColor = finalColor * transformed_normal; }`;
    fragShader += `gl_FragColor = finalColor; }`;
    // fragShader += `gl_FragColor = startColor * finalColor; }`;   
    // int index = int(v_TexIndex+0.5); //https://stackoverflow.com/questions/60896915/texture-slot-not-getting-picked-properly-in-shader-issue
    //console.log('frag shader: ', fragShader)
    return fragShader;
  }

  function reset_instance_positions() {
    const dummy = new THREE.Object3D();
    const offset = 500*4
    for (var i = 0; i < max_instances; i++) {
      dummy.position.set(offset-(Math.floor(i % 8)*500), offset-(Math.floor(i / 8)*500), 0);
      dummy.updateMatrix();
      mesh.setMatrixAt(i, dummy.matrix);
    }
    mesh.instanceMatrix.needsUpdate = true;
  }

  function setup_geometry() {
    const geometry = new THREE.InstancedBufferGeometry().copy(new THREE.PlaneBufferGeometry(400, 400));
    const index = new Float32Array(max_instances * 1); // index
    for (let i = 0; i < max_instances; i++) {
      index[i] = (i % max_instances) * 1.0 /* index[i] = 0.0  */
    }
    geometry.setAttribute("index", new THREE.InstancedBufferAttribute(index, 1));
    const texture_index = new Float32Array(max_instances * 1); // texture_index
    const max_maps = 1
    for (let i = 0; i < max_instances; i++) {
      texture_index[i] = (Math.floor(i / max_instances) % max_maps) * 1.0 /* index[i] = 0.0  */
    }
    geometry.setAttribute("texture_index", new THREE.InstancedBufferAttribute(texture_index, 1));
    const textures = [texture]
    const grid_xy = new THREE.Vector2(8, 8)
    mesh = new THREE.InstancedMesh(geometry,
      new THREE.RawShaderMaterial({
        uniforms: {
          textures: {
            type: 'tv',
            value: textures
          },
          rows_cols: {
            value: new THREE.Vector2(grid_xy.x * 1.0, grid_xy.y * 1.0)
          },
          camera_location: {
            value: camera.position
          }
        },
        vertexShader: vertex_shader,
        fragmentShader: fragment_shader(textures.length),
        side: THREE.DoubleSide,
        // transparent: true,
      }), max_instances);
    scene.add(mesh);
    reset_instance_positions()
  }

  var camera, scene, mesh, renderer;
  const max_instances = 64

  function init() {

    camera = new THREE.PerspectiveCamera(60, window.innerWidth / window.innerHeight,1, 10000 );
    camera.position.z = 1024;

    scene = new THREE.Scene();
    scene.background = new THREE.Color(0xffffff);

    setup_geometry()
    var canvas = document.createElement('canvas');
    var context = canvas.getContext('webgl2');

    renderer = new THREE.WebGLRenderer({
      canvas: canvas,
      context: context
    });
    renderer.setPixelRatio(window.devicePixelRatio);
    renderer.setSize(window.innerWidth, window.innerHeight);
    document.body.appendChild(renderer.domElement);

    window.addEventListener('resize', onWindowResize, false);

    var controls = new THREE.OrbitControls(camera, renderer.domElement);
  }

  function onWindowResize() {

    camera.aspect = window.innerWidth / window.innerHeight;
    camera.updateProjectionMatrix();

    renderer.setSize(window.innerWidth, window.innerHeight);

  }

  function animate() {
    requestAnimationFrame(animate);

    renderer.render(scene, camera);
  }
  var dataurl = "https://i.stack.imgur.com/accaU.png"

  var texture;
  var imageElement = document.createElement('img');
  imageElement.onload = function(e) {
    texture = new THREE.Texture(this);
    texture.needsUpdate = true;
    init();
    animate();
  };
  imageElement.src = dataurl;

JSFiddle of work so far


1 Answer


So you have a 4x4 transform matrix M applied to an xy-plane QUAD and want to map its 4 corners (p0,p1,p2,p3) to your texture in a "repeat"-like manner (crossing the border on the left/right/up/down returns on the right/left/down/up) based on the direction of the Z axis of the matrix.

You face 2 problems...

  1. The rotation in M is 3 DOF and you want just 2 DOF (yaw, pitch), so if roll is present the result might be questionable

  2. if the texture crosses borders you need to handle this in GLSL to avoid seams

    so either do this in a geometry shader and subdivide the quad where needed, or use an enlarged texture that contains the needed overlaps ...

Now if I did not miss something the conversion is like this:

const float pi = 3.1415926535897932384626433832795;
vec3 d = normalize(M[2].xyz);     // z axis of M (3rd column, see note below)
vec2 dd = normalize(d.xy);
float u = atan(dd.y, dd.x);       // GLSL atan(y,x) is atan2
float v = acos(d.z);
u = (u + pi) / (2.0 * pi);        // <-pi,+pi> -> <0,1>
v = v / pi;                       // <0,pi>    -> <0,1>

The z axis extraction is just a simple copy of the 3rd column/row (depending on your notation) of your matrix 'M', or the result of transforming (0,0,1,0) by it. For more info see:
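For example, a small sketch assuming a column-major GLSL mat4 (both give the same direction):

vec3 d1 = normalize(M[2].xyz);                           // 3rd column = local Z axis
vec3 d2 = normalize((M * vec4(0.0, 0.0, 1.0, 0.0)).xyz); // same via transform, w=0 ignores translation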

In case of an overlapped texture you also need to add this:

const float ov = 1.0/8.0; // overlap size
u = ov + (u/(ov+ov+1.0));
v = ov + (v/(ov+ov+1.0));

And the texture would look like:

overlap texture

In case your quads cover more than 1/8 of your original texture you need to enlarge the overlap ...

Now to handle the corners of the QUAD instead of just its axis, you could translate the quad by a distance l in the Z+ direction in mesh local coordinates, apply M to them and use those 4 points as directions to compute u,v in the vertex shader. The l will affect how much of the texture area is used for the quad ... This approach might even handle roll, but I did not test any of this yet (see the rough sketch below) ...
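One possible reading of that, as an untested sketch (the push distance l and the quad corner position are assumptions on my side; the direction is taken from the transformed quad center):

vec4 c0 = M * vec4(0.0, 0.0, 0.0, 1.0);          // transformed quad center
vec4 p  = M * vec4(position.xy, l, 1.0);         // corner pushed out along local Z by l
vec3 d  = normalize(p.xyz - c0.xyz);             // per-corner direction
vec2 dd = normalize(d.xy);
float u = (atan(dd.y, dd.x) + pi) / (2.0 * pi);  // same mapping as above
float v = acos(d.z) / pi;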

After implementing it, my fears were well grounded: any 2 Euler angles affect each other, so the result is OK in most directions, but in edge cases the output gets mirrored and/or jumps in one or both axes, probably due to the area coverage difference between 3 DOF and 2 DOF (unless I made a bug in my code, or the math was not computed correctly in the vertex shader, which has happened to me before due to a bug in drivers).

If you are going for azimuth/elevation that should be fine, as it is 2 DOF too; the equations above should work for them as well, +/- some range conversion if needed.
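Roughly, reusing d and dd from above (just a sketch of that range conversion):

float azimuth   = atan(dd.y, dd.x);              // <-pi   , +pi  >
float elevation = asin(clamp(d.z, -1.0, 1.0));   // <-pi/2 , +pi/2>
float u = (azimuth + pi) / (2.0 * pi);           // -> <0,1>
float v = (elevation + 0.5*pi) / pi;             // -> <0,1>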

  • overlapping the texture is acceptable. I put the code offset_uv() at https://jsfiddle.net/sadernalwis/adzL047x/95/ .. but after multiplying with the view vector what we get is a Vec4, not a Mat4x4. So normalize(z axis from M) cannot be normalize(mv_matrix[3].xyz), which only accounts for model rotation without accounting for viewer position.. right? – Sadern Alwis Apr 01 '22 at 13:49
  • "radians to <0.0 , 1.0> range" is that a clamp? – Sadern Alwis Apr 01 '22 at 13:52
  • @SadernAlwis no ... you have to use linear interpolation, so the atan result is `(atan(...)+pi)/(2*pi)` and `acos(...)/pi` ... I have it implemented but it was as I feared: it's not working as you expect, as any 2 Euler angles affect each other (even without roll present), so at some angles it works as expected and at some it's mirrored or jumped ... so unless I am missing something the only option is to use [GL_CUBE_MAP](https://i.stack.imgur.com/pqI29.png) and distort the map so it covers the cube/sphere area instead of a single face ... however the distortion will be visible ... – Spektre Apr 01 '22 at 14:04
  • understood. Cubemap seems not the way, given the distortion mapping and the abuse nature of it. I'm gonna look more into azimuth and elevation for now. Anyways, thank you for all your effort and guidance. I'll keep the post updated. – Sadern Alwis Apr 01 '22 at 14:36
  • https://jsfiddle.net/sadernalwis/adzL047x/97/ elevation works as azimuth :D, for now I can live with that. Just need to find out why the actual azimuth and elevation are not computed. – Sadern Alwis Apr 01 '22 at 15:08