I'm working on an implementation of normal mapping, calculating the tangent vectors via the ASSIMP library.

The normal mapping seems to work perfectly on objects whose model matrix is close to the identity matrix. But as soon as I start translating and scaling, my lighting seems off. As you can see in the picture, the normal mapping works perfectly on the container cube, but the lighting fails on the large floor (the direction of the specular highlight should be towards the player, not towards the container).

[Screenshot: Lighting not working on normal mapping]

I get the feeling it has something to do with the position of the light (currently traversing from x = -10 to x = 10 over time) not being properly included in the calculations once I start changing the model matrix (via translation/scaling). I'm posting all the relevant code and hope you can spot something I'm missing, since I've been staring at my code for days.

Vertex shader

#version 330

layout(location = 0) in vec3 position;
layout(location = 1) in vec3 normal;
layout(location = 2) in vec3 tangent;
layout(location = 3) in vec3 color;
layout(location = 4) in vec2 texCoord;

// fragment pass through
out vec3 Position;
out vec3 Normal;
out vec3 Tangent;
out vec3 Color;
out vec2 TexCoord;

out vec3 TangentSurface2Light;
out vec3 TangentSurface2View;

uniform vec3 lightPos;

// vertex transformation
uniform mat4 model;
uniform mat4 view;
uniform mat4 projection;

void main()
{
    mat3 normalMatrix = transpose(mat3(inverse(view * model))); // view-space normal matrix
    Position = vec3((view * model) * vec4(position, 1.0)); 
    Normal = normalMatrix * normal;
    Tangent = tangent;
    Color = color;
    TexCoord = texCoord;

    gl_Position = projection * view * model * vec4(position, 1.0);

    vec3 light = vec3(view * vec4(lightPos, 1.0)); // light position in view space
    vec3 n = normalize(normalMatrix * normal);
    vec3 t = normalize(normalMatrix * tangent);
    vec3 b = cross(n, t);
    mat3 mat = mat3(t.x, b.x, n.x, t.y, b.y, n.y, t.z, b.z, n.z); // rows are t, b, n
    vec3 vector = normalize(light - Position);
    TangentSurface2Light = mat * vector;
    vector = normalize(-Position);
    TangentSurface2View = mat * vector;
}

Fragment shader

#version 330

in vec3 Position;
in vec3 Normal;
in vec3 Tangent;
in vec3 Color;
in vec2 TexCoord;

in vec3 TangentSurface2Light;
in vec3 TangentSurface2View;

out vec4 outColor;

uniform vec3 lightPos;
uniform mat4 view;
uniform sampler2D texture0;
uniform sampler2D texture_normal; // normal

uniform float repeatFactor = 1;

void main()
{   
    vec4 texColor = texture(texture0, TexCoord * repeatFactor);
    vec3 light = vec3(view * vec4(lightPos, 1.0));
    float dist = length(light - Position);
    float att = 1.0 / (1.0 + 0.01 * dist + 0.001 * dist * dist);
    // Ambient
    vec4 ambient = vec4(0.2);
    // Diffuse
    vec3 surface2light = normalize(TangentSurface2Light);
    vec3 norm = normalize(texture(texture_normal, TexCoord * repeatFactor).xyz * 2.0 - 1.0); // remap from [0, 1] to [-1, 1]
    float contribution = max(dot(norm, surface2light), 0.0);
    vec4 diffuse = contribution * vec4(0.8);
    // Specular
    vec3 surf2view = normalize(TangentSurface2View);
    vec3 reflection = reflect(-surface2light, norm); // reflection vector
    float specContribution = pow(max(dot(surf2view, reflection), 0.0), 32);
    vec4 specular = vec4(0.6) * specContribution;

    outColor = (ambient + (diffuse * att) + (specular * pow(att, 3))) * texColor;
}

OpenGL Drawing Code

void Render()
{
    ...

    glm::mat4 view, projection; // Model will be done via MatrixStack
    view = glm::lookAt(position, position + direction, up); // cam pos, look at (eye pos), up vec
    projection = glm::perspective(45.0f, (float)width/(float)height, 0.1f, 1000.0f);
    glUniformMatrix4fv(glGetUniformLocation(basicShader.shaderProgram, "view"), 1, GL_FALSE, glm::value_ptr(view));
    glUniformMatrix4fv(glGetUniformLocation(basicShader.shaderProgram, "projection"), 1, GL_FALSE, glm::value_ptr(projection));

    // Lighting
    lightPos.x = 0.0 + sin(time / 125) * 10;

    glUniform3f(glGetUniformLocation(basicShader.shaderProgram, "lightPos"), lightPos.x, lightPos.y, lightPos.z);

    // Objects  (use bump mapping on this cube)
    bumpShader.Use();
    glUniformMatrix4fv(glGetUniformLocation(bumpShader.shaderProgram, "view"), 1, GL_FALSE, glm::value_ptr(view));
    glUniformMatrix4fv(glGetUniformLocation(bumpShader.shaderProgram, "projection"), 1, GL_FALSE, glm::value_ptr(projection));
    glUniform3f(glGetUniformLocation(bumpShader.shaderProgram, "lightPos"), lightPos.x, lightPos.y, lightPos.z);
    MatrixStack::LoadIdentity();
    MatrixStack::Scale(2);
    MatrixStack::ToShader(glGetUniformLocation(bumpShader.shaderProgram, "model"));

    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, resources.GetTexture("container"));
    glUniform1i(glGetUniformLocation(bumpShader.shaderProgram, "texture0"), 0);
    glActiveTexture(GL_TEXTURE1); // Normal map
    glBindTexture(GL_TEXTURE_2D, resources.GetTexture("container_normal"));
    glUniform1i(glGetUniformLocation(bumpShader.shaderProgram, "texture_normal"), 1);

    glUniform1f(glGetUniformLocation(bumpShader.shaderProgram, "repeatFactor"), 1);
    cubeNormal.Draw();

    MatrixStack::LoadIdentity();
    MatrixStack::Translate(glm::vec3(0.0f, -22.0f, 0.0f));
    MatrixStack::Scale(glm::vec3(200.0f, 20.0f, 200.0f));
    MatrixStack::ToShader(glGetUniformLocation(bumpShader.shaderProgram, "model"));
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, resources.GetTexture("floor"));
    glActiveTexture(GL_TEXTURE1); // Normal map
    glBindTexture(GL_TEXTURE_2D, resources.GetTexture("floor_normal"));
    glUniform1f(glGetUniformLocation(bumpShader.shaderProgram, "repeatFactor"), 100);
    cubeNormal.Draw();

    MatrixStack::LoadIdentity();
    glActiveTexture(GL_TEXTURE0);

    ...
}

EDIT: I now load my objects using the ASSIMP library with the 'aiProcess_CalcTangentSpace' flag enabled and changed my shaders accordingly. Since ASSIMP now automatically calculates the correct tangent vectors, I should have valid tangents, and my problem should be solved (as noted by Nicol Bolas). But I still have the same issue: the specular lighting acts strangely and the diffuse lighting hardly shows up, so I guess there is still something else that is not working correctly. I unmarked your answer as the correct answer, Nicol Bolas (for now), and updated my code accordingly, since there is still something I'm missing.

It probably has something to do with translation. As soon as I add a translation (-22.0f in the y direction) to the model matrix, the lighting becomes strange. As long as the floor (which is actually a cube) has no translation, the lighting looks fine.
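
For reference, this is roughly how I now read the tangents that ASSIMP generates (a simplified sketch; MyVertex is a stand-in for my actual vertex struct, and error handling is omitted):

#include <assimp/Importer.hpp>
#include <assimp/scene.h>
#include <assimp/postprocess.h>
#include <glm/glm.hpp>
#include <vector>

struct MyVertex // simplified stand-in for the real vertex layout
{
    glm::vec3 position, normal, tangent;
    glm::vec2 texCoord;
};

std::vector<MyVertex> LoadMesh(const char* path)
{
    Assimp::Importer importer;
    // aiProcess_CalcTangentSpace makes ASSIMP derive per-vertex tangents
    // from the texture coordinates of each triangle.
    const aiScene* scene = importer.ReadFile(path,
        aiProcess_Triangulate | aiProcess_CalcTangentSpace);

    const aiMesh* mesh = scene->mMeshes[0]; // assuming a single mesh
    std::vector<MyVertex> vertices;
    for (unsigned int i = 0; i < mesh->mNumVertices; ++i)
    {
        MyVertex v;
        v.position = glm::vec3(mesh->mVertices[i].x, mesh->mVertices[i].y, mesh->mVertices[i].z);
        v.normal   = glm::vec3(mesh->mNormals[i].x, mesh->mNormals[i].y, mesh->mNormals[i].z);
        v.tangent  = glm::vec3(mesh->mTangents[i].x, mesh->mTangents[i].y, mesh->mTangents[i].z);
        v.texCoord = glm::vec2(mesh->mTextureCoords[0][i].x, mesh->mTextureCoords[0][i].y);
        vertices.push_back(v);
    }
    return vertices;
}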

Joey Dewd

2 Answers

calculating the tangent vectors in the vertex shader

Well there's your problem. That's not possible for an arbitrary surface.

The tangent and bitangent are not arbitrary vectors that are perpendicular to one another. They are model-space direction vectors that point in the direction of the texture coordinates. The tangent points in the direction of the S texture coordinate, and the bitangent points in the direction of the T texture coordinate (or U and V for the tex coords, if you prefer).

This effectively computes the orientation of the texture relative to each vertex on the surface. You need this orientation, because the way the texture is mapped to the surface matters when you want to make sense of a tangent-space vector.

Remember: tangent-space is the space perpendicular to a surface. But you need to know how that surface is mapped to the object in order to know where "up" is, for example. Take a square surface. You could map a texture so that the +Y part of the square is oriented along the +T direction of the texture. Or it could be along the +X of the square. You could even map it so that the texture is distorted, or rotated at an arbitrary angle.

The tangent and bitangent vectors are intended to correct for this mapping. They point in the S and T directions in model space. So, combined with the normal, they form a transformation matrix to transform from tangent space into whatever space the three vectors are in (you generally transform the TBN into camera space, or whatever space you use for lighting, before using it).
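
To make that concrete, here is a minimal C++/GLM sketch of the idea (not the asker's code; T, B and N are assumed to already be orthonormal and expressed in camera space):

#include <glm/glm.hpp>

// The columns of the matrix are the tangent, bitangent and normal, so the
// matrix maps tangent-space vectors into whatever space T, B and N live in.
glm::vec3 TangentToCamera(const glm::vec3& v,
                          const glm::vec3& T, const glm::vec3& B, const glm::vec3& N)
{
    glm::mat3 TBN(T, B, N);   // columns: T, B, N
    return TBN * v;           // e.g. a normal sampled from a normal map
}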

You cannot compute them by just taking the normal and crossing it with some arbitrary vector. That produces a vector perpendicular to the normal, but not the right one.

In order to correctly compute the tangent/bitangent, you need access to more than one vertex. You need to be able to see how the texture coordinates change over the surface of the mesh, which is how you compute the S and T directions relative to the mesh.

Vertex shaders cannot access more than one vertex. Geometry shaders can't (generally) access enough vertices to do this either. Compute the tangent/bitangent off-line on the CPU.
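
For illustration, here is a minimal sketch of the standard per-triangle computation (C++ with GLM; p0..p2 and uv0..uv2 are the positions and texture coordinates of one triangle, and a real implementation would accumulate these per vertex, average them, and orthonormalize against the normal):

#include <glm/glm.hpp>

// Derive the model-space S and T directions of one triangle from how its
// texture coordinates change across the surface.
void ComputeTangentBasis(const glm::vec3& p0, const glm::vec3& p1, const glm::vec3& p2,
                         const glm::vec2& uv0, const glm::vec2& uv1, const glm::vec2& uv2,
                         glm::vec3& tangent, glm::vec3& bitangent)
{
    glm::vec3 e1 = p1 - p0;        // edge vectors in model space
    glm::vec3 e2 = p2 - p0;
    glm::vec2 dUV1 = uv1 - uv0;    // corresponding texture-coordinate deltas
    glm::vec2 dUV2 = uv2 - uv0;

    // Solve e1 = dUV1.x * T + dUV1.y * B and e2 = dUV2.x * T + dUV2.y * B.
    float r = 1.0f / (dUV1.x * dUV2.y - dUV1.y * dUV2.x);
    tangent   = glm::normalize((e1 * dUV2.y - e2 * dUV1.y) * r); // S direction
    bitangent = glm::normalize((e2 * dUV1.x - e1 * dUV2.x) * r); // T direction
}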

Nicol Bolas
  • Thanks for the extensive answer! I was afraid that might be the case since the vertex shader implementation is a little hack. So the reason the container cube correctly displays the normal mapping is probably a coincidence, since the tangent coordinates must have been correct at its current location/transformation, right? I will look up some guides/tutorials on how to properly calculate tangent vectors offline. – Joey Dewd Aug 08 '13 at 08:12
  • 2
    @Jessy: Catch on? They're in the *OpenGL Specification*. It's a part of GLSL. It's like saying you should call them "pixel shaders" or some other nonsense like that, just because Microsoft doesn't know that something's not a pixel until it actually gets to an image. It's correct OpenGL terminology, the OP asked an OpenGL question, so I answered it in accord with OpenGL. – Nicol Bolas Aug 08 '13 at 12:29
  • @Jessy: "*OpenGL has never used 3D software in its life.*" ... what? OpenGL doesn't use 3D software; it's an *API* (technically a specification). That's like saying that my car has never used a motorcycle. Yeah, I guess that's *true* in some sense, but that has nothing to do with how texture coordinates are named. – Nicol Bolas Aug 08 '13 at 17:40
  • @Jessy: And FYI, the reason OpenGL uses STR for texture coordinates is because UVW confuses the last coordinate W with the W from XYZW. – Nicol Bolas Aug 08 '13 at 17:41
  • People don't use W. Only people who use 3DS Max refer to UVWs, and the W is meaningless. The spec should have changed long ago. We should adopt .uv and continue to not use .stpq. And I agree that you shouldn't take advice from Microsoft. –  Aug 08 '13 at 17:55
  • @Jessy: "*the W is meaningless*" ... So you don't believe in 3D textures? Or cubemap textures, which also use 3D texture coordinates? Or 2D array textures which *also* use 3D texture coordinates? There's a lot more out there than 2D textures. – Nicol Bolas Aug 08 '13 at 18:18
  • Cubemaps are thought of in .xyz terms; people don't store separate lookups in vertex data for them. I can't speak for what people are doing with the others. I have no idea what's going to become commonplace for the third dimension of textures, but .uv is the established standard for the first two. The built-in vec types are likely to become more malleable; e.g. as it is, .w won't compile for a vec3, but sometimes it makes perfect sense there (texture2DProj). It won't be useful to have such rigid structs soon. –  Aug 09 '13 at 17:43
  • @Jessy: "*I have no idea what's going to become commonplace for the third dimension of textures*" You say that as though 3D textures are some kind of new-fangled technology that has never existed before. 3D textures are *common* for many applications outside of real-time graphics, and will become moreso as technology catches up. Furthermore, it's *irrelevant* what will "become commonplace". OpenGL has standardized names for these texture coordinates. Other people can call them other things, but those are the *official* OpenGL names, and they aren't changing just because you want them to. – Nicol Bolas Aug 09 '13 at 18:09
  • @NicolBolas: I unmarked your answer as the correct answer for now Nicol Bolas and edited my question. It seems your suggestion isn't the 'only' issue which causes the strange lighting issues, since I fixed the tangent vector problem. It seems I'm still missing something. – Joey Dewd Aug 11 '13 at 12:08
  • @Nicol Bolas: It doesn't matter if there are standards if there was not enough usage data to make good standards. People don't use the standard .stpq. The standards will change because everybody wants them to. –  Aug 13 '13 at 13:00
  • @Jessy: "*The standards will change*" It took half a decade before the OpenGL ARB removed the accumulated redundancies and pointless garbage of fixed functionality from the OpenGL API. They are *not* going to go through another round of removal breaking *reams* of existing code, *just because* some people want to use the letters "UVW" instead of "STPQ". – Nicol Bolas Aug 13 '13 at 13:09

 mat3 mat = mat3(t.x, b.x, n.x, t.y, b.y, n.y, t.z, b.z, n.z);

This is wrong. In order to use the TBN matrix correctly, you must transpose it, like so:

 mat3 mat = transpose(mat3(t.x, b.x, n.x, t.y, b.y, n.y, t.z, b.z, n.z));

Then use it to transform your light and view vectors into tangent space. Alternatively (and less efficiently), pass the untransposed TBN matrix to the fragment shader, and use it to transform the sampled normal into view space. It's an easy thing to miss, but very important. See http://www.opengl-tutorial.org/intermediate-tutorials/tutorial-13-normal-mapping/ for more info.
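
To see why the two routes are equivalent, here is a small C++/GLM sketch (with made-up example vectors, purely for illustration) checking that both give the same diffuse term:

#include <glm/glm.hpp>
#include <cstdio>

int main()
{
    // A made-up orthonormal view-space basis and light direction.
    glm::vec3 T(1, 0, 0), B(0, 0, -1), N(0, 1, 0);
    glm::mat3 TBN(T, B, N); // columns: T, B, N (maps tangent space -> view space)
    glm::vec3 lightDirView = glm::normalize(glm::vec3(0.3f, 0.8f, 0.5f));
    glm::vec3 sampledNormal = glm::normalize(glm::vec3(0.1f, 0.2f, 0.97f)); // tangent space

    // Route 1: bring the light into tangent space with the transpose.
    float d1 = glm::dot(sampledNormal, glm::transpose(TBN) * lightDirView);
    // Route 2: bring the sampled normal into view space with TBN itself.
    float d2 = glm::dot(TBN * sampledNormal, lightDirView);

    std::printf("%f %f\n", d1, d2); // identical up to float rounding
}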

On a side note, a minor optimisation you can make for your vertex shader is to calculate the normal matrix on the CPU, once per mesh, since it is the same for all vertices of the mesh; this avoids redundant per-vertex work.
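
A sketch of that, reusing the view/model matrices from the question's drawing code and assuming the vertex shader declares a matching uniform mat3 normalMatrix:

#include <glm/glm.hpp>
#include <glm/gtc/type_ptr.hpp>

// Once per mesh on the CPU, instead of inverse/transpose per vertex on the GPU:
glm::mat3 normalMatrix = glm::transpose(glm::inverse(glm::mat3(view * model)));
glUniformMatrix3fv(glGetUniformLocation(bumpShader.shaderProgram, "normalMatrix"),
                   1, GL_FALSE, glm::value_ptr(normalMatrix));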

Ian Young