I'm debugging a problem with SSAO and trying to visualise my depth buffer. Here's the result:
I'm storing the depth and normals in a single 16-bit RGBA texture. This is my depth pass shader:
// Vertex shader
#version 150 core
#extension GL_ARB_explicit_attrib_location : enable
uniform mat4 _ViewMatrix;
uniform mat4 _ViewProjectionMatrix;
uniform mat4 modelMatrix;
layout (location = 0) in vec4 aPosition;
layout (location = 2) in vec3 aNormal;
out vec4 vPosition;
out vec3 vNormal;
void main()
{
    // Model -> view transform; used for both the view-space position and the normal.
    mat4 modelView = _ViewMatrix * modelMatrix;
    vPosition = modelView * aPosition;
    // NOTE(review): mat3(modelView) only transforms normals correctly when the
    // model-view transform has uniform scale; otherwise the inverse transpose
    // is required — confirm the scene never uses non-uniform scaling.
    vNormal = mat3( modelView ) * aNormal;
    // Clip-space position via the pre-combined view-projection matrix.
    gl_Position = _ViewProjectionMatrix * modelMatrix * aPosition;
}
// Fragment shader.
#version 150 core
// Calculated as 1.0 / (far - near)
uniform float uLinearDepthConstant;
in vec4 vPosition;
in vec3 vNormal;
out vec4 outDepthNormal;
void main()
{
    // View-space z is negative in front of the camera; negate and scale by
    // 1 / (far - near) to get an approximately [0, 1] linear depth.
    // NOTE(review): this is 0 at eye depth 0, not at the near plane, so there
    // is a small near/(far-near) offset at z = -near — confirm that is intended.
    float viewDepth = -vPosition.z * uLinearDepthConstant;
    vec3 unitNormal = normalize( vNormal );
    // Pack linear depth in R and the view-space normal in GBA.
    outDepthNormal = vec4( viewDepth, unitNormal );
}
Then I visualise the depth in a shader that renders the texture (I've hard-coded the near and far plane distances):
void main()
{
    // The depth pass already stored LINEAR depth (-z / (far - near)), so it
    // can be displayed directly. The original code re-applied the
    // perspective linearization formula
    //   2n / (f + n - d * (f - n))
    // which is only valid for the non-linear hardware depth-buffer value
    // (gl_FragCoord.z); applying it to an already-linear depth warps the
    // gradient and is why the visualisation does not look smooth.
    float depth = texture( depthNormalMap, vTexCoord ).r;
    fragColor = vec4( vec3( depth ), 1.0 );
}
Should the result appear smooth or what could be the problem? I'm creating the texture like this:
// Allocate the depth+normal render target: GL_RGBA16F = four 16-bit
// half-float channels (depth in R, normal in GBA per the depth-pass shader).
// NOTE(review): a half float has only a 10-bit mantissa, so storing
// full-range linear depth in one channel can cause visible banding at
// distance — consider a 32-bit channel (e.g. GL_R32F in a second target)
// if precision artifacts appear.
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA16F, width, height, 0, GL_RGBA, GL_HALF_FLOAT, 0 );