
I am trying to calculate the histogram of an image using OpenGL. I have read some posts about this, but I still get errors: my histogram buffer always returns zeros. Posts I have read:

Maknoll Histogram

Luminance histogram calculation in GPU-android opengl es 3.0

My code:

#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <vector>

// Include GLEW
#include <GL/glew.h>

// Include GLFW
#include <GLFW/glfw3.h>
GLFWwindow* window;

// Include GLM
#include <glm/glm.hpp>
using namespace glm;
#include <SOIL.h>

// Shader sources
const GLchar* vertexSource = "\n"
"#version 330 core\n"
"attribute vec3 inPosition;\n"
"void main()\n"
"{\n"
"    float x = inPosition.x;\n"
"\n"
"    gl_Position = vec4(\n"
"        -1.0 + ((x) * 0.0078125),\n"
"        -1,\n"
"        0.0,\n"
"        1.0\n"
"    );\n"
"}\n";

const GLchar* fragmentSource = "\n"
"#version 330 core\n"
"out vec4 outputColor;\n"
"void main()\n"
"{\n"
"    outputColor = vec4(1.0, 1.0, 1.0, 1.0);\n"
"}\n";

void CheckStatus(GLuint obj)
{
    GLint status = GL_FALSE, len = 10;
    if( glIsShader(obj) )   glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
    if( glIsProgram(obj) )  glGetProgramiv( obj, GL_LINK_STATUS, &status );
    if( status == GL_TRUE ) return;
    if( glIsShader(obj) )   glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
    if( glIsProgram(obj) )  glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
    std::vector< char > log( len, 'X' );
    if( glIsShader(obj) )   glGetShaderInfoLog( obj, len, NULL, &log[0] );
    if( glIsProgram(obj) )  glGetProgramInfoLog( obj, len, NULL, &log[0] );
    std::cerr << &log[0] << std::endl;
    exit( -1 );
}

GLfloat buffer[256];
GLuint hist[256];
float _image[512*512*3];

int main()
{
    // Initialise GLFW
    if( !glfwInit() )
    {
        fprintf( stderr, "Failed to initialize GLFW\n" );
        getchar();
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    int width, height;
    unsigned char* image = SOIL_load_image("sample_gray.bmp", &width, &height, 0, SOIL_LOAD_RGB);
    unsigned char image_gray[width * height];
    printf("%d\t%d\n", width, height);

    for (int i = 0; i < width * height; ++i)
    {
        image_gray[i] = image[i * 3];
        _image[i * 3] = image[i * 3];
        _image[i * 3 + 1] = image[i * 3 + 1];
        _image[i * 3 + 2] = image[i * 3 + 2];
    }

    for (int i = 0; i < width * height; ++i)
    {
        hist[image_gray[i]]++;
    }

    // Open a window and create its OpenGL context
    window = glfwCreateWindow(width, height, "Basic", NULL, NULL);
    if( window == NULL ){
        fprintf( stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n" );
        getchar();
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    // Initialize GLEW
    glewExperimental = true; // Needed for core profile
    if (glewInit() != GLEW_OK) {
        fprintf(stderr, "Failed to initialize GLEW\n");
        getchar();
        glfwTerminate();
        return -1;
    }

    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    // Dark blue background
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);

    GLuint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);

    GLuint num_input_data = width * height;

    /* Upload data */
    glBufferData(GL_ARRAY_BUFFER, num_input_data * sizeof(float) * 3, _image, GL_STATIC_DRAW);

    GLuint vertexShader, fragmentShader, shaderProgram;
    // Create and compile the vertex shader
    vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    CheckStatus(vertexShader);
    // Create and compile the fragment shader
    fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    CheckStatus(fragmentShader);
    // Link the vertex and fragment shader into a shader program
    shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outputColor");
    glLinkProgram(shaderProgram);


    CheckStatus(shaderProgram);

    glUseProgram(shaderProgram);


    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "inPosition");
    glEnableVertexAttribArray(posAttrib);
    glVertexAttribPointer(posAttrib, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), 0);

    GLuint tex;
    GLuint fbo;
    glGenTextures(1, &tex);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);

    glGenFramebuffers(1, &fbo);
    glActiveTexture(GL_TEXTURE0);
    // glBindTexture(GL_TEXTURE_2D, tex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexStorage2D(GL_TEXTURE_2D, 1, GL_R32F, 256, 1);
    // glBindTexture(GL_TEXTURE_2D, 0);

    glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, tex, 0);

    /* Clear buffer */
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glBlendEquation(GL_FUNC_ADD);
    glBlendFunc(GL_ONE, GL_ONE);
    glEnable(GL_BLEND);

    /* Init viewport */
    glViewport(0, 0, 256, 1);
    glUseProgram(shaderProgram);
    /* Draw */
    glDrawArrays(GL_POINTS, 0, num_input_data);


    glReadPixels(0, 0, 256, 1, GL_RED, GL_FLOAT, buffer);

    for (int i = 0; i < 256; ++i)
    {
        printf("%d\t%f\t%d\n", i, buffer[i], hist[i]);
    }

}

Could anybody help me? Thanks in advance.

I have updated my code, and it's working now :D

#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <vector>
// Include GLEW
#include <GL/glew.h>

// Include GLFW
#include <GLFW/glfw3.h>
GLFWwindow* window;

// Include GLM
#include <glm/glm.hpp>
using namespace glm;
#include <SOIL.h>

// Shader sources
const GLchar* vertexSource = "\n"
"#version 330 core\n"
"in vec3 inPosition;\n"
"void main()\n"
"{\n"
"    float x = inPosition.x;\n"
"\n"
"    gl_Position = vec4(\n"
"        -1.0 + ((x + 1) * 0.0078125),\n"
"        0.0,\n"
"        0.0,\n"
"        1.0\n"
"    );\n"
"}\n";

const GLchar* fragmentSource = "\n"
"#version 330 core\n"
"out vec4 outputColor;\n"
"void main()\n"
"{\n"
"    outputColor = vec4(1.0, 1.0, 1.0, 1.0);\n"
"}\n";

void CheckStatus(GLuint obj)
{
    GLint status = GL_FALSE, len = 10;
    if( glIsShader(obj) )   glGetShaderiv( obj, GL_COMPILE_STATUS, &status );
    if( glIsProgram(obj) )  glGetProgramiv( obj, GL_LINK_STATUS, &status );
    if( status == GL_TRUE ) return;
    if( glIsShader(obj) )   glGetShaderiv( obj, GL_INFO_LOG_LENGTH, &len );
    if( glIsProgram(obj) )  glGetProgramiv( obj, GL_INFO_LOG_LENGTH, &len );
    std::vector< char > log( len, 'X' );
    if( glIsShader(obj) )   glGetShaderInfoLog( obj, len, NULL, &log[0] );
    if( glIsProgram(obj) )  glGetProgramInfoLog( obj, len, NULL, &log[0] );
    std::cerr << &log[0] << std::endl;
    exit( -1 );
}

void _check_gl_error(int line) 
{
    GLenum err (glGetError());

    while(err!=GL_NO_ERROR) 
    {
        std::string error;

        switch(err) 
        {
            case GL_INVALID_OPERATION:      error="INVALID_OPERATION";      break;
            case GL_INVALID_ENUM:           error="INVALID_ENUM";           break;
            case GL_INVALID_VALUE:          error="INVALID_VALUE";          break;
            case GL_OUT_OF_MEMORY:          error="OUT_OF_MEMORY";          break;
            case GL_INVALID_FRAMEBUFFER_OPERATION:  error="INVALID_FRAMEBUFFER_OPERATION";  break;
        }

        std::cerr << "GL_" << error.c_str() <<":"<<line<<std::endl;
        err=glGetError();
    }
}

GLfloat buffer[256];
GLuint hist[256];
float _image[512*512*3];

int main()
{
    // Initialise GLFW
    if( !glfwInit() )
    {
        fprintf( stderr, "Failed to initialize GLFW\n" );
        getchar();
        return -1;
    }

    glfwWindowHint(GLFW_SAMPLES, 4);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);

    int width, height;
    unsigned char* image = SOIL_load_image("sample_gray.bmp", &width, &height, 0, SOIL_LOAD_RGB);
    unsigned char* image_gray = new unsigned char[width * height];

    printf("%d\t%d\n", width, height);


    for (int i = 0; i < width * height; ++i)
    {
        image_gray[i] = image[i * 3];
        _image[i * 3] = image[i * 3];
        _image[i * 3 + 1] = image[i * 3 + 1];
        _image[i * 3 + 2] = image[i * 3 + 2];
    }

    for (int i = 0; i < width * height; ++i)
    {
        hist[image_gray[i]]++;
    }

    // Open a window and create its OpenGL context
    window = glfwCreateWindow(width, height, "Basic", NULL, NULL);
    if( window == NULL ){
        fprintf( stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n" );
        getchar();
        glfwTerminate();
        return -1;
    }
    glfwMakeContextCurrent(window);

    // Initialize GLEW
    glewExperimental = true; // Needed for core profile
    if (glewInit() != GLEW_OK) {
        fprintf(stderr, "Failed to initialize GLEW\n");
        getchar();
        glfwTerminate();
        return -1;
    }
    _check_gl_error(__LINE__);
    // Ensure we can capture the escape key being pressed below
    glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);

    // Dark blue background
    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);


    GLuint vao;
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    GLuint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);

    GLuint num_input_data = width * height;

    /* Upload data */
    glBufferData(GL_ARRAY_BUFFER, num_input_data * sizeof(float) * 3, _image, GL_STATIC_DRAW);

    GLuint vertexShader, fragmentShader, shaderProgram;
    // Create and compile the vertex shader
    vertexShader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vertexShader, 1, &vertexSource, NULL);
    glCompileShader(vertexShader);
    CheckStatus(vertexShader);
    // Create and compile the fragment shader
    fragmentShader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fragmentShader, 1, &fragmentSource, NULL);
    glCompileShader(fragmentShader);
    CheckStatus(fragmentShader);
    // Link the vertex and fragment shader into a shader program
    shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);
    glBindFragDataLocation(shaderProgram, 0, "outputColor");
    glLinkProgram(shaderProgram);


    CheckStatus(shaderProgram);

    glUseProgram(shaderProgram);


    // Specify the layout of the vertex data
    GLint posAttrib = glGetAttribLocation(shaderProgram, "inPosition");
    glEnableVertexAttribArray(posAttrib);
    glVertexAttribPointer(posAttrib, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), 0);

    GLuint tex;
    GLuint fbo;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);

    glGenTextures(1, &tex);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_R32F, 256, 1, 0, GL_RED, GL_FLOAT, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    // glTexStorage2D(GL_TEXTURE_2D, 1, GL_R32F, 256, 1);

    glBindTexture(GL_TEXTURE_2D, 0);

    glFramebufferTexture(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, tex, 0);

    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    _check_gl_error(__LINE__);

    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    /* Clear buffer */
    glClearColor(0.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);

    glBlendEquation(GL_FUNC_ADD);
    glBlendFunc(GL_ONE, GL_ONE);
    glEnable(GL_BLEND);
    _check_gl_error(__LINE__);


    /* Init viewport */
    glViewport(0, 0, 256, 1);
    glUseProgram(shaderProgram);
    /* Draw */
    glDrawArrays(GL_POINTS, 0, num_input_data);


    glReadPixels(0, 0, 256, 1, GL_RED, GL_FLOAT, buffer);
    _check_gl_error(__LINE__);
    for (int i = 0; i < 256; ++i)
    {
        printf("%d\t%f\t%d\n", i, buffer[i], hist[i]);
    }
    delete[] image_gray; // allocated with new[], so use delete[] rather than free()
}

Special thanks to @Vallentin for your enthusiasm!

  • First thing I see is that you bind `fbo` before you generate it. Second: Your `CheckStatus` method only works if there are no shader object and shader program with the same id. All handles are just unsigned integers and it is perfectly valid for an OpenGL implementation to give you a shader object 0 and a shader program object 0. In that case your method would query both, link status and compile status. – BDL Apr 01 '17 at 18:13
  • @BDL, Thank you. I updated my code below. Can you help me check it again? – Programmer dude Apr 02 '17 at 02:21

1 Answer


My histogram buffer always returns zeros.

Interesting, because I don't know which compiler lets something like this fly by:

int width, height;
unsigned char* image = SOIL_load_image("sample_gray.bmp", &width, &height, 0, SOIL_LOAD_RGB);
unsigned char image_gray[width * height];

Where you'd need to do:

unsigned char *image_gray = new unsigned char[width * height];

Remember to free the memory later with delete[] image_gray.

Your shader also doesn't compile (or maybe your driver is more lenient than mine). Given #version 330 core, you can't use attribute; you have to use in.

in vec3 inPosition;

As it stands, CheckStatus() also reports this:

0(3) : error C7555: 'attribute' is deprecated, use 'in/out' instead

You're also trying to bind a framebuffer you don't even have yet.

GLuint fbo;
glBindFramebuffer(GL_FRAMEBUFFER, fbo);
[...]
glGenFramebuffers(1, &fbo);

You'd need to flip that around:

GLuint fbo;
glGenFramebuffers(1, &fbo);
glBindFramebuffer(GL_FRAMEBUFFER, fbo);

It also puzzles me why your compiler would let that slide, as MSVC screams at me.

Error C4700 uninitialized local variable 'fbo'

You're also not binding the texture before applying any changes to it.

// glBindTexture(GL_TEXTURE_2D, tex);

So uncomment that glBindTexture() call.
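
In other words, the parameter and storage calls only affect whatever texture is currently bound to GL_TEXTURE_2D, so the setup should look roughly like this (a sketch reusing the tex name and the 256x1 GL_R32F layout from your code, with glTexImage2D as in your updated code, since glTexStorage2D is only core in 4.2 as noted further down):

glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex); // bind first, then modify
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_R32F, 256, 1, 0, GL_RED, GL_FLOAT, NULL);
glBindTexture(GL_TEXTURE_2D, 0);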

You're also not creating a vertex array object, which checking glGetError() would have told you.

GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

Do so before calling glEnableVertexAttribArray().

Also, your clear color is red, which isn't going to help you much, as your call to glReadPixels() will then only yield 1.0.

glClearColor(0.0, 0.0, 0.0, 1.0);

Now glReadPixels() still only yields zeros. However, if I unbind the framebuffer, or don't create it at all, it gives what I assume is the desired result. Since skipping the framebuffer shouldn't be necessary, I suspect there's something wrong with it, though I can't seem to pinpoint it by glancing over your code. But as the code is riddled with problems already, this is a fair starting point.
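
One thing worth doing is verifying that the framebuffer is actually complete before drawing into it; a minimal sketch, reusing the fbo from your code:

glBindFramebuffer(GL_FRAMEBUFFER, fbo);
GLenum fboStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if (fboStatus != GL_FRAMEBUFFER_COMPLETE)
{
    // e.g. GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT; printed in hex for easy lookup
    std::cerr << "Framebuffer incomplete: 0x" << std::hex << fboStatus << std::endl;
}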

Additional:

  • You're targeting OpenGL 3.3, while glTexStorage2D() is core in 4.2. Point being: if you want to make sure glTexStorage2D() is supported, target 4.2.
  • The call to glActiveTexture() is redundant.
  • Since your texture has a height of 1, consider using a 1D texture (see the sketch after this list).
  • Remember to check glGetError(), or better yet utilize Debug Output.
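
As a rough sketch of the 1D-texture variant (the tex1d name here is just for illustration, and it assumes the same 256-bin GL_R32F layout as above):

GLuint tex1d;
glGenTextures(1, &tex1d);
glBindTexture(GL_TEXTURE_1D, tex1d);
glTexImage1D(GL_TEXTURE_1D, 0, GL_R32F, 256, 0, GL_RED, GL_FLOAT, NULL);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_1D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glBindTexture(GL_TEXTURE_1D, 0);

// attach to the currently bound framebuffer, then read back with glReadPixels as before
glFramebufferTexture1D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_1D, tex1d, 0);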

Edit

First of all, you can't call _check_gl_error() before you have created the window (and its context) and called glewInit().

Instead of manually adding the line number you could use __LINE__. Thus _check_gl_error(__LINE__).

Again, don't use a red clear color; use a black one: glClearColor(0.0, 0.0, 0.0, 1.0). Blending 1.0 with, say, 0.1 results in 1.1, which is clamped back to 1.0, so the red channel should be 0.0 from the get-go.

I realized the problem. In your vertex shader you're manually setting y to -1.0, which ends up falling outside the screen, given that you're drawing GL_POINTS. Setting y to -0.9999 seems to give the desired result now, although relying on that is like playing with fire.
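
For reference, this is roughly what the position calculation looks like in the updated code above; the exact y value is the only thing that matters here, and any value strictly inside the 1-pixel-high viewport should do:

gl_Position = vec4(
    -1.0 + ((x + 1.0) * 0.0078125),  // 0.0078125 = 2.0 / 256
    0.0,                             // keep the point inside the viewport; -1.0 falls outside
    0.0,
    1.0
);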

If you now run the application, you'll see a mixture of 0.0 and 1.0. The reason you get 1.0 is that in your fragment shader you're setting the red channel of outputColor to 1.0. Summing all those fragments might give some huge value, but in the end it gets clamped back down to 1.0.

Instead try with:

outputColor = vec4(0.005, 1.0, 1.0, 1.0);

Now you should see the output increase and decrease instead of being either 0.0 or 1.0. However, note that any bin where hist[i] is greater than 200 will still end up at 1.0, because 1 / 0.005 = 200.
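
If you then want rough absolute counts back on the CPU, you can divide the scale factor back out after glReadPixels(); a small sketch, assuming the 0.005 increment from above (bins that hit the clamp will top out at 200):

const float increment = 0.005f;
for (int i = 0; i < 256; ++i)
{
    // buffer[i] holds increment * count, clamped to 1.0 once count exceeds 1 / increment
    int approx_count = (int)(buffer[i] / increment + 0.5f);
    printf("%d\t%d\t%d\n", i, approx_count, hist[i]);
}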

Everything is at least working now.

  • Thanks for the reply @Vallentin. I have updated my code with the points you recommend. I added a `_check_gl_error` function using `glGetError()` to check errors. However, my buffer still returns zeros (1.0 if I set the clear color to red, as you said). Can you check my code again? Thank you. One more thing: can you suggest a function to replace `glTexStorage1D`? – Programmer dude Apr 02 '17 at 02:17
  • Thank you very much @Vallentin. It's working now :)) Although I needed to fix a bit in your answer, it is very helpful. It helped me a lot. Thank you again. – Programmer dude Apr 04 '17 at 15:22