
I'm taking a Computer Graphics course at my university. I need to implement basic line drawing algorithms in modern OpenGL (3.3+) to draw primitives on the screen. Here's the function for Bresenham's line drawing algorithm that I want to implement -

// Handles slopes in [0, 1]; other octants need the usual extensions
void bresenham(GLint xStart, GLint yStart, GLint xEnd, GLint yEnd) {
    // Always iterate from left to right
    if (xStart > xEnd) {
        std::swap(xStart, xEnd);
        std::swap(yStart, yEnd);
    }

    GLint dx = xEnd - xStart;
    GLint dy = yEnd - yStart;
    GLint p = 2 * dy - dx;

    GLint x = xStart;
    GLint y = yStart;
    setPixel(x, y);

    while (x < xEnd) {
        x += 1;
        if (p < 0) {
            p += 2 * dy;
        } else {
            p += 2 * (dy - dx);
            y += 1;
        }
        setPixel(x, y); // plot every column, not only when y advances
    }
}

I'm clueless about how to implement the setPixel() function. Most answers I found here and elsewhere use older OpenGL functions -

void setPixel(int x, int y)
{
    glColor3f(0.0, 0.0, 0.0); //Set pixel to black  
    glBegin(GL_POINTS);
    glVertex2i(x, y); //Set pixel coordinates 
    glEnd();
    glFlush(); //Render pixel
}

What is the equivalent way to do this in OpenGL 3.3+? Assuming I can add the "pixels" to a std::vector, how do I initialise the vertex buffer object to store this data?
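Here's roughly what I have in mind, assuming setPixel() simply appends each coordinate pair to a std::vector<GLfloat> called pixels (is this the right approach?):

std::vector<GLfloat> pixels; // filled with x0, y0, x1, y1, ... by bresenham()

GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
// glBufferData takes the size in bytes
glBufferData(GL_ARRAY_BUFFER, pixels.size() * sizeof(GLfloat), pixels.data(), GL_STATIC_DRAW);

// Later, draw one point per (x, y) pair
glDrawArrays(GL_POINTS, 0, (GLsizei)(pixels.size() / 2));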

Another problem I ran into while trying to plot a point using GL_POINTS is that due to clipping during conversion to normalised device coordinates, points beyond the range [-1,1] in either direction do not show on the window.

For example, only the first three points show up on the window screen. See the initialize() function -

#include <stdio.h>
#include <stdlib.h>

#include <GL/glew.h>
#include <GLFW/glfw3.h>

#include <vector>
#include <iostream>
#include <fstream>

// Read a shader source from a file
// store the shader source in a std::vector<char>
void read_shader_src(const char* fname, std::vector<char> &buffer);

// Compile a shader
GLuint load_and_compile_shader(const char *fname, GLenum shaderType);

// Create a program from two shaders
GLuint create_program(const char *path_vert_shader, const char *path_frag_shader);

// Render scene
void display(GLuint &vao, GLFWwindow* window);

// Initialize the data to be rendered
void initialize(GLuint &vao);


//GLFW Callbacks
static void error_callback(int error, const char* description) {
    fprintf(stderr, "Error: %s\n", description);
}

static void key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
    if(key == GLFW_KEY_ESCAPE && action == GLFW_PRESS) {
        glfwSetWindowShouldClose(window, GL_TRUE);
    }
}

int main() {
    glfwSetErrorCallback(error_callback);
    //Initialize GLFW
    if (!glfwInit()) {
        fprintf(stderr, "Failed to initialize GLFW.\n");
        return -1;
    }

    //Set GLFW window settings and create window
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);
    GLFWwindow* window = glfwCreateWindow(500, 500, "My window", NULL, NULL);
    if(!window) {
        fprintf(stderr, "Window or context creation failed.\n");
        return -1;
    }

    glfwSetKeyCallback(window, key_callback);
    glfwMakeContextCurrent(window);

    //Initialize GLEW
    glewExperimental = GL_TRUE;
    if(glewInit() != GLEW_OK) {
        fprintf(stderr, "Failed to initialize glew");
        glfwTerminate();
        return -1;
    }
    //Create a vertex array object
    GLuint vao;

    //Initialize the data to be rendered
    initialize(vao);

    while (!glfwWindowShouldClose(window)) {
        display(vao, window);
        glfwPollEvents();
    }
    glfwTerminate();
    return 0;
}

//Render scene
void display(GLuint &vao, GLFWwindow* window) {
    //Red background
    glClearColor(1.0f, 0.0f, 0.0f, 0.0f); 
    glClear(GL_COLOR_BUFFER_BIT);

    glBindVertexArray(vao);
    glDrawArrays(GL_POINTS, 0, 12);
    // Swap front and back buffers
    glfwSwapBuffers(window);
}

void initialize(GLuint &vao) {
    glEnable(GL_PROGRAM_POINT_SIZE);
    // Use a Vertex Array Object
    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);

    //Store vertex positions in an array
    GLfloat vertices[24] = {
        0.0, 0.0, // Only these
        0.5, 0.5, //three points show up
        1.0, 1.0, //on the window screen

        4.0, 4.0,
        5.0, 5.0,
        6.0, 6.0,

        7.0, 7.0,
        8.0, 8.0,
        9.0, 9.0,

        10.0, 10.0,
        11.0, 11.0,
        12.0, 12.0,
    };

    //Create a vertex buffer object to store the vertex data
    GLuint vbo;
    //Generates 1 buffer object name and stores it in vbo
    glGenBuffers(1, &vbo);
    //Bind the buffer object to the buffer binding target
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    //Creates and initializes the buffer object's data store(with data from vertices)
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    GLuint shaderProgram = create_program("/Users/.../vert.shader", "/Users/.../frag.shader"); //path to shader files

    // Get the location of the attributes that enters in the vertex shader
    GLint posAttrib = glGetAttribLocation(shaderProgram, "position");

    // Specify how the data for position can be accessed
    glVertexAttribPointer(posAttrib, 2, GL_FLOAT, GL_FALSE, 0, 0);

    //Enable the attribute
    glEnableVertexAttribArray(posAttrib);
}

// Read a shader source from a file
// store the shader source in a std::vector<char>
void read_shader_src(const char *fname, std::vector<char> &buffer) {
    std::ifstream in;
    in.open(fname, std::ios::binary);

    if(in.is_open()) {
        // Get the number of bytes stored in this file
        in.seekg(0, std::ios::end);
        size_t length = (size_t)in.tellg();

        // Go to start of the file
        in.seekg(0, std::ios::beg);

        // Read the content of the file in a buffer
        buffer.resize(length + 1);
        in.read(&buffer[0], length);
        in.close();
        // Add a valid C-string terminator
        buffer[length] = '\0';
    }
    else {
        std::cerr << "Unable to open " << fname << " I'm out!" << std::endl;
        exit(-1);
    }
}

//Compile a shader
GLuint load_and_compile_shader(const char* fname, GLenum shaderType) {
    //Load a shader from an external file
    std::vector<char> buffer;
    read_shader_src(fname, buffer);
    const char *src = &buffer[0];

    //Create and compile the shader
    GLuint shader = glCreateShader(shaderType);
    glShaderSource(shader, 1, &src, NULL);
    glCompileShader(shader);

    GLint shader_compiled;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &shader_compiled);
    if(!shader_compiled) {
        GLchar message[1024];
        glGetShaderInfoLog(shader, 1024, NULL, message);
        std::cerr << "Shader compilation failed.";
        std::cerr << "Log: " << &message << std::endl;
        glfwTerminate();
        exit(-1);
    }
    return shader;
}

// Create a program from two shaders
GLuint create_program(const char *path_vert_shader, const char *path_frag_shader) {
    // Load and compile the vertex and fragment shaders
    GLuint vertexShader = load_and_compile_shader(path_vert_shader, GL_VERTEX_SHADER);
    GLuint fragmentShader = load_and_compile_shader(path_frag_shader, GL_FRAGMENT_SHADER);

    // Attach the above shader to a program
    GLuint shaderProgram = glCreateProgram();
    glAttachShader(shaderProgram, vertexShader);
    glAttachShader(shaderProgram, fragmentShader);

    // Flag the shaders for deletion
    glDeleteShader(vertexShader);
    glDeleteShader(fragmentShader);

    // Link and use the program
    glLinkProgram(shaderProgram);
    glUseProgram(shaderProgram);

    return shaderProgram;
}

What modifications do I make so I can plot the rest of the points?
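My current guess is to map each pixel coordinate into [-1, 1] on the CPU before uploading, with something like this hypothetical helper (assuming the 500x500 window from above); is that the idiomatic way?

// Map a pixel coordinate to NDC: 0 -> -1.0, extent - 1 -> 1.0
GLfloat toNDC(GLint coord, GLint extent) {
    return 2.0f * (GLfloat)coord / (GLfloat)(extent - 1) - 1.0f;
}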

Vertex Shader -

#version 150 core

in vec4 position;

void main() {
    gl_Position = position;
    gl_PointSize = 10.0;
}

Fragment Shader -

#version 150 core

out vec4 out_color;

void main() {
    out_color = vec4(1.0, 1.0, 1.0, 1.0);
}
  • I understand that this is just for practice, but setting separate pixels using OpenGL to draw primitives kind of defeats the purpose of GL... – HolyBlackCat Sep 10 '16 at 20:44
  • Do you only have to draw lines or do you have to use Bresenham's algorithm? – pleluron Sep 10 '16 at 20:50
  • @pleluron I have to use the algorithm for the "rasterisation". I know I could have used `GL_LINES` otherwise. – pius Sep 10 '16 at 20:53
  • Simply store the x and y in sequence in an array and use `glDrawArrays()` with GL_POINTS to render the lines. As per the -1,1 create a 2D projection matrix, maybe use glm and send it to your shader and simply multiply points by it. – Harish Sep 10 '16 at 21:03
  • @HolyBlackCat I know. However, choosing between OpenGL and trying to get an ancient `<graphics.h>` library from Borland's Graphics Interface for DOS (what my teacher recommends) to work on my OS X computer, you can guess where my choice fell. – pius Sep 10 '16 at 21:05
  • 1
    This may be a bit of a silly way of doing it, but you could draw the line w/ Bresenham's algorithm to a texture on the CPU end and then draw that quad to the screen normally. I can write up a more detailed answer if you're interested – Robert Rouhani Sep 11 '16 at 00:34
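A minimal sketch of the projection-matrix route suggested in the comments, assuming GLM is available and that the vertex shader gains a uniform mat4 projection that position is multiplied by (gl_Position = projection * position;). The variable names here are illustrative:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>

// After create_program() (which already calls glUseProgram):
// map pixel coordinates in [0, 500] to clip space
glm::mat4 projection = glm::ortho(0.0f, 500.0f, 0.0f, 500.0f);
GLint projLoc = glGetUniformLocation(shaderProgram, "projection");
glUniformMatrix4fv(projLoc, 1, GL_FALSE, glm::value_ptr(projection));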

1 Answer


From the comments, it sounds like you're trying to use OpenGL in place of a really old graphics library that is required for a class. Computer graphics have changed enough that per-pixel drawing is unreasonable in modern OpenGL, so for the current assignment and your later ones you could try the following:

Disclaimer: This is not a reasonable way to draw a line in modern OpenGL

  1. Create a 2D array of some arbitrary size, large enough that the entire line can be drawn on it.
  2. Draw the line using your original function, with a setPixel() that changes elements in that array (a sketch follows this list).
  3. Once you're done drawing the line (or doing whatever else future assignments will have you do), create an OpenGL texture from that array. An excellent guide is available here: https://open.gl/textures
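For steps 1 and 2, a plain RGB float array works. A rough sketch, assuming w and h are chosen large enough to hold every point of the line (the same w, h, and pixels are assumed by the glTexImage2D call below):

#include <vector>

const int w = 512, h = 512;           // arbitrary; must be big enough for the whole line
std::vector<float> pixels(w * h * 3); // one RGB triple per pixel, zero-initialised (black)

// Write a white pixel at (x, y); no bounds checking, for brevity
void setPixel(int x, int y) {
    int i = (y * w + x) * 3;
    pixels[i] = pixels[i + 1] = pixels[i + 2] = 1.0f;
}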

Some rough pseudocode:

GLuint tex;
glGenTextures(1, &tex);
glBindTexture(GL_TEXTURE_2D, tex);
// Set min/mag filters; the texture is incomplete without a valid MIN filter
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Upload the w x h RGB float array drawn into by setPixel()
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, w, h, 0, GL_RGB, GL_FLOAT, pixels);
glBindTexture(GL_TEXTURE_2D, 0);
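Here pixels is the CPU-side array from the sketch above (pass pixels.data() if it's a std::vector), and w and h are its dimensions; with GL_FLOAT data, each channel is a float in [0, 1].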

Then you would create a quad (really just two triangles) that draws this texture to the screen. Following the open.gl guide should work well, since they already do this for their textures. Pulling from their provided code (which does a bit extra, all correct and following the spec though):

GLfloat vertices[] = {
//  Position      Color             Texcoords
    -0.5f,  0.5f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, // Top-left
     0.5f,  0.5f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, // Top-right
     0.5f, -0.5f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, // Bottom-right
    -0.5f, -0.5f, 1.0f, 1.0f, 1.0f, 0.0f, 1.0f  // Bottom-left
};

GLuint elements[] = {
    0, 1, 2,
    2, 3, 0
};

// Set up buffer objects, create shaders, initialize GL, etc.

//drawing
//bind buffers, enable attrib arrays, etc
glBindTexture(GL_TEXTURE_2D, tex);

glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
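If later assignments redraw the line every frame, the texture can be updated in place rather than recreated; a sketch, assuming the same tex, w, h, and pixels as above:

// Each frame, after changing the pixels array:
glBindTexture(GL_TEXTURE_2D, tex);
// Re-upload the CPU-side pixels without reallocating the texture's storage
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGB, GL_FLOAT, pixels.data());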
  • If you want to get really fancy you could get yourself a persistently mappable pixel buffer object. With that your pixel pushing operations would happen (logically) directly on graphics memory and the finishing glTexSubImage would only update the texture object. In that line you could even go bleeding-edge and use the Vulkan API instead, where a "texture" is actually just metadata for how to interpret the data in an arbitrarily accessible buffer as image data. – datenwolf Sep 11 '16 at 10:18
  • Can you elaborate on points 1. and 2.? More specifically, I want to know how to declare and initialise the `pixel` variable passed to `glTexImage2D`. The tutorial only shows how to load textures from a pre-existing image. – pius Sep 17 '16 at 05:19
  • @datenwolf Based on your answer here (http://stackoverflow.com/a/18369213/5306573): "However a texture might not be the most efficient way to directly update single pixels on the screen either. It is however a great idea to first draw pixels of an pixel buffer, which for display is loaded into a texture, that then gets drawn onto a full viewport quad." Any hints on how to do this (drawing pixels on a pixel buffer); which I presume can then be passed in a call to `glTexImage2D`? – pius Sep 17 '16 at 14:48
  • @ByteMan2021 could be as simple as `float pixels[w * h * 3];`, with a `setPixel` implementation as simple as `pixels[(y * w * 3) + (x * 3)] = 1; pixels[(y * w * 3) + (x * 3) + 1] = 1; pixels[(y * w * 3) + (x * 3) + 2] = 1;` The threes in this case refer to the R, G, and B channels of the texture I described above. You can always change the other parameters of `glTexImage2D` to simplify this to a grayscale texture. – Robert Rouhani Sep 17 '16 at 16:22
  • Worked like a charm!! Thanks a lot. The only catch I think is that `w` and `h` must be reasonably large so that the top rightmost point can be plotted without getting an index out of bounds error in the `setPixel` function. – pius Sep 17 '16 at 18:26