
I have a simple D application using DerelictGL3 and DerelictSDL2. I am trying to render a red triangle using vertex buffer objects, but whenever I call glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null), glGetError() returns 1282 (GL_INVALID_OPERATION). The code is as follows...

app.d

import std.string;
import std.conv;
import std.stdio;
import std.file;
import std.c.stdlib;

import derelict.opengl3.gl3;
import derelict.sdl2.sdl;

void main() {
    DerelictSDL2.load();
    DerelictGL3.load();

    assert(SDL_Init(SDL_INIT_VIDEO) >= 0);

    auto vertex_shader = (cast(string) read("shaders/minimal.vert")).toStringz;
    auto fragment_shader = (cast(string) read("shaders/minimal.frag")).toStringz;

    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);

    auto final_param = SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN;
    auto window = SDL_CreateWindow("Triangle", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 640, 480, final_param);

    assert(window);

    SDL_GL_CreateContext(window);

    DerelictGL3.reload();

    GLfloat[] vertices = [1, 1, 1, -1, -1, 1, -0.8, -1, 1];

    uint vbo;
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, vertices.length * float.sizeof, vertices.ptr, GL_STATIC_DRAW);
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    auto program = glCreateProgram();

    auto vshader = glCreateShader(GL_VERTEX_SHADER);
    glShaderSource(vshader, 1, &vertex_shader, null);
    glCompileShader(vshader);
    glAttachShader(program, vshader);

    auto fshader = glCreateShader(GL_FRAGMENT_SHADER);
    glShaderSource(fshader, 1, &fragment_shader, null);
    glCompileShader(fshader);
    glAttachShader(program, fshader);

    glLinkProgram(program);
    glUseProgram(program);

    auto position = glGetAttribLocation(program, "position");

    auto close = false;

    check();

    while(!close) {
        SDL_Event event;
        while(SDL_PollEvent(&event)) {
            if(event.type == SDL_QUIT) close = true;
        }

        glClearColor(1, 0.4, 0.4, 1);
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

        glBindBuffer(GL_ARRAY_BUFFER, vbo);
        glEnableVertexAttribArray(position);
        check("before"); // Does not return error.
        glVertexAttribPointer(position, 2, GL_FLOAT, GL_FALSE, float.sizeof * 2, null); // <- Causing Invalid Operation GL error.
        check("after"); // Returns error.

        glDrawArrays(GL_TRIANGLES, 0, 3);

        SDL_GL_SwapWindow(window);
    }
}

void check(string msg="") {
    auto error = glGetError();
    assert(error == 0, msg ~ ":" ~ to!string(error));
}

shaders/minimal.vert

#version 120
attribute vec2 position;
void main(void) {
    gl_Position = vec4(position, 0, 1);
}

shaders/minimal.frag

#version 120
void main(void) {
    gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0);
}

If I ignore the error, a red rectangle is rendered in the top-right corner of the screen.

Asraelite
  • I'm not sure about those D-specific libraries and stuff. You do not explicitly require a core or compatibility profile with SDL, and I don't know what SDL will do in that case. But if you end up with a GL 3.x _core_ profile, vertex array objects are mandatory, and the error would be understandable, since you don't have a VAO bound when you set the pointer. – derhass Aug 16 '14 at 12:00
  • You set stride to 2*float.sizeof, which would mean there are two unused floats between each attribute. – weltensturm Aug 19 '14 at 09:02
  • @derhass so can I not use VBOs without requiring a core or compatibility profile? Other examples I've seen include SDL in the same way as above but use VBOs. – Asraelite Aug 19 '14 at 10:19
  • 1
  • @Asraelite: I did not say that. I did not talk about VBOs at all (you can use them in any profile), but _VAOs_, a different thing. All I said was that you _must_ use VAOs in a core profile, and that I don't know if SDL will create a core profile with the attributes you have set. – derhass Aug 19 '14 at 16:20
  • If you're using OpenGL 3.2 (as specified in your window creation), why are you using `#version 120` in your shaders? (OpenGL 3.2 uses `#version 150`). – James Zhu Aug 31 '14 at 19:06
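As the comments above suggest, one thing to try is requesting the profile explicitly instead of leaving the choice to the driver. A minimal sketch, assuming standard SDL2 attributes (note that with a core profile the shaders would also need #version 150 with in/out declarations rather than #version 120 with attribute/gl_FragColor):

// Ask SDL for an explicit 3.2 core profile before creating the window.
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);

auto window = SDL_CreateWindow("Triangle", SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                               640, 480, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);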

2 Answers


You're missing a VAO, which is required in an OpenGL 3.2 core profile. You can create it before the other buffers like this:

GLuint vao;
glGenVertexArrays(1,&vao);
glBindVertexArray(vao);
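In D with Derelict the calls are the same; a sketch of where this would go in the question's main(), assuming the GL 3.x entry points have already been loaded (the name vao is only illustrative):

DerelictGL3.reload();

// A core profile has no default vertex array object, so glVertexAttribPointer
// raises GL_INVALID_OPERATION unless a VAO is bound first.
uint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);

// ...the existing VBO and shader setup continues unchanged...

For a single triangle it is fine to leave this one VAO bound for the lifetime of the program.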
SurvivalMachine

What might help is enabling OpenGL debug logging; you can do it like this: https://github.com/d-gamedev-team/gfm/blob/master/opengl/gfm/opengl/opengl.d#L419

Some drivers, such as NVIDIA's, provide extensive warnings when something goes wrong; the problem might not be where you think it is.
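The linked gfm code uses ARB_debug_output; a rough sketch of the same idea with the core KHR_debug entry point glDebugMessageCallback, assuming Derelict exposes it and the context actually provides debug output (the exact parameter types must match Derelict's debug-callback alias, so treat the signature below as an approximation):

// extern(System) matches the APIENTRY calling convention GL expects
// (stdcall on Windows, cdecl elsewhere).
extern(System) nothrow void onGLDebug(GLenum source, GLenum type, GLuint id, GLenum severity,
        GLsizei length, const(GLchar)* message, GLvoid* userParam) {
    // printf is nothrow, unlike writefln, so it is safe inside this callback.
    import core.stdc.stdio : printf;
    printf("GL debug: %.*s\n", length, message);
}

// After the context is current and DerelictGL3.reload() has run.
// A cast to Derelict's debug-callback alias may be needed if the parameter types differ.
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS); // report the message on the offending call
glDebugMessageCallback(&onGLDebug, null);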

ponce