0

I'm fairly new to OpenGL. I am currently trying to make a class which can handle drawing vertices to the screen, but it doesn't seem to work — it doesn't draw anything. I know that this problem has nothing to do with shaders, because it works when I do it without a class (directly in the main.cpp file).

Model.h

#include <glad/glad.h>
// Owns the OpenGL objects needed to draw one indexed mesh.
// NOTE(review): the constructor takes raw arrays, which decay to pointers and
// lose their length — the caller's sizes cannot be recovered inside the
// constructor. Pass the element counts as extra parameters, or use
// std::vector, as the accepted answer below suggests.
class Model
{
private:
    GLuint VAO, VBO, EBO; // GL object names: vertex array, vertex buffer, element (index) buffer

public:
    // Uploads vertex positions and triangle indices to the GPU (see Model.cpp).
    Model(float vertices[], unsigned int indices[]);
    // Binds the VAO and issues an indexed draw call.
    void Draw();
};

Model.cpp

#include "Model.h"
// Creates the VAO/VBO/EBO for this model and uploads the given geometry.
// vertices: tightly packed x,y,z floats; indices: triangle index list.
Model::Model(float vertices[], unsigned int indices[])
{
    // Generate one vertex array and two buffer objects.
    glGenVertexArrays(1, &this->VAO);
    glGenBuffers(1, &this->VBO);
    glGenBuffers(1, &this->EBO);

    // All buffer bindings below are recorded into this VAO.
    glBindVertexArray(this->VAO);

    glBindBuffer(GL_ARRAY_BUFFER, this->VBO);
    // BUG: an array parameter decays to a pointer, so sizeof(vertices) here is
    // sizeof(float*) (4 or 8 bytes), NOT the byte size of the caller's array.
    // Only the first one or two floats get uploaded, so nothing is drawn.
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);

    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, this->EBO);
    // BUG: same pointer-decay problem — sizeof(indices) is sizeof(unsigned int*).
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(indices), indices, GL_STATIC_DRAW);

    // Attribute 0: three floats per vertex, tightly packed, offset 0.
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(float), (void*)0);
    glEnableVertexAttribArray(0);

    // Unbind to avoid accidental modification of this VAO's state later.
    // (The EBO binding is stored in the VAO, so it must stay bound above.)
    glBindBuffer(GL_ARRAY_BUFFER, 0);
    glBindVertexArray(0);
}

void Model::Draw()
{
    glBindVertexArray(VAO);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_INT, 0);
}

Main.cpp

#include <iostream>
#include "Model.h"
#include "Shader.h"
#include "Window.h"

// Entry point: sets up a window, a quad model and a shader, then renders
// the quad every frame until the window is closed.
int main()
{
    std::cout << "Start" << std::endl;

    // A unit quad centred on the origin: x, y, z per vertex.
    float quadVertices[] = {
         0.5f,  0.5f, 0.0f,
         0.5f, -0.5f, 0.0f,
        -0.5f, -0.5f, 0.0f,
        -0.5f,  0.5f, 0.0f
    };
    // Two triangles sharing the 1-3 diagonal.
    unsigned int quadIndices[] = {
        0, 1, 3,
        1, 2, 3
    };

    Window window("MyWindow", 800, 600);
    window.Show();

    Model square(quadVertices, quadIndices);

    Shader testShader("testVertexShader.glsl", "testFragmentShader.glsl");

    glViewport(0, 0, 800, 600);

    // Render loop: draw the quad with the test shader each frame.
    while (!window.ShouldClose())
    {
        /* Clearing was left disabled by the author while debugging:
        glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
        glClear(GL_COLOR_BUFFER_BIT); */

        testShader.Use();
        square.Draw();

        window.Update();
    }

    // Release GPU/window resources before exiting.
    testShader.Delete();
    window.Close();

    std::cout << "End" << std::endl;

    return 0;
}

Rabbid76
  • 202,892
  • 27
  • 131
  • 174
Wallop
  • 1

1 Answer

3

Nothing to do with classes or with OpenGL, but a common mistake concerning arrays and pointers.

The error is here

sizeof(vertices)

Within this constructor, vertices is a pointer, and therefore sizeof(vertices) gives you the size of the pointer, not the size of the original array.

Same error with sizeof(indices).

Pass the size you require in as a separate parameter, or do what C++ programmers should do and use a vector instead of an array.

// Sketch of the fix: take std::vector, which carries its own length,
// instead of raw arrays that decay to pointers.
Model::Model(const std::vector<float>& vertices, const std::vector<unsigned int>& indices)
{
    ...
    // size() * sizeof(element) is the true byte count of the data,
    // unlike sizeof(pointer) in the original code.
    glBufferData(GL_ARRAY_BUFFER, vertices.size()*sizeof(vertices[0]), vertices.data(), GL_STATIC_DRAW);
    ...
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indices.size()*sizeof(indices[0]), indices.data(), GL_STATIC_DRAW);
    ...
}
john
  • 85,011
  • 4
  • 57
  • 81