1

The code is pasted below. It draws nothing. triple is the array of points.

program = InitShader("vshader.glsl", "fshader.glsl");
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
// NOTE(review): if `triple` is a pointer (e.g. from malloc) rather than a
// fixed-size array, sizeof(triple) is the pointer size (4/8 bytes), not the
// data size -- pass the byte count explicitly.
glBufferData(GL_ARRAY_BUFFER, sizeof(triple), triple, GL_STATIC_DRAW);
loc = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray(loc);
// NOTE(review): for GL_FLOAT data the `normalized` flag has no effect;
// GL_FALSE is the conventional value for plain float positions.
glVertexAttribPointer(loc, 3, GL_FLOAT, GL_TRUE, 0, BUFFER_OFFSET(0));
// n_triple counts individual floats, so n_triple / 3 is the vertex count.
glDrawArrays(GL_TRIANGLES, 0, n_triple / 3);

Vertex Shader (vshader.glsl)

#version 400
// Vertex position attribute, pinned to location 0. The name must match the
// string queried with glGetAttribLocation in the C++ code ("vPosition").
layout (location = 0) in vec3 vPosition; // In a previous revision this variable was named VertexPosition.


// Pass the position straight through as a clip-space point (w = 1).
void main()
{
    gl_Position = vec4(vPosition, 1.0);
}

But if I just delete the linkage of vertex shader and change the drawing method into

glVertexPointer(3, GL_FLOAT, 0, triple);

It works just fine. Even if I enabled the fragment shader. The shader works fine too.

I just don't know what is wrong.

The full code is pasted below:

#include "stdafx.h"
#include <windows.h>
#include <cmath>
#include<cstdlib> 
#include <GL/glew.h>
#include<GL/wglew.h>
#include <GL/glut.h>
#include<iostream>
#include <cstdio>

#define BUFFER_OFFSET( offset )   ((GLvoid*) (offset))
//#pragma comment (lib, "glut32.lib")
using namespace std;
GLuint program, loc;
int w, h;
// Append one (x, y, z) vertex to the flat coordinate array `points`,
// advancing the caller's write cursor `i` by three slots.
inline void point3(GLfloat points[], GLfloat x, GLfloat y, GLfloat z, int &i) {
    const GLfloat coord[3] = { x, y, z };
    for (int k = 0; k < 3; ++k)
        points[i++] = coord[k];
}

inline char* readShaderSource(char* shader)
{

FILE *fp;// = fopen(shader, "r");
fopen_s(&fp, shader, "r");
if (!fp) 
    exit(1);
fseek(fp, 0, SEEK_END);
unsigned long size = ftell(fp);
fseek(fp, 0, SEEK_SET);
char *buf = new char [size+1];
fread_s(buf, size + 1, sizeof(char), size / sizeof(char), fp);
buf[size] = ' ';
fclose(fp);
return buf;
}
// Compile and link a vertex + fragment shader pair into a GL program,
// make it current with glUseProgram, and return its handle.
// Compile/link diagnostics are printed to stdout (the debug console).
GLuint InitShader(char* vertexShader, char* fragmentShader) {
    glewInit();
    char *vertexSource = readShaderSource(vertexShader);
    char *fragmentSource = readShaderSource(fragmentShader);

    GLuint program = glCreateProgram();
    GLuint vshader = glCreateShader(GL_VERTEX_SHADER);
    GLuint fshader = glCreateShader(GL_FRAGMENT_SHADER);

    glShaderSource(vshader, 1, (const GLchar **)&vertexSource, NULL);
    glCompileShader(vshader);
    glShaderSource(fshader, 1, (const GLchar **)&fragmentSource, NULL);
    glCompileShader(fshader);

    // Report compile status for BOTH shaders. The original checked only the
    // fragment shader, so vertex-shader errors went unnoticed.
    GLuint shaders[2] = { vshader, fshader };
    for (int s = 0; s < 2; ++s) {
        GLint compiled = 0, length = 0;
        glGetShaderiv(shaders[s], GL_COMPILE_STATUS, &compiled);
        printf("%d", compiled);
        glGetShaderiv(shaders[s], GL_INFO_LOG_LENGTH, &length);
        if (length > 0) {
            char *msg = new char[length];
            glGetShaderInfoLog(shaders[s], length, NULL, msg);
            printf("\n%s\n", msg);
            delete[] msg;                       // original leaked this buffer
        }
    }

    // BUG FIX: the vertex shader was never attached (the call was commented
    // out), so the program linked without it, the "vPosition" attribute did
    // not exist, and nothing was drawn.
    glAttachShader(program, vshader);
    glAttachShader(program, fshader);

    GLint linked = 0;
    glLinkProgram(program);
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    printf("%d", linked);

    // Sources and shader objects are no longer needed once the program is
    // linked; release them (the original leaked both).
    delete[] vertexSource;
    delete[] fragmentSource;
    glDeleteShader(vshader);
    glDeleteShader(fshader);

    glUseProgram(program);
    return program;
}


LRESULT CALLBACK WndProc(HWND, UINT, WPARAM, LPARAM); 
// Application entry point: register the window class, create and show the
// main window, then pump messages until WM_QUIT.
int APIENTRY WinMain(HINSTANCE hInstance,
HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow) {

    WCHAR lpszClassName[] = L"Window";
    WCHAR lpszTitle[] = L"Win32 Project";

    // BUG FIX / consistency: the class name and title are WCHAR strings and
    // the window is created with CreateWindowW, so register with the explicit
    // wide-character WNDCLASSW/RegisterClassW. The original used the TCHAR
    // WNDCLASS/RegisterClass, which only compiles under a UNICODE build.
    WNDCLASSW wndclass;
    wndclass.style = CS_HREDRAW | CS_VREDRAW | CS_OWNDC; // CS_OWNDC: stable DC for the wgl context
    wndclass.lpfnWndProc = WndProc;
    wndclass.cbClsExtra = 0;
    wndclass.cbWndExtra = 0;
    wndclass.hInstance = hInstance;
    wndclass.hIcon = LoadIcon(NULL, IDI_APPLICATION);
    wndclass.hCursor = LoadCursor(NULL, IDC_ARROW);
    wndclass.hbrBackground = NULL;   // OpenGL does all the painting; no GDI background brush
    wndclass.lpszMenuName = NULL;
    wndclass.lpszClassName = lpszClassName;
    if (!RegisterClassW(&wndclass))
    {
        MessageBeep(0);
        return FALSE;
    }

    HWND hwnd = CreateWindowW(lpszClassName, lpszTitle, WS_OVERLAPPEDWINDOW,
        CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, nullptr, nullptr, hInstance, nullptr);
    ShowWindow(hwnd, nCmdShow);
    UpdateWindow(hwnd);

    // Standard message pump; GetMessage returns 0 on WM_QUIT.
    MSG msg;
    while (GetMessage(&msg, NULL, 0, 0))
    {
        TranslateMessage(&msg);
        DispatchMessage(&msg);
    }
    return (int)msg.wParam; // exit code posted by PostQuitMessage
}
// Choose and set an OpenGL-capable pixel format on the window's device
// context. Must run before wglCreateContext (see WM_CREATE in WndProc).
void SetupPixelFormat(HDC hDC) 
{
int nPixelFormat; 
static PIXELFORMATDESCRIPTOR pfd = { sizeof(PIXELFORMATDESCRIPTOR), // nSize
    1, // nVersion: always 1
    PFD_DRAW_TO_WINDOW | 
    PFD_SUPPORT_OPENGL | 
    PFD_DOUBLEBUFFER, // dwFlags: window-drawable GL with front/back buffers
    PFD_TYPE_RGBA, // iPixelType
    32, // cColorBits: 32-bit color
    0, 0, 0, 0, 0, 0, // cRed/Green/Blue bits+shifts: let the driver pick
    256, // NOTE(review): this position is cAlphaBits, a BYTE -- 256 truncates to 0; verify the intended field
    0, // cAlphaShift
    0, // cAccumBits: no accumulation buffer
    0, 0, 0, 0, // cAccum R/G/B/A bits
    16, // cDepthBits: 16-bit depth buffer
    0, // cStencilBits (overridden to 8 below)
    0, // cAuxBuffers
    PFD_MAIN_PLANE, // iLayerType
    0, // bReserved
    0, 0, 0 }; // layer/visible/damage masks
pfd.cStencilBits = 8; // request an 8-bit stencil buffer
nPixelFormat = ChoosePixelFormat(hDC, &pfd); // closest format the driver supports
SetPixelFormat(hDC, nPixelFormat, &pfd);
}
// One-time fixed-function GL state setup, called from WM_CREATE after the
// wgl context is current. Returns TRUE on success.
int InitGL(GLvoid)                         
{
    glShadeModel(GL_SMOOTH);
    // BUG FIX: glClearColor components are clamped to [0, 1]; the original
    // passed 255.0f/244.0f, which the driver silently clamps to 1.0.
    // Write the effective (clamped) values explicitly: opaque cyan.
    glClearColor(0.0f, 1.0f, 1.0f, 1.0f);
    glClearDepth(1.0f);
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_LEQUAL);
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);
    // NOTE(review): glutInitDisplayMode has no effect here -- the pixel
    // format is already chosen by SetupPixelFormat and glutInit is never
    // called. Kept to preserve behavior; safe to delete.
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGBA | GLUT_DEPTH);
    return TRUE;
}
// WM_SIZE handler: update the viewport, rebuild the fixed-function
// projection matrix, and cache the client size in the globals w/h.
// NOTE(review): these legacy matrices do not reach the #version 400 shader
// unless it reads the built-in/uniform state -- confirm against the shader.
void ChangeSize(int width, int height)
{
glViewport(0, 0, width, height);         
glMatrixMode(GL_PROJECTION);             
glLoadIdentity();                       

// 45-degree vertical FOV, window aspect ratio, near 0.1 / far 100.
gluPerspective(45.0f, (GLfloat)width / (GLfloat)height, 0.1f, 100.0f);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
// Cache the client-area size for use elsewhere.
w = width;
h = height;
}

void RenderScene()
{
static bool runned = false;
if (!runned)
{
    runned = true;
//#ifdef DEBUG

    FILE* fpDebugOut = NULL;
    FILE* fpDebugIn = NULL;
    if (!AllocConsole()) MessageBox(NULL, _T("Failed to generate console."), NULL, 0);
    SetConsoleTitle(_T("Debug Window"));
    _tfreopen_s(&fpDebugOut, _T("CONOUT$"), _T("w"), stdout);
    _tfreopen_s(&fpDebugIn, _T("CONIN$"), _T("r"), stdin);
//#endif // DEBUG
}

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 
glLoadIdentity();     

glTranslatef(-2.0f, -0.0f, -5.0f);  
glEnable(GL_POINT_SMOOTH);
glEnable(GL_LINE_SMOOTH);
glHint(GL_POINT_SMOOTH_HINT, GL_NICEST);
glHint(GL_LINE_SMOOTH_HINT, GL_NICEST);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
glEnableClientState(GL_VERTEX_ARRAY); 

const GLfloat toradix = 3.14159265358979324 / 180;
int stepper = 2, r = 2, n_triple = 0;
GLfloat *triple = (GLfloat*)malloc(sizeof(GLfloat) * 4000000);//384480

for (int j = -90 + stepper; j < 90 - stepper;j += stepper)
    for (int i = -180; i <= 180;i += stepper)
    {
        point3(triple, r*cos(j*toradix)*sin(i*toradix), r*cos(i*toradix)*cos(j*toradix), r*sin(j*toradix), n_triple);

        j += stepper;
        point3(triple, r*cos(j*toradix)*sin(i*toradix), r*cos(i*toradix)*cos(j*toradix), r*sin(j*toradix), n_triple);

        i += stepper;
        point3(triple, r*cos(j*toradix)*sin(i*toradix), r*cos(i*toradix)*cos(j*toradix), r*sin(j*toradix), n_triple);

        j -= stepper;
        point3(triple, r*cos(j*toradix)*sin(i*toradix), r*cos(i*toradix)*cos(j*toradix), r*sin(j*toradix), n_triple);
    }
program = InitShader("vshader.glsl", "fshader.glsl");
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
GLuint buffer;
glGenBuffers(1, &buffer);
glBindBuffer(GL_ARRAY_BUFFER, buffer);
glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * n_triple, triple, GL_STATIC_DRAW);
loc = glGetAttribLocation(program, "vPosition");
glEnableVertexAttribArray(loc);
glVertexAttribPointer(loc, 3, GL_FLOAT, GL_TRUE, 0, BUFFER_OFFSET(0));
glClear(GL_COLOR_BUFFER_BIT);
glDrawArrays(GL_TRIANGLES, 0, n_triple / 3);
glFlush();

}
// Window procedure: owns the GL rendering context lifecycle and routes
// size/paint messages to the GL helpers.
LRESULT CALLBACK WndProc(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam) {
// The DC and GL context persist across calls (created once in WM_CREATE,
// destroyed in WM_DESTROY), hence static.
static HGLRC hRC; 
static HDC hDC; 

switch (message) {
case WM_CREATE: {
    // Create the GL context on the window's DC and make it current before
    // any GL call is issued.
    hDC = GetDC(hwnd); 
    SetupPixelFormat(hDC); 
    hRC = wglCreateContext(hDC); 
    wglMakeCurrent(hDC, hRC); 
    InitGL();
    return 0;
}
break;
case WM_DESTROY:
{
    // Unbind and delete the GL context, then end the message loop.
    wglMakeCurrent(hDC, NULL);
    wglDeleteContext(hRC);
    PostQuitMessage(0); 
}
break;
case WM_SIZE:
{
    // lParam packs the new client size: low word = width, high word = height.
    ChangeSize(LOWORD(lParam), HIWORD(lParam));
}
break;

case WM_PAINT:
{
    // Draw, present the back buffer, and mark the client area valid so
    // Windows stops queuing WM_PAINT.
    RenderScene();
    SwapBuffers(hDC);
    ValidateRect(hwnd, NULL);
}
break;
default: 
    return DefWindowProc(hwnd, message, wParam, lParam);
}
return 0;
}
Bill Sun
  • 51
  • 5
  • How do you define triple? sizeof is only working with fixed sized arrays, not with dynamically allocated ones. – BDL Oct 12 '15 at 14:08
  • 1
    You use `vPosition` with `glGetAttribLocation` but your variable is named `VertexPosition`. – Colonel Thirty Two Oct 12 '15 at 14:33
  • @BDL, triple is defined as GLfloat *triple = (GLfloat *) malloc (sizeof(GLfloat)*40000); That was miscoded. Sorry about that. But after I change that into glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat)*40000, triple, GL_STATIC_DRAW); it still doesn't draw anything. – Bill Sun Oct 12 '15 at 14:56
  • Have you also changed the things @ColonelThirtyTwo said? – BDL Oct 12 '15 at 15:00
  • @Colonel Thirty Two, this piece of code is the previous version of vertex shader. the shader is now changed into vPosition. Thanks for correcting that. – Bill Sun Oct 12 '15 at 15:01
  • @BDL Yes, I have changed that too. But it still doesn't work. And I've pasted the complete code here too. – Bill Sun Oct 12 '15 at 15:40
  • what is value of the `loc` ... what is inside GLSL compiler/linker log (`glGetShaderInfoLog`) may be the `vPosition` get optimized out or the shader did not compile/link leading to use of default vertex attrib location. Also see [C++ GL+GLSL+VAO/VBO example](http://stackoverflow.com/a/31913542/2521214) to compare what is different and see how `glGetShaderInfoLog` is used. Also specify gfx card drivers nVidia/ATI/Intel often behaves differently if some bug in code is presents (sometimes even if not) – Spektre Oct 14 '15 at 08:41

1 Answer

0

In OpenGL every vertex attribute (and also uniform variables) has its unique location, a sort of a pointer to that attribute. You can either hardcode that location to some value or let OpenGL decide for you.

// Hardcode locations in vertex shader:
layout(location = 0) in vec3 Position; // This attribute will always have the location 0
layout(location = 1) in vec2 TexCoord; // This one will use location 1

// Any location / hardcoded in program:
attribute vec4 Normal;

More on the use of vertex attributes here.

When using "attribute" to define your vertex attribute you can hardcode its location in your program using glBindAttribLocation​ (see the link). If you don't, you don't know what location it will occupy. Then you need to query its location using glGetAttribLocation.

In the program you're searching for the location of a vertex attribute called "vPosition" which is not present in the vertex shader. In the vertex shader you define an attribute called "VertexPosition" and assign it to location 0. Try replacing "vPosition" with "VertexPosition" in the glGetAttribLocation function. That way glGetAttribLocation should return 0 because "VertexPosition" is assigned to that location in the vertex shader and the inputs should work.

You can also try to hardcode the value "0" to the var "loc" in your program. This way glEnableVertexAttribArray and glVertexAttribPointer would use the location "0" which has the attribute "VertexPosition" assigned to it in the vertex shader.

Kvaleya
  • 101
  • 5
  • Thank you. I have changed that. And I'm using the first method to get the VAO. But it still doesn't work. – Bill Sun Oct 12 '15 at 15:43
  • 1
    Try removing `glEnableClientState(GL_VERTEX_ARRAY);`. It is used along with "glVertexPointer" to pass vertex positions into the built-in variable "gl_Vertex" which is used in the fixed function pipeline. You probably don't want to use that in OpenGL 4. gl_Vertex may use the location 0 so it could be messing with your own attribute. – Kvaleya Oct 13 '15 at 05:27
  • Thank you. I tried to remove these lines and bind the vertex attribute manually to location 31 using glBindAttribLocation; it didn't work. But when I changed it back to glGetAttribLocation it finally worked. I think this may have happened because location 31 is occupied by some other procedure. – Bill Sun Oct 17 '15 at 13:35