
My Linux system is having an issue compiling the vertex shaders.

First, here are the errors I am getting:

Vertex Shader compilation failed: 0:1(10): error: GLSL 4.30 is not supported. Supported versions are: 1.10, 1.20, 1.30, 1.00 ES, 3.00 ES, 3.10 ES, and 3.20 ES

Vertex Shader compilation failed: 0:1(10): error: GLSL 4.30 is not supported. Supported versions are: 1.10, 1.20, 1.30, 1.00 ES, 3.00 ES, 3.10 ES, and 3.20 ES

Shader linking failed: error: linking with uncompiled/unspecialized shadererror: linking with uncompiled/unspecialized shader

This is the vertex shader:

#version 430 core

layout (location = 0) in vec4 vPosition;
void main() {
    gl_Position = vPosition;
    gl_PointSize = 50.0;
}

This is the fragment shader:

#version 430 core

out vec4 fColor;

void main() {
    fColor = vec4(1.0, 0.0, 0.0, 1.0);
}

This is the C code:

#include <GL/glew.h>
#include <GL/freeglut.h>
#include <stdio.h>
#include <stdlib.h>

GLuint  vao;
GLuint  vbo;
GLuint v, f;

GLfloat vertices[3][2] = { { -0.90, -0.90 },  // Triangle 1
                           { 0.90, -0.90 },
                           { -0.90,  0.90 },
                         };


char* ReadFile(const char* filename);
GLuint initShaders(const char* v_shader, const char* f_shader);
void init();
void display(void);

/************************************************/

char* ReadFile(const char* filename) {

    FILE* infile;
#ifdef WIN32
    fopen_s(&infile, filename, "rb");
#else
    infile = fopen(filename, "rb");
#endif


    if (!infile) {
        printf("Unable to open file %s\n", filename);
        return NULL;
    }

    fseek(infile, 0, SEEK_END);
    int len = ftell(infile);
    fseek(infile, 0, SEEK_SET);
    char* source = (char*)malloc(len + 1);
    fread(source, 1, len, infile);
    fclose(infile);
    source[len] = 0;
    return (source);

}

/*************************************************************/

GLuint initShaders(const char* v_shader, const char* f_shader) {

    GLuint p = glCreateProgram();

    v = glCreateShader(GL_VERTEX_SHADER);
    f = glCreateShader(GL_FRAGMENT_SHADER);

    const char * vs = ReadFile(v_shader);
    const char * fs = ReadFile(f_shader);

    glShaderSource(v, 1, &vs, NULL);
    glShaderSource(f, 1, &fs, NULL);

    free((char*)vs);
    free((char*)fs);

    glCompileShader(v);

    GLint compiled;

    glGetShaderiv(v, GL_COMPILE_STATUS, &compiled);
    if (!compiled) {
        GLsizei len;
        glGetShaderiv(v, GL_INFO_LOG_LENGTH, &len);

        char* log = (char*)malloc(len + 1);

        glGetShaderInfoLog(v, len, &len, log);

        printf("Vertex Shader compilation failed: %s\n", log);

        free(log);

    }

    glCompileShader(f);
    glGetShaderiv(f, GL_COMPILE_STATUS, &compiled);

    if (!compiled) {

        GLsizei len;
        glGetShaderiv(f, GL_INFO_LOG_LENGTH, &len);
        char* log = (char*)malloc(len + 1);
        glGetShaderInfoLog(f, len, &len, log);
        printf("Vertex Shader compilation failed: %s\n", log);
        free(log);
    }

    glAttachShader(p, v);
    glAttachShader(p, f);
    glLinkProgram(p);
    GLint linked;

    glGetProgramiv(p, GL_LINK_STATUS, &linked);

    if (!linked) {

        GLsizei len;
        glGetProgramiv(p, GL_INFO_LOG_LENGTH, &len);
        char* log = (char*)malloc(len + 1);
        glGetProgramInfoLog(p, len, &len, log);
        printf("Shader linking failed: %s\n", log);
        free(log);
    }

    glUseProgram(p);

    return p;

}

/*******************************************************/

void init() {

    glGenVertexArrays(1, &vao);
    glBindVertexArray(vao);
    glGenBuffers(1, &vbo);
    glBindBuffer(GL_ARRAY_BUFFER, vbo);
    glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
    GLuint program = initShaders("triangle.vs", "triangle.fs");

    glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, 0);
    glEnableVertexAttribArray(0);

}

/**********************************************************/

void display(void) {

    glClear(GL_COLOR_BUFFER_BIT);

    glBindVertexArray(vao);
    glDrawArrays(GL_TRIANGLES, 0, 3);
    glFlush();

}

/*******************************************************/

int main(int argc, char** argv) {

    glutInit(&argc, argv);
    glutInitDisplayMode(GLUT_RGBA);
    glutInitWindowSize(512, 512);
    glutInitWindowPosition(0, 0);
    glutCreateWindow("Triangle");

    if (glewInit() != GLEW_OK) {
        printf("%s\n", "Unable to initialize GLEW ...");
    }

    init();
    /* we can now get data for the specific OpenGL instance we created */
    const GLubyte *renderer = glGetString(GL_RENDERER);
    const GLubyte *vendor = glGetString(GL_VENDOR);
    const GLubyte *version = glGetString(GL_VERSION);
    const GLubyte *glslVersion = glGetString(GL_SHADING_LANGUAGE_VERSION);
    
    printf("GL Vendor : %s\n", vendor);
    printf("GL Renderer : %s\n", renderer);
    printf("GL Version (string) : %s\n", version);
    
    printf("GLSL Version : %s\n", glslVersion);
    printf("%s\n", glGetString(GL_VERSION));
    glutDisplayFunc(display);
    glutMainLoop();

    return 0;

}

/*******************************************************/

And here is my system's OpenGL information from `glxinfo | grep OpenGL`:

OpenGL vendor string: Intel Open Source Technology Center
OpenGL renderer string: Mesa DRI Intel(R) UHD Graphics 620 (Kabylake GT2)
OpenGL core profile version string: 4.5 (Core Profile) Mesa 18.3.6
OpenGL core profile shading language version string: 4.50
OpenGL core profile context flags: (none)
OpenGL core profile profile mask: core profile
OpenGL core profile extensions:
OpenGL version string: 3.0 Mesa 18.3.6
OpenGL shading language version string: 1.30
OpenGL context flags: (none)
OpenGL extensions:
OpenGL ES profile version string: OpenGL ES 3.2 Mesa 18.3.6
OpenGL ES profile shading language version string: OpenGL ES GLSL ES 3.20
OpenGL ES profile extensions:

I have installed all of the Mesa/GLUT/GLEW packages I have seen listed as required, so everything that should be needed appears to be installed.
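
Note the split in the glxinfo output: the core profile context advertises GLSL 4.50, while the default (compatibility) context only advertises OpenGL 3.0 with GLSL 1.30, which matches the list of supported versions in the compiler error. To confirm which kind of context the program actually received, here is a minimal sketch that queries the profile mask after GLEW initialization (the helper name printContextProfile is made up for illustration; GL_CONTEXT_PROFILE_MASK requires a 3.2+ context):

/* Sketch: report which profile the current context uses.
   Call after glewInit(); GL_CONTEXT_PROFILE_MASK requires GL 3.2+. */
static void printContextProfile(void) {
    GLint mask = 0;
    glGetIntegerv(GL_CONTEXT_PROFILE_MASK, &mask);
    if (mask & GL_CONTEXT_CORE_PROFILE_BIT)
        printf("Core profile context\n");
    else if (mask & GL_CONTEXT_COMPATIBILITY_PROFILE_BIT)
        printf("Compatibility profile context\n");
    else
        printf("No profile mask reported (likely a pre-3.2 context)\n");
}

On a pre-3.2 context the query raises GL_INVALID_ENUM and leaves mask at 0, which is itself a hint that no modern context was created.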

  • Do you need to ask for a core profile? – user253751 Jan 05 '21 at 22:22
  • Intel Graphics Drivers for Linux must be installed. I think from [here](https://01.org/linuxgraphics/downloads/2018q1-intel-graphics-stack-recipe) – Hihikomori Jan 05 '21 at 22:33
  • @user253751 they did not provide a core profile or say one was needed. They did say they got it to work on Ubuntu and Fedora. – 0xef2387 Jan 05 '21 at 22:58
  • @Hihikomori from my understanding my graphics drivers are up to date – 0xef2387 Jan 05 '21 at 22:59
  • The shortcut is to change the version to 310 es in the shader. – Hihikomori Jan 05 '21 at 23:05
  • @Hihikomori That worked for my needs, thank you. – 0xef2387 Jan 06 '21 at 01:53
  • Well, I suspect you need to ask for a core profile. I don't know how to do that in GLUT. If you do not ask for a core profile, your code runs in OpenGL 3.0 compatibility mode (IIRC) so you can't use new features, like OpenGL 4.3 shaders. – user253751 Jan 06 '21 at 17:12
  • @user253751 I guess my searching on this problem wasn't sufficient, but someone linked this to an already-answered question that helped solve it. I had to add glutInitContextVersion(4,4); to my initialization. Thanks for the input. – 0xef2387 Jan 06 '21 at 23:19
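
To summarize the two fixes from the comments: the quick workaround is to change the shaders' #version line to 310 es (the fragment shader then also needs a precision qualifier, e.g. precision mediump float;), since Mesa exposes OpenGL ES 3.1 here. The proper fix is to request a modern core-profile context before creating the window. A minimal sketch of that initialization, assuming freeglut (glutInitContextVersion and glutInitContextProfile are freeglut extensions, not part of original GLUT):

    glutInit(&argc, argv);
    /* freeglut-specific: request a 4.3 core-profile context so that
       "#version 430 core" shaders are accepted; Mesa only exposes
       GL 4.x on this hardware in a core profile. */
    glutInitContextVersion(4, 3);
    glutInitContextProfile(GLUT_CORE_PROFILE);
    glutInitDisplayMode(GLUT_RGBA);
    glutInitWindowSize(512, 512);
    glutCreateWindow("Triangle");
    glewExperimental = GL_TRUE; /* older GLEW needs this to load core-profile entry points */
    if (glewInit() != GLEW_OK) {
        printf("Unable to initialize GLEW ...\n");
    }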

0 Answers