
I'm doing an OpenGL learning exercise that involves using glGenFramebuffers(). However, when I call the function, it seems that nothing happens. I created the following simple program to reproduce the problem:

#define GL_GLEXT_PROTOTYPES
#include <stdio.h>
#include <GL/gl.h>
#include <GL/glext.h>

static GLuint fb[2];

int main(void)
{
    glGenFramebuffers(2, fb);
    printf("result: %u %u\n", fb[0], fb[1]);
    return 0;
}

$ gcc -std=c99 -I/usr/X11/include test.c -o test -L/usr/X11/lib -lGL -lOSMesa

$ ./test

The output is result: 0 0

According to http://www.opengl.org/wiki/GLAPI/glGenFramebuffers, glGenFramebuffers() should set fb[0] and fb[1]. I can't find any reference explaining the result I'm actually getting. My real code behaves the same way, so I suppose it's not a matter of some initialization that is missing here.

Am I doing something wrong or is this some kind of bug?


Edit: The same thing happens even when I have a context. Here is a more complete version of the code.

#define GL_GLEXT_PROTOTYPES

#include <GL/gl.h>
#include <GL/glx.h>
#include <GL/glext.h>
#include <xcb/xcb.h>
#include <X11/Xlib-xcb.h>
#include <stdio.h>

static Display *display;
static xcb_connection_t *connection;
static xcb_window_t window;
static GLXDrawable drawable;
static GLXContext context;

static GLuint fb[2];

int main(void)
{
    display = XOpenDisplay(0);
    if (!display) return 0;

    int default_screen = XDefaultScreen(display);

    connection = XGetXCBConnection(display);
    if (!connection) goto error;

    int visualID = 0;

    XSetEventQueueOwner(display, XCBOwnsEventQueue);

    // find XCB screen
    xcb_screen_iterator_t screen_iter = xcb_setup_roots_iterator(xcb_get_setup(connection));
    int screen_num = default_screen;
    while (screen_iter.rem && screen_num > 0)
    {
        screen_num -= 1;
        xcb_screen_next(&screen_iter);
    }
    xcb_screen_t *screen = screen_iter.data;

    // query framebuffer configurations
    GLXFBConfig *fb_configs = 0;
    int num_fb_configs = 0;
    fb_configs = glXGetFBConfigs(display, default_screen, &num_fb_configs);
    if (!fb_configs || num_fb_configs == 0) goto error;

    // select first framebuffer config and query visualID
    GLXFBConfig fb_config = fb_configs[0];
    glXGetFBConfigAttrib(display, fb_config, GLX_VISUAL_ID , &visualID);

    // create OpenGL context
    context = glXCreateNewContext(display, fb_config, GLX_RGBA_TYPE, 0, True);
    if (!context) goto error;

    // create XID's for colormap and window
    xcb_colormap_t colormap = xcb_generate_id(connection);
    window = xcb_generate_id(connection);

    xcb_create_colormap(connection, XCB_COLORMAP_ALLOC_NONE, colormap, screen->root, visualID);

    uint32_t eventmask = XCB_EVENT_MASK_EXPOSURE | XCB_EVENT_MASK_KEY_PRESS | XCB_EVENT_MASK_KEY_RELEASE | XCB_EVENT_MASK_BUTTON_PRESS | XCB_EVENT_MASK_BUTTON_RELEASE;
    uint32_t valuelist[] = {eventmask, colormap, 0};
    uint32_t valuemask = XCB_CW_EVENT_MASK | XCB_CW_COLORMAP;

    // TODO set window parameters
    xcb_create_window(connection, XCB_COPY_FROM_PARENT, window, screen->root, 100, 0, 400, 300, 0, XCB_WINDOW_CLASS_INPUT_OUTPUT, visualID, valuemask, valuelist);

    // NOTE: window must be mapped before glXMakeContextCurrent
    xcb_map_window(connection, window); 

    drawable = glXCreateWindow(display, fb_config, window, 0);

    if (!drawable)
    {
        xcb_destroy_window(connection, window);
        glXDestroyContext(display, context);
        goto error;
    }

    // make OpenGL context current
    if (!glXMakeContextCurrent(display, drawable, drawable, context))
    {
        xcb_destroy_window(connection, window);
        glXDestroyContext(display, context);
        goto error;
    }

    glGenFramebuffers(2, fb);

    printf("%s\n", glGetString(GL_VERSION));
    printf("%d %d\n", fb[0], fb[1]);

    return 0;

error:
    XCloseDisplay(display);
}

Output:

2.1 NVIDIA-7.32.12
0 0

Mac OS X 10.7.5

NVIDIA GeForce 320M 256 MB

$ gcc --version
i686-apple-darwin11-llvm-gcc-4.2 (GCC) 4.2.1 (Based on Apple Inc. build 5658) (LLVM build 2336.1.00)

glGetString(GL_VERSION): 2.1 NVIDIA-7.32.12

martinkunev
  • possible duplicate of [Why Could glGetString(GL_VERSION) Be Causing a Seg Fault?](http://stackoverflow.com/questions/6288759/why-could-glgetstringgl-version-be-causing-a-seg-fault) – genpfault Jan 20 '14 at 17:27

3 Answers


FBOs are only core in OpenGL 3.0+. You're getting a 2.1 context.

Check for EXT_framebuffer_object support and use glGenFramebuffersEXT() instead.
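
For example, a minimal sketch of that check (assuming the same GL/gl.h and GL/glext.h headers as in the question and a current context; the strstr() test is simplified and could also match a longer extension name):

#define GL_GLEXT_PROTOTYPES
#include <GL/gl.h>
#include <GL/glext.h>
#include <string.h>

// Sketch: use the EXT entry point when the extension is advertised.
// Returns 1 on success, 0 if the extension is missing.
static int gen_framebuffers_ext(GLsizei n, GLuint *out)
{
    const char *ext = (const char *)glGetString(GL_EXTENSIONS);
    if (ext && strstr(ext, "GL_EXT_framebuffer_object"))
    {
        glGenFramebuffersEXT(n, out);
        return 1;
    }
    return 0;  // no FBO support in this context
}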

genpfault

Check the value of glGetError (...) after calling that function. It is very likely GL_INVALID_OPERATION in a 2.1 context. You can call GL 3.x functions from a GL 2.1 context on OS X, but they will always generate GL_INVALID_OPERATION.
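
A minimal sketch of that check, dropped around the glGenFramebuffers() call from the question (assumes the question's headers and a current context):

// Drain any previously set error, then inspect the flag right after the call.
while (glGetError() != GL_NO_ERROR)
    ;

glGenFramebuffers(2, fb);

GLenum err = glGetError();
if (err == GL_INVALID_OPERATION)
    printf("glGenFramebuffers is rejected by this context version\n");
else if (err != GL_NO_ERROR)
    printf("GL error: 0x%x\n", err);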

The situation is quite different from most other platforms, where function pointers are set up at run-time. On OS X you link to the same library whether you have a GL 2.1 context or GL 3.2+ core, which contains functions for every version of GL that Apple implements. This allows you to call functions that are not implemented in your context version. However, any attempt to call these functions will produce no effect at run-time other than setting GL_INVALID_OPERATION.

To fix this, you either need to use the EXT form of FBOs, or get a 3.2+ core context. Since there is no way to do the latter using X11 on OS X, you will probably have to use the extension. I should clarify that when I say use the extension form of FBOs, all this actually involves on OS X is replacing glGenFramebuffers (...) with glGenFramebuffersEXT (...). You do not have to call any *GetProcAddress (...) functions.

Alternatively, you could use a framework like SDL or GLFW3, or move from X11/GLX to NSOpenGL (Obj-C) or CGL (C/C++). Using the native interfaces instead of deprecated things like AGL or X11/GLX on OS X is the only way to get a 3.2 core context.
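
For instance, a minimal GLFW3 sketch that requests a 3.2 core context (the window size and title are arbitrary placeholders; on OS X the forward-compatibility hint is required for core profiles):

#include <GLFW/glfw3.h>
#include <stdio.h>

int main(void)
{
    if (!glfwInit()) return 1;

    // Ask for a 3.2 core, forward-compatible context.
    glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 2);
    glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE);

    GLFWwindow *window = glfwCreateWindow(400, 300, "test", NULL, NULL);
    if (!window) { glfwTerminate(); return 1; }

    glfwMakeContextCurrent(window);
    printf("%s\n", glGetString(GL_VERSION));  // should now report 3.2 or later

    glfwTerminate();
    return 0;
}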

Andon M. Coleman
  • glGetError() actually returns 0, but after I changed the code to use the *EXT functions, everything started working :) It's pretty strange how OS X handles OpenGL versions. I would expect to get a compilation error, but the library just fails silently at runtime. – martinkunev Jan 20 '14 at 22:16
  • Usually this is handled pretty gracefully on OS X. If you use the OpenGL framework and include <OpenGL/gl.h>, it only includes the functions provided by the 2.1 context. You have to use <OpenGL/gl3.h> to get the function definitions, typedefs and constants from core 3.2+. But since you are using X11/GLX/Mesa instead of the normal OpenGL framework, it does not behave that way. – Andon M. Coleman Jan 20 '14 at 22:21
  • I wanted to be able to run the same code on Linux so I decided that's the way to go. – martinkunev Jan 20 '14 at 23:33
  • I would suggest something like GLFW then, if you do not need to interface directly with X for any reason. You can get a core OpenGL 3.2 context on OS X using GLFW without ever having to write a single piece of Objective C code, and it is highly portable. Ordinarily you would have to write Obj-C code to use NSOpenGL (the preferred native GL interface on OS X, and the only one that lets you draw into a window instead of fullscreen). X11 (XQuartz) is deprecated and only provided for compatibility purposes. – Andon M. Coleman Jan 20 '14 at 23:38

You need an OpenGL context, but you don't create one.

datenwolf