I am trying to write a game using OpenGL ES, but I am having a lot of trouble with the new GLKit classes and the default iOS template. Here is the relevant code from my view controller:
- (void)viewDidLoad
{
    [super viewDidLoad];

    self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
    if (!self.context) {
        NSLog(@"Failed to create ES context");
    }

    if (!renderer)
        renderer = [RenderManager sharedManager];

    tiles = [[TileSet alloc] init];

    GLKView *view = (GLKView *)self.view;
    view.context = self.context;
    view.drawableDepthFormat = GLKViewDrawableDepthFormat24;

    [self setupGL];
}
- (void)setupGL
{
    int width = [[self view] bounds].size.width;
    int height = [[self view] bounds].size.height;

    [EAGLContext setCurrentContext:self.context];

    self.effect = [[GLKBaseEffect alloc] init];
    self.effect.light0.enabled = GL_TRUE;
    self.effect.light0.diffuseColor = GLKVector4Make(0.4f, 0.4f, 0.4f, 1.0f);

    // Configure buffers
    glGenFramebuffers(1, &framebuffer);
    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);
    glGenRenderbuffers(1, &colourRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, colourRenderBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA8_OES, width, height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, colourRenderBuffer);
    glGenRenderbuffers(1, &depthRenderBuffer);
    glBindRenderbuffer(GL_RENDERBUFFER, depthRenderBuffer);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, depthRenderBuffer);

    // Confirm everything happened awesomely
    GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (status != GL_FRAMEBUFFER_COMPLETE) {
        NSLog(@"Failed to make complete framebuffer object %x", status);
    }

    glEnable(GL_DEPTH_TEST);

    // Enable the OpenGL states we are going to be using when rendering
    glEnableClientState(GL_VERTEX_ARRAY);
}
- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
    glClearColor(0.4f, 0.4f, 0.4f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    glBindFramebuffer(GL_FRAMEBUFFER, framebuffer);

    float iva[] = {
        0.0, 0.0, 0.0,
        0.0, 1.0, 0.0,
        1.0, 1.0, 0.0,
        1.0, 0.0, 0.0,
    };
    glVertexPointer(3, GL_FLOAT, sizeof(float) * 3, iva);
    glDrawArrays(GL_POINTS, 0, 4);
}
@end
With this, the buffer clears (to a grey colour), but nothing from the vertex array renders. I have no idea where to go from here, and because GLKit is so new there is not much information available on how to use it properly.
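From what I have read so far, I suspect the problem might be that glVertexPointer and glEnableClientState belong to the ES 1.1 fixed-function pipeline and do nothing under an ES 2.0 context, and that GLKView already creates and binds its own framebuffer before it calls glkView:drawInRect:, so my manually created framebuffer may not even be needed. Below is a rough, untested sketch of what I think the GLKBaseEffect / vertex-attribute version of my draw method would look like (I turn the light off and use a constant colour just to make the quad easy to see, and draw a triangle fan instead of points since I never set a point size). Is this the right direction?

- (void)glkView:(GLKView *)view drawInRect:(CGRect)rect
{
    glClearColor(0.4f, 0.4f, 0.4f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Temporarily disable the light and use a flat colour while debugging,
    // since I am not supplying any normals yet.
    self.effect.light0.enabled = GL_FALSE;
    self.effect.useConstantColor = GL_TRUE;
    self.effect.constantColor = GLKVector4Make(1.0f, 0.0f, 0.0f, 1.0f);

    // GLKBaseEffect binds its own shader program here.
    [self.effect prepareToDraw];

    static const GLfloat quad[] = {
        0.0f, 0.0f, 0.0f,
        0.0f, 1.0f, 0.0f,
        1.0f, 1.0f, 0.0f,
        1.0f, 0.0f, 0.0f,
    };

    // In ES 2.0 the glEnableClientState/glVertexPointer calls are replaced
    // by generic vertex attributes; GLKBaseEffect expects its position data
    // on the GLKVertexAttribPosition attribute.
    glEnableVertexAttribArray(GLKVertexAttribPosition);
    glVertexAttribPointer(GLKVertexAttribPosition, 3, GL_FLOAT, GL_FALSE,
                          sizeof(GLfloat) * 3, quad);

    // Drawn as a triangle fan rather than GL_POINTS.
    glDrawArrays(GL_TRIANGLE_FAN, 0, 4);

    glDisableVertexAttribArray(GLKVertexAttribPosition);
}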