I'm learning OpenGL ES from this tutorial and am trying to write a simple function that draws a colored rectangle.
Input params:
size: CGSize,
scale: CGFloat,
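For reference, the full signature I have in mind is something like this (the name renderColoredRectangle is just a placeholder):

import UIKit
import OpenGLES

// Placeholder name; should return the rendered rectangle as a UIImage
func renderColoredRectangle(size: CGSize, scale: CGFloat) -> UIImage {
    // body is the code below
}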
This part of the code works OK:
// Setup context
let api = EAGLRenderingAPI.OpenGLES3
let context = EAGLContext(API: api)
EAGLContext.setCurrentContext(context)
// Setup render buffer
var renderBuffer = GLuint()
glGenRenderbuffers(1, &renderBuffer)
glBindRenderbuffer(GLenum(GL_RENDERBUFFER), renderBuffer)
let bufferWidth = GLsizei(size.width * scale)
let bufferHeight = GLsizei(size.height * scale)
let bufferFormat = GLenum(GL_RGBA8)
glRenderbufferStorage(GLenum(GL_RENDERBUFFER), bufferFormat, bufferWidth, bufferHeight)
// Setup frame buffer
var frameBuffer = GLuint()
glGenFramebuffers(1, &frameBuffer)
glBindFramebuffer(GLenum(GL_FRAMEBUFFER), frameBuffer)
glFramebufferRenderbuffer(GLenum(GL_FRAMEBUFFER), GLenum(GL_COLOR_ATTACHMENT0), GLenum(GL_RENDERBUFFER), renderBuffer)
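As a sanity check, I believe the framebuffer status can be queried at this point; it should report GL_FRAMEBUFFER_COMPLETE:

// Sanity check: the framebuffer should be complete before drawing into it
let status = glCheckFramebufferStatus(GLenum(GL_FRAMEBUFFER))
if status != GLenum(GL_FRAMEBUFFER_COMPLETE) {
    print("Framebuffer incomplete, status: \(status)")
}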
// Draw
glClearColor(1, 0.5, 0.5, 1)
glClear(GLbitfield(GL_COLOR_BUFFER_BIT))
The most problematic part is creating a UIImage:
// Get bytes
let byteLength = Int(bufferWidth * bufferHeight) * 4
let bytes = malloc(byteLength)
glReadPixels(0, 0, bufferWidth, bufferHeight, bufferFormat, GLenum(GL_UNSIGNED_BYTE), bytes) // bufferFormat is GL_RGBA8 from above
print(glGetError()) // prints 1280 (GL_INVALID_ENUM) !!!
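(Side note: as far as I understand, glGetError returns and clears only one error flag per call, so while debugging I drain the whole queue with a small helper:)

// Debugging helper: loop until GL_NO_ERROR so no pending error flag is missed
func printAllGLErrors() {
    var error = glGetError()
    while error != GLenum(GL_NO_ERROR) {
        print("GL error: \(error)")
        error = glGetError()
    }
}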
// Create a CGImage
let dataProvider = CGDataProviderCreateWithData(nil, bytes, byteLength, nil)
let colorspace = CGColorSpaceCreateDeviceRGB()
let bitmapInfo: CGBitmapInfo = [.ByteOrder32Little, CGBitmapInfo(rawValue: CGImageAlphaInfo.Last.rawValue)]
let aCGImage = CGImageCreate(
    Int(bufferWidth),       // width
    Int(bufferHeight),      // height
    8,                      // bits per component
    32,                     // bits per pixel (4 components × 8 bits)
    4 * Int(bufferWidth),   // bytes per row
    colorspace,
    bitmapInfo,
    dataProvider,
    nil,                    // decode array
    false,                  // should interpolate
    .RenderingIntentDefault
)!
let anUIImage = UIImage(CGImage: aCGImage, scale: scale, orientation: .Up)
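One more thing I'm unsure about: since OpenGL's origin is the bottom-left corner, glReadPixels returns rows bottom-up while CGImage expects them top-down, so I suspect the image may come out vertically flipped. If so, a mirrored orientation might compensate:

// My assumption: .DownMirrored (180° rotation + horizontal mirror) performs
// the vertical flip needed to undo the bottom-up row order from glReadPixels
let flippedImage = UIImage(CGImage: aCGImage, scale: scale, orientation: .DownMirrored)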
Not only does glReadPixels generate a GL_INVALID_ENUM error, I'm not even sure my way of creating the UIImage is correct.