This is a question for Rhythmic Fistman or anyone who knows the answer:
In the thread "How to use OpenGL ES on a separate thread on iPhone?", Rhythmic Fistman found that "iOS5's CVOpenGLESTextureCaches essentially make texture uploads free so I don't need shareGroups anymore and my code is simpler and faster."
I am currently working on an app that draws 3D graphics and saves them to a movie file. From my understanding, the UIView's OpenGL ES framebuffer must be backed by a color renderbuffer rather than by a CVOpenGLESTextureCache, whereas the texture cache (i.e. a CVPixelBuffer-backed texture) is what the movie writer uses to save the OpenGL rendering of the 3D graphics.
I don't want OpenGL ES to render the same 3D graphics twice; I want to share the result of a single render.
Could you please share your knowledge and/or source code showing how to use a CVOpenGLESTextureCache to share the rendered texture between a worker thread that saves it to the movie and the main thread's UIView that displays the framebuffer?
Thanks in advance.
Regards, Howard
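For reference, the kind of setup I mean is roughly this (just a sketch to show what I am asking about; workingContext, width and height are placeholders from my own code, so please correct me if the attributes are wrong):

// Create a texture cache tied to the OpenGL ES context that will render into the buffer.
CVOpenGLESTextureCacheRef cvTextureCache = NULL;
CVReturn err = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL,
                                            (CVEAGLContext)workingContext, NULL, &cvTextureCache);

// Create a BGRA, IOSurface-backed CVPixelBuffer that can back both the OpenGL texture
// and the frames written to the movie file.
NSDictionary *attrs = [NSDictionary dictionaryWithObject:[NSDictionary dictionary]
                                                  forKey:(NSString *)kCVPixelBufferIOSurfacePropertiesKey];
CVPixelBufferRef pixelBuffer = NULL;
err = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                          kCVPixelFormatType_32BGRA, (CFDictionaryRef)attrs, &pixelBuffer);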
===========Update========
Thanks. Following Brad's answer and the RosyWriter sample code, I wrote some simple code that renders both the final pixel buffer and the main UIView from the AVCaptureOutput dispatch-queue thread. (I will polish it later.)
There are two OpenGL ES 2.0 contexts: mainContext, created for the UIView, and workingContext, created for the AVCaptureOutput dispatch queue. They share the same sharegroup (see the sketch below).
So far so good. I will check whether any screen-tearing artifacts appear.
Thanks so much!
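The two contexts are created so that they can see each other's textures, roughly like this (a sketch; the names mainContext and workingContext are mine):

// Main context used by the UIView's layer, created first (e.g. on the main thread).
EAGLContext *mainContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

// Worker context for the AVCaptureOutput dispatch queue, sharing the same sharegroup,
// so textures rendered here are visible to mainContext.
EAGLContext *workingContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2
                                                    sharegroup:[mainContext sharegroup]];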
Below is my code:
//Draw the scene into the CVPixelBufferRef, then draw the resulting texture to the main UIView
- (void)Draw:(CVPixelBufferRef)updatedImageBuffer
{
    CVOpenGLESTextureRef cvTexture;

    /////////////////////////////////////////////
    // First draw the graphics into the CVPixelBufferRef
    /////////////////////////////////////////////

    // Creates a live binding between the image buffer and the underlying texture object.
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault,
                                                                cvTextureCache,
                                                                updatedImageBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,            // OpenGL internal format
                                                                esContext.bufWidth,
                                                                esContext.bufHeight,
                                                                GL_BGRA,            // native iOS pixel format
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &cvTexture);
    if (err == kCVReturnSuccess) {
        assert(CVOpenGLESTextureGetTarget(cvTexture) == GL_TEXTURE_2D);
        GLuint texId = CVOpenGLESTextureGetName(cvTexture);

        if (!workingContext || [EAGLContext setCurrentContext:workingContext] == NO) {
            NSLog(@"Draw: [EAGLContext setCurrentContext:workingContext] failed");
            return;
        }

        glBindTexture(GL_TEXTURE_2D, texId);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // Render the scene into the texture that wraps the pixel buffer
        glBindFramebuffer(GL_FRAMEBUFFER, workerFrameBuffer);
        glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, texId, 0);
        GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
        if (status == GL_FRAMEBUFFER_COMPLETE) {
            drawGraphics(&esContext);
            glBindTexture(GL_TEXTURE_2D, 0);

            // Flush the modifying context's commands before the shared texture
            // is used by the other context in the sharegroup.
            glFlush();

            /////////////////////////////////////////////
            // Then draw the texture to the main UIView
            /////////////////////////////////////////////
            if (!mainContext || [EAGLContext setCurrentContext:mainContext] == NO) {
                NSLog(@"Draw: [EAGLContext setCurrentContext:mainContext] failed");
                return;
            }

            glBindTexture(GL_TEXTURE_2D, texId);

            // Set texture parameters
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

            glBindFramebuffer(GL_FRAMEBUFFER, mainFrameBuffer);
            status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
            if (status == GL_FRAMEBUFFER_COMPLETE) {
                // Draw the texture on the screen with OpenGL ES 2
                drawUIView(&esContext, textureVertices);

                // Present the UIView
                glBindRenderbuffer(GL_RENDERBUFFER, mainColorBuffer);
                [mainContext presentRenderbuffer:GL_RENDERBUFFER];
                glBindTexture(GL_TEXTURE_2D, 0);
            }
            //glFlush();
        }

        // Flush the texture cache
        CVOpenGLESTextureCacheFlush(cvTextureCache, 0);

        // Release the created texture
        CFRelease(cvTexture);
    }
}
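For reference, passThroughProgram is just a plain textured-quad shader pair, along these lines (a sketch; the attribute and uniform names are mine and have to match the attribute bindings in the setup code):

// Vertex shader: pass the position straight through and hand the texture coordinate to the fragment shader.
static const char *kPassThroughVertexShader =
    "attribute vec4 position;                                   \n"
    "attribute mediump vec2 textureCoordinate;                  \n"
    "varying   mediump vec2 coordinate;                         \n"
    "void main()                                                \n"
    "{                                                          \n"
    "    gl_Position = position;                                \n"
    "    coordinate  = textureCoordinate;                       \n"
    "}                                                          \n";

// Fragment shader: sample the CVOpenGLESTextureCache-backed texture.
static const char *kPassThroughFragmentShader =
    "varying highp vec2 coordinate;                             \n"
    "uniform sampler2D videoFrame;                              \n"
    "void main()                                                \n"
    "{                                                          \n"
    "    gl_FragColor = texture2D(videoFrame, coordinate);      \n"
    "}                                                          \n";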
void drawUIView(ESContext *esContext, const GLfloat *textureVertices)
{
    UserData *userData = esContext->userData;

    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f,
         1.0f, -1.0f,
        -1.0f,  1.0f,
         1.0f,  1.0f,
    };

    // Set the viewport to the entire view
    glViewport(0, 0, esContext->viewWidth, esContext->viewHeight);

    // Clear the color buffer
    glClear(GL_COLOR_BUFFER_BIT);

    // Use the pass-through shader program
    glUseProgram(userData->passThroughProgram);

    // Update attribute values
    glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices);
    glEnableVertexAttribArray(ATTRIB_VERTEX);
    glVertexAttribPointer(ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices);
    glEnableVertexAttribArray(ATTRIB_TEXTUREPOSITON);

    // Update uniform values if there are any
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
}
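The textureVertices passed to drawUIView are just the texture coordinates of a full-screen quad, in the same order as squareVertices; in my case something like this (flip the second component if your output comes out upside down):

static const GLfloat textureVertices[] = {
    0.0f, 0.0f,   // bottom left
    1.0f, 0.0f,   // bottom right
    0.0f, 1.0f,   // top left
    1.0f, 1.0f,   // top right
};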
///
// Draw a triangle using the shader pair created in Init()
//
void drawGraphics(ESContext *esContext)
{
    UserData *userData = esContext->userData;

    static const GLfloat vVertices[] = {  0.0f,  0.5f, 0.0f,
                                         -0.5f, -0.5f, 0.0f,
                                          0.5f, -0.5f, 0.0f };

    // Set the viewport to the pixel buffer's size
    glViewport(0, 0, esContext->bufWidth, esContext->bufHeight);

    // Clear the color buffer
    glClear(GL_COLOR_BUFFER_BIT);

    // Use the program object
    glUseProgram(userData->graphicsProgram);

    // Load the vertex data
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, vVertices);
    glEnableVertexAttribArray(0);

    glDrawArrays(GL_TRIANGLES, 0, 3);
}
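Finally, the same updatedImageBuffer is handed to the movie writer on the same dispatch queue, roughly like this (a sketch; assetWriter, assetWriterInput, pixelBufferAdaptor and frameTime come from my writer setup and are not shown here):

// After Draw: has rendered into updatedImageBuffer, append it to the movie.
if (assetWriterInput.readyForMoreMediaData) {
    if (![pixelBufferAdaptor appendPixelBuffer:updatedImageBuffer
                          withPresentationTime:frameTime]) {
        NSLog(@"appendPixelBuffer failed: %@", assetWriter.error);
    }
}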