I'm new to OpenGL ES. I'm trying to write code for screen recording of iOS apps, especially games.
I'm using the 'render to texture' method described with code in this answer (https://stackoverflow.com/a/9704392/707773) to capture the screen and write the video for a cocos2d game. One modification I made is that, when I call CVOpenGLESTextureCacheCreate,
I'm using [EAGLContext currentContext]
instead of [[GPUImageOpenGLESContext sharedImageProcessingOpenGLESContext] context].
It does record the video, but there are two issues:
When it starts recording then new drawing on the screen stops. I want the app to keep on drawing on the screen too. As I'm new to OpenGL ES, I don't have deep understanding of frame buffer objects etc., so I have a hard time figuring out how to simultaneously draw on screen and capture the screen as well. I'll appreciate a code example in this regard.
The recorded video is flipped upside down. How can I get it in correct direction?
Previously I also considered the glReadPixels
method, but that has performance drawbacks.
Update: a couple of ideas also came to mind. From my limited understanding,
I could simply draw my texture back to the screen, but I don't know how.
UPDATE: Main Draw
// ----- Display the keyframe -----
// Look up the keyframe texture for this player.
// NOTE(review): assumes augmentationTexture is indexed by
// OBJECT_KEYFRAME_1 + playerIndex — confirm against the texture-loading code.
Texture* t = augmentationTexture[OBJECT_KEYFRAME_1 + playerIndex];
frameTextureID = [t textureID];
// Height/width ratio of the texture, used below to scale the quad's Y axis
// so the keyframe image is not stretched.
aspectRatio = (float)[t height] / (float)[t width];
texCoords = quadTexCoords;
// Get the current projection matrix
QCAR::Matrix44F projMatrix = vapp.projectionMatrix;
// If the current status is valid (not NOT_READY or ERROR), render the
// video quad with the texture we've just selected
if (NOT_READY != currentStatus) {
// Convert trackable pose to matrix for use with OpenGL
QCAR::Matrix44F modelViewMatrixVideo = QCAR::Tool::convertPose2GLMatrix(trackablePose);
QCAR::Matrix44F modelViewProjectionVideo;
// Translation is currently disabled; left here for reference.
// SampleApplicationUtils::translatePoseMatrix(0.0f, 0.0f, videoData[playerIndex].targetPositiveDimensions.data[0], &modelViewMatrixVideo.data[0]);
// Scale the unit quad to the target's dimensions; Y is additionally scaled
// by aspectRatio so the quad matches the texture's proportions.
SampleApplicationUtils :: scalePoseMatrix(videoData[playerIndex].targetPositiveDimensions.data[0], videoData[playerIndex].targetPositiveDimensions.data[0] * aspectRatio, videoData[playerIndex].targetPositiveDimensions.data[0], &modelViewMatrixVideo.data[0]);
// modelViewProjection = projection * modelView (combined MVP for the shader).
SampleApplicationUtils::multiplyMatrix(projMatrix.data, &modelViewMatrixVideo.data[0], &modelViewProjectionVideo.data[0]);
// Bind the shader and feed the quad's vertex attributes.
glUseProgram(shaderProgramID);
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, quadVertices);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, quadNormals);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
// Bind the keyframe texture to unit 0 and point the sampler uniform at it.
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, frameTextureID);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (GLfloat*) &modelViewProjectionVideo.data[0]);
glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
// Draw the textured quad (indexed triangles).
glDrawElements(GL_TRIANGLES, kNumQuadIndices, GL_UNSIGNED_SHORT, quadIndices);
// Restore GL state: disable the attribute arrays and unbind the program.
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
glUseProgram(0);
}
// Add the video texture buffer to the frame
// Bind the texture-cache texture using the target reported by the writer
// (CVOpenGLESTexture targets are typically GL_TEXTURE_2D on iOS).
glBindTexture([videoWriter textureCacheTarget], [videoWriter textureCacheID]);
// Attach the cache texture to the currently bound FBO so subsequent draws
// render into the video writer's pixel buffer.
// NOTE(review): this hard-codes GL_TEXTURE_2D while the bind above uses
// [videoWriter textureCacheTarget] — verify the two agree. Also confirm
// textureCacheID is the GL texture name (CVOpenGLESTextureGetName), not the
// CVOpenGLESTextureRef itself. The upside-down video is expected: OpenGL's
// origin is bottom-left while video frames are top-left; flip via the
// texture coordinates (or a transform on the writer input) rather than here.
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, [videoWriter textureCacheID], 0);