1

I am looking for a way to draw text on EGLTexture using Skia library in C/C++. I am planning to run a program that does it on Android platform during bootup before SurfaceFlinger comes up.

Please don't point me to Android Java examples as that is not what I am looking for. I am troubleshooting UI issue at frame buffer level. I am looking for a way to do this in C/C++ using Android native libs (Skia etc).

I have a sample program that can render an image using SkBitmap onto EGLTexture. I was able to display it on monitor. I followed same example and came up with a strategy like this. But it doesn't work though.

0) Clear the screen with green color
1) Create SkBitmap of size 640x480.
2) Create EGLTexture backed by the pixel buffer returned by SkBitmap.lockPixels()
3) Draw text on SkBitmap using SkCanvas, then upload the bitmap into the above texture.
4) Draw the texture onto the current surface.

I used boot animation program (of android) as my starting point for this. When I ran this program, all I am seeing is green color. I checked for errors of EGL calls. They all seem to succeed. Thanks for your help

Posted this question on google groups. Brian from Google has some pointers here: https://groups.google.com/d/topic/skia-discuss/aC5f6HB4gSU/discussion

Below is code that implements the above.

// Executes a GL statement, then aborts with a log message if it raised a
// GL error.
// NOTE: gl* call failures are reported by glGetError(), not eglGetError();
// the original checked the EGL error state, which stays EGL_SUCCESS even
// when a GL call fails, so GL errors were never detected here.
#define EXPECT_NO_GL_ERROR(stmt)  \
  do {  \
    stmt;  \
    const GLenum error_code = glGetError();  \
    if (GL_NO_ERROR != error_code){  \
        LOGD("GLTest: GL error code 0x%x at %s:%d", error_code, __FILE__, __LINE__); \
        /* __android_log_assert(cond, tag, fmt, ...) — log the failing statement */ \
        __android_log_assert(#stmt, "GLTest", "GL call failed: %s", #stmt); \
    }\
  } while(0)

// A GL texture object name together with the dimensions of the bitmap
// uploaded into it (used as the destination size for glDrawTexiOES).
struct Texture
{
    GLint w;    // width in pixels of the uploaded bitmap
    GLint h;    // height in pixels of the uploaded bitmap
    GLuint id;  // GL texture object name; 0 means "not created yet"
};
// Renders text into an SkBitmap with Skia each frame and draws the bitmap
// on screen through a GL texture, at roughly 12 fps, until exitPending().
// Always returns false (the loop only ends on swap failure or exit request).
bool GLTest::frametest()
{
    // Zero-initialize so initTexture() can reliably tell that no GL
    // texture object exists yet (id == 0); the original left this
    // uninitialized, so id held garbage.
    Texture texFrame = { 0, 0, 0 };

    // Paint the whole surface green once, so a later failure to draw the
    // texture is visually obvious.
    glShadeModel (GL_FLAT);
    glDisable (GL_DITHER);
    glDisable (GL_SCISSOR_TEST);
    glClearColor(0, 1, 0, 1);
    glClear (GL_COLOR_BUFFER_BIT);
    eglSwapBuffers(mDisplay, mSurface);

    SkGraphics::Init();
    SkBitmap bitmap;

    // CPU-side raster bitmap that SkCanvas draws into; its pixels are
    // uploaded to the GL texture every frame.
    bitmap.setConfig(SkBitmap::kARGB_8888_Config, 640, 480);
    bitmap.allocPixels();
    if (NO_ERROR != initTexture(&texFrame, bitmap))
    {
        LOGD("GLTest: Unable to create a texture that is backed by SkBitmap");
        return false;
    }
    SkCanvas canvas(bitmap);
    SkPaint textAttribs;

    textAttribs.setColor(0xFFFFFFFF);            // opaque white text
    textAttribs.setTextSize(SkIntToScalar(24));

    int frame_count = 0;
    do
    {
        // Per-frame start time, used only for the frame-rate cap below.
        // (The original also kept an unused startTime/time pair; removed.)
        const nsecs_t frameStart = systemTime();

        // Draw this frame into the CPU bitmap: blue background + text.
        canvas.drawColor(0xFF0000FF);
        canvas.drawText("Hello world", strlen("Hello world"), 200, 400,
                textAttribs);
        // Upload the freshly drawn bitmap into the texture.
        // (Original comment wrongly said "into canvas".)
        initTexture(&texFrame, bitmap);
        glEnable (GL_BLEND);
        EXPECT_NO_GL_ERROR(glBindTexture(GL_TEXTURE_2D, texFrame.id));
        EXPECT_NO_GL_ERROR(glDrawTexiOES(0, 0, 0, texFrame.w, texFrame.h));
        EGLBoolean res = eglSwapBuffers(mDisplay, mSurface);
        if (res == EGL_FALSE)
            break;
        frame_count++;
        if (0 == (frame_count % 150))
            LOGD("GLTest: Completed %d frames", frame_count);
        // 12fps cap: 83333us per frame minus the time this frame took,
        // to preserve CPU.
        const nsecs_t sleepTime = 83333 - ns2us(systemTime() - frameStart);
        if (sleepTime > 0)
            usleep(sleepTime);
    } while (!exitPending());

    return false;
}

// Uploads |bitmap| into |texture|. On first use (texture->id == 0 or not a
// live texture) a GL texture object is created with power-of-two storage —
// required by GL ES 1.x without the OES_texture_npot extension, and the
// reason the original 640x480 upload produced nothing — and the bitmap is
// placed in its top-left corner; the crop rect limits glDrawTexiOES to the
// bitmap area. On later calls the existing texture's pixels are refreshed
// in place (the original generated a brand-new texture per call, leaking
// one texture per frame).
// Returns NO_ERROR on success, BAD_VALUE on a null texture or an
// unsupported bitmap config (previously swallowed silently).
status_t GLTest::initTexture(Texture* texture, SkBitmap &bitmap)
{
    if (NULL == texture)
        return BAD_VALUE;

    bitmap.lockPixels();   // balanced by unlockPixels() below (original leaked the lock)

    const int w = bitmap.width();
    const int h = bitmap.height();
    const void* p = bitmap.getPixels();
    status_t err = NO_ERROR;

    // Map the Skia config to a GL format/type pair once, instead of
    // duplicating the switch per upload path.
    GLenum format = 0;
    GLenum type = 0;
    switch (bitmap.getConfig())
    {
    case SkBitmap::kA8_Config:
        format = GL_ALPHA; type = GL_UNSIGNED_BYTE;
        break;
    case SkBitmap::kARGB_4444_Config:
        format = GL_RGBA; type = GL_UNSIGNED_SHORT_4_4_4_4;
        break;
    case SkBitmap::kARGB_8888_Config:
        format = GL_RGBA; type = GL_UNSIGNED_BYTE;
        break;
    case SkBitmap::kRGB_565_Config:
        format = GL_RGB; type = GL_UNSIGNED_SHORT_5_6_5;
        break;
    default:
        bitmap.unlockPixels();
        return BAD_VALUE;
    }

    if (texture->id != 0 && glIsTexture(texture->id))
    {
        // Texture already exists; assumes the bitmap geometry/config is
        // unchanged since creation (true for the 640x480 frame loop above).
        EXPECT_NO_GL_ERROR(glBindTexture(GL_TEXTURE_2D, texture->id));
        EXPECT_NO_GL_ERROR(
                glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, format, type, p));
        bitmap.unlockPixels();
        return err;
    }

    // Round dimensions up to the next power of two for the GL storage.
    int tw = 1;
    int th = 1;
    while (tw < w) tw <<= 1;
    while (th < h) th <<= 1;

    // Crop rect {x, y, w, -h}: negative height flips vertically, since
    // Skia's origin is top-left and GL's is bottom-left.
    GLint crop[4] =
    { 0, h, w, -h };
    texture->w = w;
    texture->h = h;

    EXPECT_NO_GL_ERROR(glGenTextures(1, &(texture->id)));
    EXPECT_NO_GL_ERROR(glBindTexture(GL_TEXTURE_2D, texture->id));

    if (tw != w || th != h)
    {
        // Allocate POT storage (no data), then upload the NPOT bitmap
        // into its top-left corner.
        EXPECT_NO_GL_ERROR(
                glTexImage2D(GL_TEXTURE_2D, 0, format, tw, th, 0, format, type, 0));
        EXPECT_NO_GL_ERROR(
                glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, format, type, p));
    }
    else
    {
        EXPECT_NO_GL_ERROR(
                glTexImage2D(GL_TEXTURE_2D, 0, format, w, h, 0, format, type, p));
    }

    EXPECT_NO_GL_ERROR(
            glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, crop));
    EXPECT_NO_GL_ERROR(
            glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST));
    EXPECT_NO_GL_ERROR(
            glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST));
    EXPECT_NO_GL_ERROR(
            glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT));
    EXPECT_NO_GL_ERROR(
            glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT));

    bitmap.unlockPixels();
    return err;
}
videoguy
  • 1,732
  • 2
  • 24
  • 49
  • 1
    Not sure what this has to do with android-source (aosp) but you might get more help with the android-ndk tag on your question. – Andrew T. Jan 22 '14 at 18:36
  • FWIW, if your character set is limited you can just pre-render the glyphs into a texture and draw from that. That's what screenrecord v1.1 does -- see https://android.googlesource.com/platform/frameworks/av/+/master/cmds/screenrecord/ (esp. TextRenderer). – fadden Feb 06 '14 at 17:56
  • The text that I am drawing is not static. I am doing this as part of an app that is measuring frame rate. The text keeps changing based on frame being painted. – videoguy Feb 06 '14 at 18:11

1 Answer

0

I found why my code didn't work. When creating textures, the width and height must each be a power of 2. For example, if the width is 1920, then the texture should be created with a width of 2048 (as 2048 is the next power of 2).

Changed initTexture to below. Now I am able to draw text to SkBitmap and then upload the bitmap to texture and draw the texture.

Below is new initTexture that uploads given bitmap to a texture.

// Uploads |bitmap| into |texture|. Creates the GL texture object on first
// use (texture->id == 0) with power-of-two storage — required by GL ES 1.x
// without the OES_texture_npot extension — and refreshes the pixels in
// place on subsequent calls (assumes unchanged bitmap geometry/config).
// Returns false on a null texture, empty/unlocked bitmap, or unsupported
// bitmap config (the original silently reported success for those).
bool initTexture(Texture* texture, const SkBitmap &bitmap)
{
    // Validate arguments before doing any work (original checked texture
    // only after computing sizes).
    if (NULL == texture)
        return false;

    SkAutoLockPixels alp(bitmap);   // RAII lock, released on every return path

    const int w = bitmap.width();
    const int h = bitmap.height();
    const void* p = bitmap.getPixels();
    // Guard: __builtin_clz(0) is undefined behavior, and a NULL pixel
    // pointer would upload garbage.
    if (w <= 0 || h <= 0 || NULL == p)
        return false;

    // Map the Skia config to a GL format/type pair once; the original
    // repeated this four-way switch in three places.
    GLenum format;
    GLenum type;
    switch (bitmap.getConfig())
    {
    case SkBitmap::kA8_Config:
        format = GL_ALPHA; type = GL_UNSIGNED_BYTE;
        break;
    case SkBitmap::kARGB_4444_Config:
        format = GL_RGBA; type = GL_UNSIGNED_SHORT_4_4_4_4;
        break;
    case SkBitmap::kARGB_8888_Config:
        format = GL_RGBA; type = GL_UNSIGNED_BYTE;
        break;
    case SkBitmap::kRGB_565_Config:
        format = GL_RGB; type = GL_UNSIGNED_SHORT_5_6_5;
        break;
    default:
        return false;
    }

    if (texture->id != 0)
    {
        // Texture already exists: just refresh its pixels.
        glBindTexture(GL_TEXTURE_2D, texture->id);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, format, type, p);
        return true;
    }

    // Round w/h up to the next power of two: largest POT <= w, doubled
    // if w is not itself a POT.
    int tw = 1 << (31 - __builtin_clz(w));
    int th = 1 << (31 - __builtin_clz(h));
    if (tw < w)
        tw <<= 1;
    if (th < h)
        th <<= 1;

    // Crop rect {x, y, w, -h}: negative height flips vertically (Skia is
    // top-left origin, GL is bottom-left); limits glDrawTexiOES to the
    // bitmap area of the (possibly larger) POT texture.
    GLint crop[4] = { 0, h, w, -h };
    texture->w = w;
    texture->h = h;

    glEnable (GL_TEXTURE_2D);
    glGenTextures(1, &(texture->id));
    glBindTexture(GL_TEXTURE_2D, texture->id);

    if (tw != w || th != h)
    {
        // Allocate POT storage with no data, then upload the NPOT bitmap
        // into its top-left corner. (The original did this only for the
        // ARGB_8888 config; the POT requirement applies to all of them.)
        glTexImage2D(GL_TEXTURE_2D, 0, format, tw, th, 0, format, type, 0);
        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, format, type, p);
    }
    else
    {
        glTexImage2D(GL_TEXTURE_2D, 0, format, w, h, 0, format, type, p);
    }

    glTexParameteriv(GL_TEXTURE_2D, GL_TEXTURE_CROP_RECT_OES, crop);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameterx(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    return true;
}
videoguy
  • 1,732
  • 2
  • 24
  • 49
  • 1
    I think you meant "2048" rather than "2024". There's a more general algorithm (http://stackoverflow.com/questions/1322510/given-an-integer-how-do-i-find-the-next-largest-power-of-two-using-bit-twiddlin) if you don't want to depend on `__builtin_clz`. – fadden Mar 11 '14 at 21:22