
Are there any special emulator settings needed to run OpenGL apps?

I already set "GPU emulation" property to "yes".

I am trying to run an Android sample live wallpaper, using the sample source found at this link. The desired output is a rotating triangle.

After a little effort I got the app running, but it doesn't draw anything in the emulator. When I test it on a device it works, yet in the emulator it still just shows a green screen. I found a discussion about this on Google Groups here and tried to set the viewport as suggested there, but it still doesn't show any result. In onSurfaceChanged I added this line:

gl.glViewport(0, 0, width, height);

Is this the correct way to set the viewport?

This is my renderer class:

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import android.opengl.GLU;

public class MyRenderer implements GLWallpaperService.Renderer {
    GLTriangle mTriangle;

    public void onDrawFrame(GL10 gl) {

        // Clear the colour and depth buffers to the background colour.
        gl.glClearColor(0.2f, 0.4f, 0.2f, 1f);
        gl.glClear(GL10.GL_COLOR_BUFFER_BIT | GL10.GL_DEPTH_BUFFER_BIT);

        // Rotate the model-view matrix a little more each frame, set the
        // triangle colour, then draw.
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        autoRotate(gl);
        gl.glColor4f(.2f, 0f, .5f, 1f);

        mTriangle.draw(gl);
    }

    public void onSurfaceChanged(GL10 gl, int width, int height) {

        // Match the viewport to the new surface size and set up a
        // perspective projection.
        gl.glViewport(0, 0, width, height);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        GLU.gluPerspective(gl, 60f, (float)width/(float)height, 1f, 100f);

        // Reset the model-view matrix and push the scene back along -Z so
        // the triangle sits inside the view frustum.
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
        gl.glTranslatef(0, 0, -5);
    }

    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        mTriangle = new GLTriangle();

        // Set up depth testing.
        gl.glClearDepthf(1f);
        gl.glEnable(GL10.GL_DEPTH_TEST);
        gl.glDepthFunc(GL10.GL_LEQUAL);
    }

    /**
     * Called when the engine is destroyed. Do any necessary clean up because
     * at this point your renderer instance is now done for.
     */
    public void release() {

    }

    private void autoRotate(GL10 gl) {
        gl.glRotatef(1, 0, 1, 0);
        gl.glRotatef(0.5f, 1, 0, 0);
    }
}

Here is the GLTriangle class:

import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import javax.microedition.khronos.opengles.GL10;

public class GLTriangle {
    private FloatBuffer _vertexBuffer;
    private final int _nrOfVertices = 3;

    private ShortBuffer _indexBuffer;

    public GLTriangle() {
        init();
    }

    private void init() {
        // We use ByteBuffer.allocateDirect() to get memory outside of
        // the normal, garbage collected heap. I think this is done
        // because the buffer is subject to native I/O.
        // See http://download.oracle.com/javase/1.4.2/docs/api/java/nio/ByteBuffer.html#direct

        // 3 is the number of coordinates to each vertex.
        _vertexBuffer = BufferFactory.createFloatBuffer(_nrOfVertices * 3);

        _indexBuffer = BufferFactory.createShortBuffer(_nrOfVertices);

        // Coordinates for the vertexes of the triangle.
        float[] coords = {
                -1f, -1f,  0f,  // (x1, y1, z1)
                 1f, -1f,  0f,  // (x2, y2, z2)
                 0f,  1f,  0f   // (x3, y3, z3)
        };

        short[] _indicesArray = {0, 1, 2};

        _vertexBuffer.put(coords);
        _indexBuffer.put(_indicesArray);

        _vertexBuffer.position(0);
        _indexBuffer.position(0);
    }

    public void draw(GL10 gl) {
        // 3 coordinates in each vertex
        // 0 is the space between each vertex. They are densely packed
        //   in the array, so the value is 0
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, getVertexBuffer());

        // Draw the primitives, in this case, triangles.
        gl.glDrawElements(GL10.GL_TRIANGLES, _nrOfVertices, GL10.GL_UNSIGNED_SHORT, _indexBuffer);
    }

    private FloatBuffer getVertexBuffer() {
        return _vertexBuffer;
    }
}
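
(For completeness: GLTriangle calls a BufferFactory helper that is not shown in the sample. Judging by the allocateDirect() comment in init(), a minimal sketch of such a helper could look like the following; the sample's actual class may differ, but it must hand OpenGL direct, native-order buffers.)

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

public class BufferFactory {
    // Allocate a direct, native-order buffer (4 bytes per float) so OpenGL
    // can read it from native code.
    public static FloatBuffer createFloatBuffer(int capacity) {
        ByteBuffer bb = ByteBuffer.allocateDirect(capacity * 4);
        bb.order(ByteOrder.nativeOrder());
        return bb.asFloatBuffer();
    }

    // Same idea for shorts (2 bytes per short), used for the index buffer.
    public static ShortBuffer createShortBuffer(int capacity) {
        ByteBuffer bb = ByteBuffer.allocateDirect(capacity * 2);
        bb.order(ByteOrder.nativeOrder());
        return bb.asShortBuffer();
    }
}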

What's going wrong here? Is there better sample code for an OpenGL live wallpaper?

    In your Emulator AVD settings, have you set the "GPU emulation" property to "yes"? In the AVD config that's also the `hw.gpu.enabled` property. – kelnos Oct 04 '12 at 07:15
  • Yes, I already set the "GPU emulation" property to "yes", but sorry, I don't get what you mean by "In the AVD config that's also the `hw.gpu.enabled` property". – Renjith K N Oct 04 '12 at 08:49
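
(Regarding the `hw.gpu.enabled` property mentioned in the first comment: it is the line in the AVD's config.ini that the "GPU emulation" setting maps to. A minimal sketch, assuming the default AVD location; the exact path depends on your setup:

# ~/.android/avd/<avd-name>.avd/config.ini
hw.gpu.enabled=yes

Setting "GPU emulation" to "yes" in the AVD manager writes this same line.)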

1 Answer


At last, I found it.

What I needed to do was just add

gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

to the onSurfaceCreated method, along with the line

gl.glViewport(0, 0, width, height);

in the onSurfaceChanged method of the MyRenderer class.
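
For clarity, here is a sketch of those two methods with the fix applied (everything except the two added lines is unchanged from the MyRenderer class in the question):

public void onSurfaceCreated(GL10 gl, EGLConfig config) {
    mTriangle = new GLTriangle();

    // Without this, glVertexPointer()/glDrawElements() have no enabled
    // vertex array to read from, so nothing is drawn.
    gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

    gl.glClearDepthf(1f);
    gl.glEnable(GL10.GL_DEPTH_TEST);
    gl.glDepthFunc(GL10.GL_LEQUAL);
}

public void onSurfaceChanged(GL10 gl, int width, int height) {
    // Match the GL viewport to the new surface size before setting up
    // the projection.
    gl.glViewport(0, 0, width, height);
    gl.glMatrixMode(GL10.GL_PROJECTION);
    gl.glLoadIdentity();
    GLU.gluPerspective(gl, 60f, (float) width / (float) height, 1f, 100f);

    gl.glMatrixMode(GL10.GL_MODELVIEW);
    gl.glLoadIdentity();
    gl.glTranslatef(0, 0, -5);
}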

I found a similar question on Stack Overflow itself [but the solution that worked for me there is not marked as correct :( ]
