
I'm trying to use MediaCodec to retrieve all the frames from a video for image-processing work. I'm trying to render the video and capture the frames from the output buffers, but I can't initialize a Bitmap instance from the received bytes.

I've tried rendering to a surface and to nothing (null), because I've noticed that when you render to null, the output buffers receive the bytes of the decoded frames.

This is the code:

private static final String SAMPLE = Environment.getExternalStorageDirectory() + "/test_videos/sample2.mp4";
private PlayerThread mPlayer = null;

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    SurfaceView sv = new SurfaceView(this);
    sv.getHolder().addCallback(this);
    setContentView(sv);
}

protected void onDestroy() {
    super.onDestroy();
}

@Override
public void surfaceCreated(SurfaceHolder holder) {
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    if (mPlayer == null) {
        mPlayer = new PlayerThread(holder.getSurface());
        mPlayer.start();
    }
}

@Override
public void surfaceDestroyed(SurfaceHolder holder) {
    if (mPlayer != null) {
        mPlayer.interrupt();
    }
}

private void writeFrameToSDCard(byte[] bytes, int i, int sampleSize) {
    try {
        Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, sampleSize);

        File file = new File(Environment.getExternalStorageDirectory() + "/test_videos/sample" + i + ".png");
        if (file.exists())
            file.delete();

        file.createNewFile();

        FileOutputStream out = new FileOutputStream(file.getAbsoluteFile());

        bmp.compress(Bitmap.CompressFormat.PNG, 90, out);
        out.close();

    } catch (Exception e) {
        e.printStackTrace();
    }
}

private class PlayerThread extends Thread {
    private MediaExtractor extractor;
    private MediaCodec decoder;
    private Surface surface;

    public PlayerThread(Surface surface) {
        this.surface = surface;
    }

    @Override
    public void run() {
        extractor = new MediaExtractor();
        try {
            extractor.setDataSource(SAMPLE);
        } catch (IOException e) {
            // setDataSource(String) can throw IOException; bail out if the file can't be opened
            Log.e("DecodeActivity", "Failed to open " + SAMPLE, e);
            return;
        }

        int trackCount = extractor.getTrackCount();
        Log.d("MediaCodecTag", "Track count: " + trackCount);

        for (int i = 0; i < extractor.getTrackCount(); i++) {
            MediaFormat format = extractor.getTrackFormat(i);
            String mime = format.getString(MediaFormat.KEY_MIME);
            if (mime.startsWith("video/")) {
                extractor.selectTrack(i);
                decoder = MediaCodec.createDecoderByType(mime);
                decoder.configure(format, surface, null, 0);
                break;
            }
        }

        if (decoder == null) {
            Log.e("DecodeActivity", "Can't find video info!");
            return;
        }

        decoder.start();

        ByteBuffer[] inputBuffers = decoder.getInputBuffers();
        ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
        BufferInfo info = new BufferInfo();
        boolean isEOS = false;
        long startMs = System.currentTimeMillis();

        int i = 0;
        while (!Thread.interrupted()) {
            if (!isEOS) {
                int inIndex = decoder.dequeueInputBuffer(10000);
                if (inIndex >= 0) {
                    ByteBuffer buffer = inputBuffers[inIndex];

                    int sampleSize = extractor.readSampleData(buffer, 0);

                    if (sampleSize < 0) {
                        decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        isEOS = true;
                    } else {
                        decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
                        extractor.advance();
                    }
                }
            }

            /* saves frame to sdcard */
            int outIndex = decoder.dequeueOutputBuffer(info, 10000); // outIndex is negative most of the time

            switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                Log.v("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + buffer);

                // We use a very simple clock to keep the video FPS, or the video
                // playback will be too fast
                while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
                    try {
                        sleep(10);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                        break;
                    }
                }
                try {
                    // NOTE: copy the bytes out *before* releaseOutputBuffer() returns the
                    // buffer to the codec, and only read the info.size bytes that are valid
                    byte[] dst = new byte[info.size];
                    outputBuffers[outIndex].position(info.offset);
                    outputBuffers[outIndex].get(dst, 0, info.size);
                    writeFrameToSDCard(dst, i, dst.length);
                    i++;
                } catch (Exception e) {
                    Log.d("DecodeActivity", "Error while creating bitmap: " + e.getMessage());
                }
                decoder.releaseOutputBuffer(outIndex, true);

                break;
            }

            // All decoded frames have been rendered, we can stop playing now
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
                break;
            }
        }

        decoder.stop();
        decoder.release();
        extractor.release();
    }
}

Any help would be much appreciated.

  • Hi, I'm trying to get frames from a video. I tried MediaMetadataRetriever, but it returned the same image for every frame, and I hit too many issues with ffmpeg, so I moved on to MediaExtractor, which I can't figure out from what I've found online. I saw your code on Stack Overflow; could you please share your full working code? I'm new to Android. – Manoj May 22 '14 at 11:39

1 Answer


You can decode to a Surface or to a ByteBuffer, but not both. Because you are configuring a Surface, there will always be zero bytes of data in the output buffer.
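For illustration, a minimal sketch of the ByteBuffer path applied to the loop in the question (the variable names and the 10000 µs timeout are the question's own): configure with a null surface, then copy the bytes out before releasing the buffer.

    decoder.configure(format, null, null, 0);  // null Surface = ByteBuffer output mode
    decoder.start();
    // ... feed input as before ...
    int outIndex = decoder.dequeueOutputBuffer(info, 10000);
    if (outIndex >= 0) {
        ByteBuffer buf = outputBuffers[outIndex];
        byte[] frameData = new byte[info.size];   // raw YUV pixels, not a compressed image
        buf.position(info.offset);
        buf.get(frameData, 0, info.size);
        decoder.releaseOutputBuffer(outIndex, false);  // false: nothing to render
    }

Note that frameData holds uncompressed YUV, so BitmapFactory.decodeByteArray() will still return null; see below for the format issue.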

If you configure for ByteBuffer decoding, the data format will vary, but to my knowledge will never be an ARGB format that Bitmap understands. You can see examples of two YUV formats being examined in the buffer-to-buffer tests in the CTS EncodeDecodeTest in method checkFrame(). Note, however, that the first thing it does is check the format and return immediately if it's not recognized.
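By way of example, the format early-out looks roughly like this, and for the semi-planar layout you can get a Bitmap via YuvImage by swapping the chroma order to NV21 first. This is only a sketch assuming the decoder reports one of the two generic formats; many devices use proprietary layouts this won't handle, and the nv12ToBitmap helper plus the width/height variables are mine, not from the test:

    // Call after INFO_OUTPUT_FORMAT_CHANGED, when the output format is known
    MediaFormat outFormat = decoder.getOutputFormat();
    int colorFormat = outFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
    if (colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
            && colorFormat != MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
        return;  // unrecognized format -- same early-out as checkFrame()
    }

    // Hypothetical helper: COLOR_FormatYUV420SemiPlanar is NV12 (interleaved U/V);
    // swap the chroma bytes to NV21 so YuvImage can do the color conversion.
    static Bitmap nv12ToBitmap(byte[] nv12, int width, int height) {
        byte[] nv21 = new byte[nv12.length];
        System.arraycopy(nv12, 0, nv21, 0, width * height);  // Y plane is identical
        for (int i = width * height; i + 1 < nv12.length; i += 2) {
            nv21[i] = nv12[i + 1];   // V comes first in NV21
            nv21[i + 1] = nv12[i];   // then U
        }
        YuvImage yuv = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
        ByteArrayOutputStream jpegOut = new ByteArrayOutputStream();
        yuv.compressToJpeg(new Rect(0, 0, width, height), 100, jpegOut);
        byte[] jpeg = jpegOut.toByteArray();
        return BitmapFactory.decodeByteArray(jpeg, 0, jpeg.length);
    }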

At present (Android 4.4), the only reliable way to do this is to decode to a SurfaceTexture, render that with GLES, and extract RGB data with glReadPixels(). Sample code is available on bigflake -- see ExtractMpegFramesTest (requires API 16+).
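For a rough idea of the final readback step in that approach (the EGL/SurfaceTexture plumbing is omitted; awaitNewImage() and drawImage() stand in for the helper class in the test, and width/height are assumed to match the video):

    // After decoder.releaseOutputBuffer(outIndex, true) sends the frame to the SurfaceTexture:
    outputSurface.awaitNewImage();   // wait for onFrameAvailable(), then updateTexImage()
    outputSurface.drawImage();       // render the external texture into the offscreen EGL surface

    ByteBuffer pixels = ByteBuffer.allocateDirect(width * height * 4);
    pixels.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height,
            GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
    pixels.rewind();

    Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bmp.copyPixelsFromBuffer(pixels);  // GL's origin is bottom-left, so flip the image afterward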

fadden
  • Thank you for the response. As you said, I rendered to a ByteBuffer (passed null instead of a surface, and passed false to releaseOutputBuffer). Now, as I see it, for APIs below 18, if I want to get the frames as bitmaps I need to identify the format and convert it to RGB myself? That sounds like an awkward way to get frames out of a video; OpenCV is probably more suitable for this kind of job – Nativ Nov 04 '13 at 12:18
  • Haven't used OpenCV. The GLES approach is the most efficient, but also the most complex to implement. Note this works starting in API 16; I mentioned API 18 because there wasn't anything better there. API 19 introduces `android.media.ImageReader`, but I need to check what formats that supports. – fadden Nov 04 '13 at 15:40
  • I added sample code to bigflake. I also confirmed that `ImageReader` does not handle `MediaCodec` decoder output. – fadden Nov 07 '13 at 18:46
  • Tiny amendment to this: when using a surface texture, I get 8 bytes in the buffer (haven't looked at what they are yet) – Soylent Graham Nov 05 '15 at 11:17
  • @SoylentGraham: should be a constant followed by the buffer handle; see https://android.googlesource.com/platform/frameworks/av/+/marshmallow-release/media/libstagefright/SurfaceMediaSource.cpp#252 – fadden Nov 05 '15 at 16:55
  • Hey fadden, I am trying to create a video recorder using Grafika. I have created a TextureView and am creating an encoder using CircularEncoder. If the video plays uninterrupted, the resulting video is fine on completion. But if the video is paused in between, the new video has still frames from the time the video was paused. Can you please help with this? – Ravneet Singh Aug 03 '17 at 13:37
  • Maybe you can help me https://stackoverflow.com/q/56791589/5709159 I think it is the exact question for you... – Sirop4ik Jun 30 '19 at 13:38