6

I keep hoping some code will appear on the internet, but getting nowhere ;)

WebRTC incoming I420Frame object seems to have 3 arrays of yuvPlanes

A typical Android camera app gets PreviewCallback.onPreviewFrame byte[] as a single array of bytes.

Can someone help me in how to convert I420Frames yuvPlanes to a single byte[] array like PreviewCallback.onPreviewFrame byte[] YCbCr_420_SP (NV21)?

For reference, VideoStreamsView.java has this code to render to OpenGL - but I just want it like camera preview ;) From: https://code.google.com/p/libjingle/source/browse/trunk/talk/examples/android/src/org/appspot/apprtc/VideoStreamsView.java?r=286

// Upload the YUV planes from |frame| to |textures|.
// Upload the three YUV planes of |frame| into the luminance textures in |textures|.
private void texImage2D(I420Frame frame, int[] textures) {
  for (int plane = 0; plane < 3; ++plane) {
    ByteBuffer pixels = frame.yuvPlanes[plane];
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + plane);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[plane]);
    // Plane 0 is full-resolution luma; planes 1 and 2 are half-resolution chroma.
    int planeWidth = plane == 0 ? frame.width : frame.width / 2;
    int planeHeight = plane == 0 ? frame.height : frame.height / 2;
    // This renderer assumes tightly packed planes (stride == width).
    abortUnless(planeWidth == frame.yuvStrides[plane],
        frame.yuvStrides[plane] + "!=" + planeWidth);
    GLES20.glTexImage2D(
        GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
        GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, pixels);
  }
  checkNoGLES2Error();
}

Thank you.

  • You probably want to convert a NV21 preview buffer to I420 to send it out via WebRTC? – Alex Cohn Dec 28 '13 at 08:53
  • Opposite direction. I have an incoming I420Frame - which is rather internal to WebRTC - and want to make it a general image frame - such as android Bitmap (which could be converted to JPG). – RoundSparrow hilltx Dec 29 '13 at 02:20
  • 1
    To produce JPG or bitmap, you don't need NV21. You need [yuvImage](http://developer.android.com/reference/android/graphics/YuvImage.html) which supports multi-plane YUV_420_888 format. – Alex Cohn Dec 29 '13 at 10:53

2 Answers

4

OK, here you go:

// Copy every byte of |src| into |dst|, clobbering the position and limit
// of both buffers (both end up rewound with limit == capacity).
//** copied from org/webrtc/VideoRenderer.java **//
private static void copyPlane(ByteBuffer src, ByteBuffer dst) {
  // clear() == position(0).limit(capacity()); it does not erase contents.
  src.clear();
  dst.put(src);
  dst.clear();
}

/**
 * Converts a WebRTC I420 frame (planes: 0 = Y, 1 = U/Cb, 2 = V/Cr) into an
 * android.graphics.YuvImage.
 *
 * @param src         the incoming WebRTC frame.
 * @param imageFormat android.graphics.ImageFormat.YV12 or NV21; any other
 *                    value returns null.
 * @return a YuvImage backed by a freshly allocated byte[], or null for an
 *         unsupported format.
 *
 * NOTE(review): YuvImage's constructor only accepts NV21 and YUY2 on many
 * SDK levels, so the YV12 branch may throw IllegalArgumentException — verify
 * on the target API level.
 */
public static android.graphics.YuvImage ConvertTo(org.webrtc.VideoRenderer.I420Frame src, int imageFormat) {
    switch (imageFormat) {
    default:
        return null;

    case android.graphics.ImageFormat.YV12: {
        // YV12 keeps three separate planes in Y, V, U order, so whole planes
        // can be copied as-is — only the U and V planes are swapped.
        byte[] bytes = new byte[src.yuvStrides[0]*src.height +
                            src.yuvStrides[1]*src.height/2 + 
                            src.yuvStrides[2]*src.height/2];
        ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.yuvStrides[0]*src.height);
        copyPlane(src.yuvPlanes[0], tmp);
        // V (plane 2) precedes U (plane 1) in the YV12 layout.
        tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0]*src.height, src.yuvStrides[2]*src.height/2);
        copyPlane(src.yuvPlanes[2], tmp);
        tmp = ByteBuffer.wrap(bytes, src.yuvStrides[0]*src.height+src.yuvStrides[2]*src.height/2, src.yuvStrides[1]*src.height/2);
        copyPlane(src.yuvPlanes[1], tmp);
        // NOTE(review): strides are handed over in I420 order [Y, U, V] while
        // the planes were written in [Y, V, U] order; this is harmless only
        // when the U and V strides are equal — confirm.
        int[] strides = src.yuvStrides.clone();
        return new YuvImage(bytes, imageFormat, src.width, src.height, strides);
    }

    case android.graphics.ImageFormat.NV21: {
        // NV21 requires tightly packed pixels (no stride support in YuvImage
        // for this format), so fall back to a per-pixel copy when any plane
        // is padded.
        if (src.yuvStrides[0] != src.width)
            return convertLineByLine(src);
        if (src.yuvStrides[1] != src.width/2)
            return convertLineByLine(src);
        if (src.yuvStrides[2] != src.width/2)
            return convertLineByLine(src);

        byte[] bytes = new byte[src.yuvStrides[0]*src.height +
                            src.yuvStrides[1]*src.height/2 + 
                            src.yuvStrides[2]*src.height/2];
        // Luma plane is copied verbatim to the front of the buffer.
        ByteBuffer tmp = ByteBuffer.wrap(bytes, 0, src.width*src.height);
        copyPlane(src.yuvPlanes[0], tmp);

        // Scratch buffer for one quarter-resolution chroma plane.
        byte[] tmparray = new byte[src.width/2*src.height/2];
        tmp = ByteBuffer.wrap(tmparray, 0, src.width/2*src.height/2);

        // NV21 interleaves chroma as V,U byte pairs after the luma plane:
        // V (plane 2) lands on the even offsets...
        copyPlane(src.yuvPlanes[2], tmp);
        for (int row=0; row<src.height/2; row++) {
            for (int col=0; col<src.width/2; col++) {
                bytes[src.width*src.height + row*src.width + col*2] = tmparray[row*src.width/2 + col];
            }
        }
        // ...and U (plane 1) on the odd offsets. copyPlane rewinds |tmp|,
        // so the same scratch buffer is safe to reuse here.
        copyPlane(src.yuvPlanes[1], tmp);
        for (int row=0; row<src.height/2; row++) {
            for (int col=0; col<src.width/2; col++) {
                bytes[src.width*src.height + row*src.width + col*2+1] = tmparray[row*src.width/2 + col];
            }
        }
        return new YuvImage(bytes, imageFormat, src.width, src.height, null);
    }
    }
}

/**
 * Stride-aware I420 → NV21 conversion, copying pixel by pixel.
 *
 * Used when any source plane is padded (stride != width), because
 * ImageFormat.NV21 expects tightly packed pixels.
 *
 * Fix: I420Frame.yuvPlanes is a ByteBuffer[] — a plane's bytes must be read
 * with the absolute get(index), not array indexing, which does not compile.
 *
 * @param src the incoming WebRTC frame (planes: 0 = Y, 1 = U, 2 = V).
 * @return a YuvImage in NV21 layout: full Y plane followed by interleaved V/U.
 */
public static android.graphics.YuvImage convertLineByLine(org.webrtc.VideoRenderer.I420Frame src) {
    byte[] bytes = new byte[src.width*src.height*3/2];
    int i = 0;
    // Luma: copy each row, skipping the stride padding at the end of a line.
    for (int row = 0; row < src.height; row++) {
        for (int col = 0; col < src.width; col++) {
            bytes[i++] = src.yuvPlanes[0].get(col + row*src.yuvStrides[0]);
        }
    }
    // Chroma: NV21 stores quarter-resolution V and U as alternating bytes.
    for (int row = 0; row < src.height/2; row++) {
        for (int col = 0; col < src.width/2; col++) {
            bytes[i++] = src.yuvPlanes[2].get(col + row*src.yuvStrides[2]);
            bytes[i++] = src.yuvPlanes[1].get(col + row*src.yuvStrides[1]);
        }
    }
    return new YuvImage(bytes, android.graphics.ImageFormat.NV21, src.width, src.height, null);
}
}

This converts I420Frame to an Android YuvImage in android.graphics.ImageFormat.NV21, which you can pass to compressToJpeg(). ImageFormat.YV12 support seems to be limited in the SDK. Note that the U and V planes must be swapped, because NV21 stores V before U.

Most error checking is skipped for brevity.

Alex Cohn
  • 56,089
  • 9
  • 113
  • 307
  • Hey Alex - thanks for the effort on this. However, the code so far isn't working. The final line of the ConvertTo method returns: java.lang.IllegalArgumentException: only support ImageFormat.NV21 and ImageFormat.YUY2 for now – RoundSparrow hilltx Dec 30 '13 at 02:11
  • Unfortunately, NV21 will be less efficient. Stay tuned for an update. – Alex Cohn Dec 30 '13 at 10:49
  • Alex - grep on the code for WebRTC reveals it uses NV21 internally in many places – RoundSparrow hilltx Dec 30 '13 at 16:28
  • Tried your latest edit - specified NV21 as the int parameter - it came out with colors distorted. you are close :) here is sample: http://i.imgur.com/SKo8VOK.jpg - I'll look at the code closer. – RoundSparrow hilltx Dec 30 '13 at 21:13
  • With a visual, it will be much easier now. – Alex Cohn Dec 30 '13 at 22:23
  • Alex - what fix? Stackoverflow says you haven't edited for 11 hours - or am I confused? – RoundSparrow hilltx Dec 30 '13 at 23:13
  • Apparently, they have bugs, too. Just compare the code - I had screwed it up while reformatting to paste first time. – Alex Cohn Dec 31 '13 at 04:54
  • I did copy/repaste the code and still had color distortions... you did catch the sample image I posted? http://i.imgur.com/SKo8VOK.jpg - and code from 4 hours ago: http://i.imgur.com/MAQXYLv.jpg – RoundSparrow hilltx Dec 31 '13 at 05:26
  • I am afraid I see no changes between the two JPGs. Make sure you use the second method (the one that accepts imageFormat parameter)! **PS** maybe, the strides were not good last time, so try once more. – Alex Cohn Dec 31 '13 at 09:59
  • Any clues as to how to handle the strides not matching the height? I'm getting images from an `ImageReader` and my stride values are a little higher than the width of the image. – Roberto Andrade Feb 27 '15 at 00:07
  • @RobertoAndrade: strides are related to width; essentially, they let the lines be optimally aligned; there is no connection between stride (or width) to height. `ImageFormat.YV12` simply supports strides, but `ImageFormat.NV21` expects tightly packed pixels. The code above does just that. – Alex Cohn Feb 28 '15 at 10:45
  • Ok, so why does it return with null if the stride values does not match the width? – Roberto Andrade Feb 28 '15 at 14:40
  • I would expect it just to skip by those bytes in order to form the continuous NV21 stream, no? – Roberto Andrade Feb 28 '15 at 14:58
  • @RobertoAndrade: this is an old post, so I did not remember exactly what I did there. You are right, there is this check and **null** is returned when stride does not match width. Actually, for NV21 the performance hit of copying line by line will probably not be too high (because anyways, manipulation of chroma makes this conversion significantly slower than YV12). I will post an updated version. – Alex Cohn Mar 01 '15 at 06:45
  • Please Can anyone answer this?http://stackoverflow.com/questions/41841257/android-org-webrtc-videorenderer-i420frame-arrays-from-an-image – user2801184 Jan 25 '17 at 00:40
  • Hello @AlexCohn Thkank you for this can you please help me how can we do same thing for video? – vasupujy Aug 31 '18 at 04:42
  • @vasupujy I am afraid I don't understand what you are looking for. Maybe you should open a separate question and describe your problem in detail, as well as what you tried to do to solve it. – Alex Cohn Aug 31 '18 at 09:33
  • @AlexCohn I want to VideoRenderer.I420Frame convert into mp4 video and save into sdcard is it possible Thanks in advance – vasupujy Aug 31 '18 at 09:56
3

While the question is pretty old, I'm posting this answer for anyone who needs to do this conversion. I found the code below in one of the WebRTC test cases, and it works perfectly.

 /**
  * Demo: convert an I420 test frame to NV21, compress it to JPEG and show the
  * decoded bitmap in |iv|.
  */
 private void testI420toNV21Conversion(ImageView iv) {
    // Synthetic I420 frame; swap in a real incoming frame as needed.
    VideoFrame.I420Buffer i420Buffer = createTestI420Buffer();
    final int width = i420Buffer.getWidth();
    final int height = i420Buffer.getHeight();

    // Same single-array layout as PreviewCallback.onPreviewFrame's byte[].
    byte[] nv21Data = createNV21Data(i420Buffer);

    // Round-trip through JPEG to verify the conversion visually.
    YuvImage yuvImage = new YuvImage(nv21Data, ImageFormat.NV21, width, height, null);
    ByteArrayOutputStream jpegStream = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, width, height), 100, jpegStream);
    byte[] jpegBytes = jpegStream.toByteArray();
    iv.setImageBitmap(BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.length));
}

The helper methods used above:

/** Create an NV21Buffer with the same pixel content as the given I420 buffer. **/
/** Builds an NV21 byte array with the same pixel content as |i420Buffer|. */
public static byte[] createNV21Data(VideoFrame.I420Buffer i420Buffer) {
    final int width = i420Buffer.getWidth();
    final int height = i420Buffer.getHeight();
    final int chromaStride = width;           // interleaved V/U pairs per row
    final int chromaWidth = (width + 1) / 2;  // round up for odd dimensions
    final int chromaHeight = (height + 1) / 2;
    final int ySize = width * height;

    final ByteBuffer nv21Buffer = ByteBuffer.allocateDirect(ySize + chromaStride * chromaHeight);
    // Only a direct buffer's backing array is needed; its offset is irrelevant.
    @SuppressWarnings("ByteBufferBackingArray") final byte[] nv21Data = nv21Buffer.array();

    // Tightly pack the luma plane, dropping any stride padding.
    final ByteBuffer dataY = i420Buffer.getDataY();
    final int strideY = i420Buffer.getStrideY();
    for (int row = 0; row < height; ++row) {
        for (int col = 0; col < width; ++col) {
            nv21Data[row * width + col] = dataY.get(row * strideY + col);
        }
    }

    // Interleave chroma as V,U byte pairs (NV21 order) after the luma plane.
    final ByteBuffer dataU = i420Buffer.getDataU();
    final ByteBuffer dataV = i420Buffer.getDataV();
    final int strideU = i420Buffer.getStrideU();
    final int strideV = i420Buffer.getStrideV();
    for (int row = 0; row < chromaHeight; ++row) {
        for (int col = 0; col < chromaWidth; ++col) {
            nv21Data[ySize + row * chromaStride + 2 * col + 0] = dataV.get(row * strideV + col);
            nv21Data[ySize + row * chromaStride + 2 * col + 1] = dataU.get(row * strideU + col);
        }
    }
    return nv21Data;
}

/** Convert a byte array to a direct ByteBuffer. */
/** Wraps |array| (narrowed to bytes) in a rewound, direct ByteBuffer. */
private static ByteBuffer toByteBuffer(int[] array) {
    final byte[] raw = toByteArray(array);
    final ByteBuffer buffer = ByteBuffer.allocateDirect(raw.length);
    buffer.put(raw);
    buffer.rewind();
    return buffer;
}


/**
 * Narrows each int to a byte, keeping only the low 8 bits. Callers supply
 * values already in [0, 255], which map to the unsigned byte of the same
 * value; the method itself does not clamp.
 */
private static byte[] toByteArray(int[] array) {
    final byte[] result = new byte[array.length];
    for (int i = 0; i < array.length; ++i) {
        result[i] = (byte) array[i];
    }
    return result;
}


/**
 * Builds a fixed 16x16 I420 test frame from hard-coded gradient tables.
 * The luma plane is 16x16 (stride 16); the chroma planes are 8x8 (stride 8,
 * i.e. width / 2, as required by I420's 4:2:0 subsampling).
 */
public static VideoFrame.I420Buffer createTestI420Buffer() {
    final int width = 16;
    final int height = 16;
    // 16x16 = 256 luma samples, one row of 16 per display line.
    final int[] yData = new int[] {156, 162, 167, 172, 177, 182, 187, 193, 199, 203, 209, 214, 219,
            224, 229, 235, 147, 152, 157, 162, 168, 173, 178, 183, 188, 193, 199, 205, 210, 215, 220,
            225, 138, 143, 148, 153, 158, 163, 168, 174, 180, 184, 190, 195, 200, 205, 211, 216, 128,
            133, 138, 144, 149, 154, 159, 165, 170, 175, 181, 186, 191, 196, 201, 206, 119, 124, 129,
            134, 140, 145, 150, 156, 161, 166, 171, 176, 181, 187, 192, 197, 109, 114, 119, 126, 130,
            136, 141, 146, 151, 156, 162, 167, 172, 177, 182, 187, 101, 105, 111, 116, 121, 126, 132,
            137, 142, 147, 152, 157, 162, 168, 173, 178, 90, 96, 101, 107, 112, 117, 122, 127, 132, 138,
            143, 148, 153, 158, 163, 168, 82, 87, 92, 97, 102, 107, 113, 118, 123, 128, 133, 138, 144,
            149, 154, 159, 72, 77, 83, 88, 93, 98, 103, 108, 113, 119, 124, 129, 134, 139, 144, 150, 63,
            68, 73, 78, 83, 89, 94, 99, 104, 109, 114, 119, 125, 130, 135, 140, 53, 58, 64, 69, 74, 79,
            84, 89, 95, 100, 105, 110, 115, 120, 126, 131, 44, 49, 54, 59, 64, 70, 75, 80, 85, 90, 95,
            101, 106, 111, 116, 121, 34, 40, 45, 50, 55, 60, 65, 71, 76, 81, 86, 91, 96, 101, 107, 113,
            25, 30, 35, 40, 46, 51, 56, 61, 66, 71, 77, 82, 87, 92, 98, 103, 16, 21, 26, 31, 36, 41, 46,
            52, 57, 62, 67, 72, 77, 83, 89, 94};
    // 8x8 = 64 chroma (Cb) samples.
    final int[] uData = new int[] {110, 113, 116, 118, 120, 123, 125, 128, 113, 116, 118, 120, 123,
            125, 128, 130, 116, 118, 120, 123, 125, 128, 130, 132, 118, 120, 123, 125, 128, 130, 132,
            135, 120, 123, 125, 128, 130, 132, 135, 138, 123, 125, 128, 130, 132, 135, 138, 139, 125,
            128, 130, 132, 135, 138, 139, 142, 128, 130, 132, 135, 138, 139, 142, 145};
    // 8x8 = 64 chroma (Cr) samples.
    final int[] vData = new int[] {31, 45, 59, 73, 87, 100, 114, 127, 45, 59, 73, 87, 100, 114, 128,
            141, 59, 73, 87, 100, 114, 127, 141, 155, 73, 87, 100, 114, 127, 141, 155, 168, 87, 100,
            114, 128, 141, 155, 168, 182, 100, 114, 128, 141, 155, 168, 182, 197, 114, 127, 141, 155,
            168, 182, 196, 210, 127, 141, 155, 168, 182, 196, 210, 224};
    return JavaI420Buffer.wrap(width, height, toByteBuffer(yData),
            /* strideY= */ width, toByteBuffer(uData), /* strideU= */ width / 2, toByteBuffer(vData),
            /* strideV= */ width / 2,
            /* releaseCallback= */ null);
}
Rick Sanchez
  • 4,528
  • 2
  • 27
  • 53