I recently stumbled upon this Stack Overflow post:
Rotating YUV image data for Portrait Mode Using RenderScript
I am trying to do the same thing: rotate the image, crop it, and convert it to a byte array so that I can later create an OpenCV Mat from it. This is needed for real-time processing and has to be fast; an implementation with OpenCV was taking around 200 ms, which is too long.
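For context, the end goal is roughly the following (a minimal sketch using OpenCV's Java bindings; rgbaToMat, rgbaBytes, outWidth and outHeight are placeholder names, not part of the converter below):
import org.opencv.core.CvType;
import org.opencv.core.Mat;
// Wrap a packed RGBA byte array (outWidth x outHeight pixels) in an OpenCV Mat.
static Mat rgbaToMat(byte[] rgbaBytes, int outWidth, int outHeight) {
    Mat frame = new Mat(outHeight, outWidth, CvType.CV_8UC4); // rows, cols, 4 channels
    frame.put(0, 0, rgbaBytes); // copies the bytes into the Mat's own buffer
    return frame;
}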
I have tried to integrate @Alex Cohn's library into my project. These are the functions I am using:
The RS kernel:
#pragma version(1)
#pragma rs java_package_name(whatever)
#pragma rs_fp_relaxed
rs_allocation Yplane;
uint32_t Yline;
uint32_t UVline;
rs_allocation Uplane;
rs_allocation Vplane;
rs_allocation NV21;
uint32_t Width;
uint32_t Height;
uchar4 __attribute__((kernel)) YUV420toRGB(uint32_t x, uint32_t y)
{
uchar Y = rsGetElementAt_uchar(Yplane, x + y * Yline);
uchar V = rsGetElementAt_uchar(Vplane, (x & ~1) + y/2 * UVline);
uchar U = rsGetElementAt_uchar(Uplane, (x & ~1) + y/2 * UVline);
// https://en.wikipedia.org/wiki/YCbCr#JPEG_conversion
short R = Y + (512 + 1436 * V) / 1024; // 1.402
short G = Y + (512 - 352 * U - 731 * V) / 1024; // -0.344136 -0.714136
short B = Y + (512 + 1815 * U ) / 1024; // 1.772
if (R < 0) R = 0; else if (R > 255) R = 255;
if (G < 0) G = 0; else if (G > 255) G = 255;
if (B < 0) B = 0; else if (B > 255) B = 255;
return (uchar4){R, G, B, 255};
}
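// The rotated variants below iterate over the output (rotated) geometry and map each (x, y) back to the corresponding source pixel: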
uchar4 __attribute__((kernel)) YUV420toRGB_180(uint32_t x, uint32_t y)
{
return YUV420toRGB(Width - 1 - x, Height - 1 - y);
}
uchar4 __attribute__((kernel)) YUV420toRGB_90(uint32_t x, uint32_t y)
{
return YUV420toRGB(y, Width - x - 1);
}
uchar4 __attribute__((kernel)) YUV420toRGB_270(uint32_t x, uint32_t y)
{
return YUV420toRGB(Height - 1 - y, x);
}
YuvConverter.java:
public class YuvConverter implements AutoCloseable {
private RenderScript rs;
private ScriptC_yuv2rgb scriptC_yuv2rgb;
private Bitmap bmp;
YuvConverter(Context ctx, int ySize, int uvSize, int width, int height) {
rs = RenderScript.create(ctx);
scriptC_yuv2rgb = new ScriptC_yuv2rgb(rs);
init(ySize, uvSize, width, height);
}
private Allocation allocY, allocU, allocV, allocOut;
@Override
public void close() {
if (allocY != null) allocY.destroy();
if (allocU != null) allocU.destroy();
if (allocV != null) allocV.destroy();
if (allocOut != null) allocOut.destroy();
bmp = null;
allocY = null;
allocU = null;
allocV = null;
allocOut = null;
scriptC_yuv2rgb.destroy();
scriptC_yuv2rgb = null;
rs = null;
}
private void init(int ySize, int uvSize, int width, int height) {
if (bmp == null || bmp.getWidth() != width || bmp.getHeight() != height) {
bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
if (allocOut != null) allocOut.destroy();
allocOut = null;
}
if (allocY == null || allocY.getBytesSize() != ySize) {
if (allocY != null) allocY.destroy();
Type.Builder yBuilder = new Type.Builder(rs, Element.U8(rs)).setX(ySize);
allocY = Allocation.createTyped(rs, yBuilder.create(), Allocation.USAGE_SCRIPT);
}
if (allocU == null || allocU.getBytesSize() != uvSize || allocV == null || allocV.getBytesSize() != uvSize ) {
if (allocU != null) allocU.destroy();
if (allocV != null) allocV.destroy();
Type.Builder uvBuilder = new Type.Builder(rs, Element.U8(rs)).setX(uvSize);
allocU = Allocation.createTyped(rs, uvBuilder.create(), Allocation.USAGE_SCRIPT);
allocV = Allocation.createTyped(rs, uvBuilder.create(), Allocation.USAGE_SCRIPT);
}
if (allocOut == null || allocOut.getBytesSize() != width*height*4) {
Type rgbType = Type.createXY(rs, Element.RGBA_8888(rs), width, height);
if (allocOut != null) allocOut.destroy();
allocOut = Allocation.createTyped(rs, rgbType, Allocation.USAGE_SCRIPT);
}
}
@Retention(RetentionPolicy.SOURCE)
// Enumerate valid values for this interface
@IntDef({Surface.ROTATION_0, Surface.ROTATION_90, Surface.ROTATION_180, Surface.ROTATION_270})
// Create an interface for validating int types
public @interface Rotation {}
/**
* Converts a YUV_420 image into a Bitmap.
* @param yPlane byte[] of Y, with pixel stride 1
* @param uPlane byte[] of U, with pixel stride 2
* @param vPlane byte[] of V, with pixel stride 2
* @param yLine line stride of Y
* @param uvLine line stride of U and V
* @param width width of the output image (note that it is swapped with height for portrait rotation)
* @param height height of the output image
* @param rotation rotation to apply. ROTATION_90 is for portrait back-facing camera.
* @return RGBA_8888 Bitmap image.
*/
public Bitmap YUV420toRGB(byte[] yPlane, byte[] uPlane, byte[] vPlane,
int yLine, int uvLine, int width, int height,
@Rotation int rotation) {
init(yPlane.length, uPlane.length, width, height);
allocY.copyFrom(yPlane);
allocU.copyFrom(uPlane);
allocV.copyFrom(vPlane);
scriptC_yuv2rgb.set_Width(width);
scriptC_yuv2rgb.set_Height(height);
scriptC_yuv2rgb.set_Yline(yLine);
scriptC_yuv2rgb.set_UVline(uvLine);
scriptC_yuv2rgb.set_Yplane(allocY);
scriptC_yuv2rgb.set_Uplane(allocU);
scriptC_yuv2rgb.set_Vplane(allocV);
switch (rotation) {
case Surface.ROTATION_0:
scriptC_yuv2rgb.forEach_YUV420toRGB(allocOut);
break;
case Surface.ROTATION_90:
scriptC_yuv2rgb.forEach_YUV420toRGB_90(allocOut);
break;
case Surface.ROTATION_180:
scriptC_yuv2rgb.forEach_YUV420toRGB_180(allocOut);
break;
case Surface.ROTATION_270:
scriptC_yuv2rgb.forEach_YUV420toRGB_270(allocOut);
break;
}
allocOut.copyTo(bmp);
return bmp;
}
}
And my use case:
public YuvImage(Image image, int rotation,
int cropX, int cropY, int cropWidth, int cropHeight, boolean isGreyOnly, Context context) {
Image.Plane[] imagePlanes = image.getPlanes();
Image.Plane yPlane = imagePlanes[0];
ByteBuffer yBuffer = yPlane.getBuffer();
Image.Plane uPlane = imagePlanes[1];
Image.Plane vPlane = imagePlanes[2];
ByteBuffer uBuffer = uPlane.getBuffer();
ByteBuffer vBuffer = vPlane.getBuffer();
List<byte[]> myList = new ArrayList<>();
myList.add(getByteArrayFromByteBuffer(yBuffer));
myList.add(getByteArrayFromByteBuffer(uBuffer));
myList.add(getByteArrayFromByteBuffer(vBuffer));
int[] strides = new int[]{yPlane.getRowStride(), yPlane.getPixelStride(), uPlane.getRowStride(), uPlane.getPixelStride(), vPlane.getRowStride(), vPlane.getPixelStride()};
yuvTransform(myList, strides, cropWidth, cropHeight, Surface.ROTATION_90);
}
private static byte[] getByteArrayFromByteBuffer(ByteBuffer byteBuffer) {
byte[] bytesArray = new byte[byteBuffer.remaining()];
byteBuffer.get(bytesArray, 0, bytesArray.length);
return bytesArray;
}
private void yuvTransform(List<byte[]> bytesList, int[] strides, int width, int height, @YuvConverter.Rotation int rotation) {
if (rotation == Surface.ROTATION_90 || rotation == Surface.ROTATION_270) {
int _tmp = width;
width = height;
height = _tmp;
}
if (total_calls == 0) {
long start = new Date().getTime();
yuvConverter = new YuvConverter(context.getApplicationContext(), bytesList.get(0).length, bytesList.get(1) == null ? 0 : bytesList.get(1).length, width, height);
}
total_calls += 1;
long startTime = new Date().getTime();
/*
* hardcoded assumptions:
* stride[0] (yLine) >= width
* stride[1] (yPixel) == 1
* stride[2] (uLine) >= width
* stride[3] (uPixel) == 2
* stride[4] (vLine) == uLine
* stride[5] (vPixel) == uPixel
*/
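// An optional guard for these assumptions (illustrative only, not part of the original code) could be:
//   if (strides[1] != 1 || strides[3] != 2 || strides[4] != strides[2] || strides[5] != strides[3])
//       throw new IllegalArgumentException("Unexpected YUV plane layout");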
Bitmap abgr = yuvConverter.YUV420toRGB(bytesList.get(0), bytesList.get(1), bytesList.get(2),
strides[0], strides[2], width, height, rotation);
long bitmapTime = new Date().getTime() - startTime;
total_bitmap += bitmapTime;
int bitmapHeaderSize = 54 + 16;
int bmpSize = bitmapHeaderSize + abgr.getWidth() * abgr.getHeight() * 4;
ByteBuffer byteBuffer = ByteBuffer.allocate(bmpSize);
byte[] byteArray = byteBuffer.array();
byteBuffer.order(ByteOrder.LITTLE_ENDIAN);
byteBuffer.put((byte) 'B');
byteBuffer.put((byte) 'M');
byteBuffer.putLong(byteBuffer.capacity()); // file size; the high 4 bytes of the long fill the reserved field
byteBuffer.putInt(bitmapHeaderSize); // offset to the pixel data
byteBuffer.putInt(40 + 16); // info header size
byteBuffer.putInt(width);
byteBuffer.putInt(-height); // negative height = top-down row order
byteBuffer.putShort((short) 1); // planes
byteBuffer.putShort((short) 32); // bpp
byteBuffer.putInt(3); // BI_BITFIELDS
byteBuffer.putInt(32);
byteBuffer.position(54); // set XBGR
byteBuffer.putInt(0xff); // R
byteBuffer.putInt(0xff00); // G
byteBuffer.putInt(0xff0000); // B
byteBuffer.putInt(0x0); // A
byteBuffer.position(bitmapHeaderSize);
abgr.copyPixelsToBuffer(byteBuffer);
byteBuffer.rewind(); // copyPixelsToBuffer advances the position, so rewind before reading the buffer back out
data = getByteArrayFromByteBuffer(byteBuffer);
total_convert += new Date().getTime() - startTime;
}
But all I get is a completely distorted image like this:
Does anyone know why this is happening?
UPDATE:
After omitting the cropping part and taking the entire image, I noticed that the width and height parameters apparently need to be swapped (which makes sense, since a 90° rotation transposes the output dimensions). When I do that, I get a clearer image, but the colors are still wrong:
UPDATE 2:
I have managed to get the correct colors by changing the conversion formula (I found another one on the Internet).
Now the RenderScript for the conversion looks like this:
#pragma version(1)
#pragma rs java_package_name(com.mypackage)
#pragma rs_fp_relaxed
int32_t width;
int32_t height;
uint inUvPixelStride, inUvRowStride;
rs_allocation yIn,uIn,vIn;
uchar4 __attribute__((kernel)) convertYuvToRGB(uint32_t x, uint32_t y)
{
uint uvIndex= inUvPixelStride * (x/2) + inUvRowStride*(y/2);
uchar yPlane= rsGetElementAt_uchar(yIn, x, y);
uchar u= rsGetElementAt_uchar(uIn, uvIndex);
uchar v= rsGetElementAt_uchar(vIn, uvIndex);
int4 argb;
argb.r = yPlane + v * 1436 / 1024 - 179;
argb.g = yPlane - u * 46549 / 131072 + 44 - v * 93604 / 131072 + 91;
argb.b = yPlane + u * 1814 / 1024 - 227;
argb.a = 255;
uchar4 out = convert_uchar4(clamp(argb, 0, 255));
return out;
}
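For reference, this is roughly how the new kernel is driven from Java (a sketch only; script stands for the instance of the ScriptC class generated from this file, and the allocations mirror the ones set up in YuvConverter above). Note that yIn is now read two-dimensionally via rsGetElementAt_uchar(yIn, x, y), so it has to be a width x height allocation; this sketch assumes the Y row stride equals the width:
// Wire up the script globals and launch the kernel over the output allocation.
// allocY: width x height U8 allocation holding the Y plane;
// allocU/allocV: 1-D U8 allocations holding the U/V plane bytes;
// allocOut: width x height RGBA_8888 allocation backing the output Bitmap.
script.set_width(width);
script.set_height(height);
script.set_inUvPixelStride(uPlane.getPixelStride());
script.set_inUvRowStride(uPlane.getRowStride());
script.set_yIn(allocY);
script.set_uIn(allocU);
script.set_vIn(allocV);
script.forEach_convertYuvToRGB(allocOut);
allocOut.copyTo(bmp);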