1

I'm developing an Android app that has a camera preview activity. It calls takePicture() every 2 seconds using a timer and does some processing on the captured image in PictureCallback. From the Android documentation, I learned that PictureCallback runs on the same thread that called Camera.open().

Also, it's recommended to call takePicture() from a separate thread. What's the best way to call startPreview() after an image is captured?

I would want the processing on each capture to happen on separate threads and the camera preview should continue in the main UI thread. What's the best way to implement this using AsyncTask()?

/**
 * Activity that shows a live camera preview and, every few seconds, captures a
 * still frame, crops it to the MRZ (machine-readable zone) region, runs it
 * through {@code MRZ.getMRZ()}, and saves the result as a PNG if an MRZ was
 * found.
 *
 * Threading: the capture cycle is driven by a {@link Timer} (background
 * thread) which launches an {@link MRZ_OCR} AsyncTask; the
 * {@link Camera.PictureCallback} itself is delivered on the thread that
 * called {@link Camera#open()} (the main thread here).
 */
public class CameraActivity extends AppCompatActivity{

// Media-type selectors understood by getOutputMediaFile().
public static final int MEDIA_TYPE_IMAGE = 1;
public static final int MEDIA_TYPE_VIDEO = 2;
public static String TAG = "Exception";
// Dimensions of the MRZ crop window; measured after the first layout pass.
int viewWidth = 0;
int viewHeight = 0;
private Camera mCamera;
private CameraPreview mPreview;
private ImageView iv;
private RelativeLayout rl;
private Camera.PictureCallback mPicture;
private MRZ_OCR mrz = null;
// Handle to the capture timer so it can be cancelled in onPause(); without
// this, pending TimerTasks keep firing after releaseCamera() nulls mCamera
// and crash with a NullPointerException.
private Timer mCaptureTimer;


@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_camera);

    rl = (RelativeLayout) findViewById(R.id.rel_camera);
    iv = (ImageView) findViewById(R.id.black_above);

    // BUG FIX: getWidth()/getHeight() always return 0 inside onCreate()
    // because the first layout pass has not happened yet. Defer the
    // measurement until the views have been laid out.
    rl.post(new Runnable() {
        @Override
        public void run() {
            viewWidth = iv.getWidth();
            viewHeight = rl.getHeight() - 2 * iv.getHeight();
        }
    });

    // Create an instance of Camera (null if the camera is unavailable).
    mCamera = getCameraInstance();

    mPreview = new CameraPreview(this, mCamera);
    FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
    preview.addView(mPreview);

    // Kick off a capture cycle every 4 seconds. The TimerTask runs on the
    // Timer's background thread; takePicture() itself is issued from the
    // MRZ_OCR AsyncTask.
    mCaptureTimer = new Timer();
    mCaptureTimer.schedule(new TimerTask() {
        @Override
        public void run() {
            if (mCamera == null) {
                return; // camera already released (activity paused)
            }
            mCamera.startPreview();
            mrz = new MRZ_OCR();
            mrz.execute();
        }
    }, 4000, 4000);

    mPicture = new Camera.PictureCallback() {

        /**
         * Delivered on the thread that called Camera.open() — the main
         * thread here. NOTE(review): the decode/crop/OCR below is heavy
         * work for the UI thread; consider handing {@code data} off to an
         * AsyncTask instead.
         */
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {

            // Crop the full frame down to the MRZ strip.
            Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
            bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);

            // Verify that the crop actually contains an MRZ; null if not.
            bm = MRZ.getMRZ(bm);

            if (bm != null) {
                ByteArrayOutputStream stream = new ByteArrayOutputStream();
                bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
                byte[] byteArray = stream.toByteArray();
                createImageFile(byteArray);
                Toast.makeText(getApplicationContext(), "Pic Saved", Toast.LENGTH_LONG).show();
            }
        }
    };
}

@Override
protected void onPause() {
    super.onPause();
    // Stop scheduling new captures BEFORE releasing the camera, otherwise a
    // pending TimerTask could run against a null mCamera.
    if (mCaptureTimer != null) {
        mCaptureTimer.cancel();
        mCaptureTimer = null;
    }
    releaseCamera();              // release the camera immediately on pause event
}

/** Releases the camera for other applications; safe to call repeatedly. */
private void releaseCamera() {
    if (mCamera != null) {
        mCamera.release();
        mCamera = null;
    }
}

/**
 * Background task that triggers a still capture, waits briefly so the
 * captured frame stays visible, then restores the live preview.
 */
private class MRZ_OCR extends AsyncTask<Void, Void, Void> {

    @Override
    protected Void doInBackground(Void... params) {
        // Snapshot the field: releaseCamera() may null it concurrently.
        Camera camera = mCamera;
        if (camera == null) {
            return null; // camera released while this task was queued
        }
        camera.takePicture(null, null, mPicture);

        // Sleep for however long, you could store this in a variable and
        // have it updated by a menu item which the user selects.
        try {
            Thread.sleep(3000); // 3 second preview
        } catch (InterruptedException e) {
            // Re-assert the interrupt so callers can observe it.
            Thread.currentThread().interrupt();
        }

        return null;
    }

    @Override
    protected void onPostExecute(Void result) {
        // Return to the live camera feed, unless the camera is gone.
        if (mCamera != null) {
            mCamera.startPreview();
        }
    }
}

/** Converts a dp value to raw pixels for the current screen density. */
public static int pxFromDp(final Context context, final float dp) {
    return (int) (dp * context.getResources().getDisplayMetrics().density);
}

/**
 * A safe way to get an instance of the Camera object.
 *
 * @return an open Camera, or null if the camera is unavailable.
 */
public static Camera getCameraInstance() {
    Camera c = null;
    try {
        c = Camera.open(); // attempt to get a Camera instance
    } catch (Exception ignored) {
        // Camera is not available (in use or does not exist); caller
        // handles the null return.
    }
    return c;
}

/**
 * Builds a timestamped output File under Pictures/MyCameraApp.
 *
 * @param type MEDIA_TYPE_IMAGE or MEDIA_TYPE_VIDEO.
 * @return the target File, or null if the directory cannot be created or
 *         the type is unknown.
 */
private static File getOutputMediaFile(int type) {

    File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES), "MyCameraApp");

    // Create the storage directory if it does not exist.
    if (!mediaStorageDir.exists()) {
        if (!mediaStorageDir.mkdirs()) {
            Log.d("MyCameraApp", "failed to create directory");
            return null;
        }
    }

    // Explicit locale: the timestamp is a file name, not user-facing text,
    // so it must not vary with the device locale.
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss", java.util.Locale.US).format(new Date());
    File mediaFile;
    if (type == MEDIA_TYPE_IMAGE) {
        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                "IMG_" + timeStamp + ".jpg");
    } else if (type == MEDIA_TYPE_VIDEO) {
        mediaFile = new File(mediaStorageDir.getPath() + File.separator +
                "VID_" + timeStamp + ".mp4");
    } else {
        return null;
    }

    return mediaFile;
}

/**
 * Writes the given bytes to a new timestamped image file. Failures are
 * logged, not thrown.
 */
private static void createImageFile(byte[] byteArray) {
    File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
    if (pictureFile == null) {
        Log.d(TAG, "Error creating media file, check storage permissions: ");
        return;
    }

    // try-with-resources guarantees the stream is closed even if write()
    // fails (the original leaked the descriptor on error).
    try (FileOutputStream fos = new FileOutputStream(pictureFile)) {
        fos.write(byteArray);
    } catch (FileNotFoundException e) {
        Log.d(TAG, "File not found: " + e.getMessage());
    } catch (IOException e) {
        Log.d(TAG, "Error accessing file: " + e.getMessage());
    }
}

}
Community
  • 1
  • 1
rsd_unleashed
  • 151
  • 2
  • 12

2 Answers

0

I don't know the takePicture() API in detail, but I think what you need to do is put this code in a separate thread.

 Bitmap bm = BitmapFactory.decodeByteArray(data, 0, data.length);
            bm = Bitmap.createBitmap(bm, 0, pxFromDp(CameraActivity.this, 120), viewWidth, viewHeight);


            //Verify if it has MRZ
            bm = MRZ.getMRZ(bm);


            if (bm != null) {


                ByteArrayOutputStream stream = new ByteArrayOutputStream();
                bm.compress(Bitmap.CompressFormat.PNG, 100, stream);
                byte[] byteArray = stream.toByteArray();
                createImageFile(byteArray);

            }

decodeByteArray is a time-consuming operation — especially in your app, where it is performed once every 2 seconds — and it will block the main thread. I think that is also the reason it's recommended to call takePicture() from a separate thread.

Yua
  • 23
  • 8
0

You already answered your question. Pass byte[] data to an AsyncTask:

private class PictureConverter extends AsyncTask<Void, Void, Void> {
    private byte[] data;
    private Camera camera;
    public PictureConverter(byte[] _data, Camera _camera) {
        data = _data;
        camera = _camera;

    }
    protected Void doInBackground(Void... data) {
        Camera.Parameters parameters = camera.getParameters();

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        YuvImage yuvImage = new YuvImage(data, parameters.getPreviewFormat(), parameters.getPreviewSize().width, parameters.getPreviewSize().height, null);
        yuvImage.compressToJpeg(new Rect(0, 0, parameters.getPreviewSize().width, parameters.getPreviewSize().height), 90, out);
        byte[] imageBytes = out.toByteArray();
        Bitmap bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);


        out.flush();
        out.close();

        //TODO save the image

        return null;
    }

    protected void onProgressUpdate() {
    }

    protected void onPostExecute() {
        //TODO report that the image got saved
    }
}
IIIIIIIIIIIIIIIIIIIIII
  • 3,958
  • 5
  • 45
  • 70