I have been working on a facial expression detection application in Android Studio with Java, and I have a few issues I can't figure out.
In short, the application uses the camera to take a picture, displays it in image_view, and then runs facial expression detection on it. My main issue is that when I press the capture button, nothing happens. I have added logs to my methods, but only the ones inside the captureButton click listener show up in Logcat. Could you help me understand why this is happening? Why doesn't it capture the image and then display it in image_view?
This is my MainActivity.java file. (There are a few more methods concerning the camera view/usage/permissions that I left out at first because I don't think they are relevant to this issue; that part works just fine.)
public class MainActivity extends AppCompatActivity implements SurfaceHolder.Callback {
private static final int REQUEST_IMAGE_CAPTURE = 1;
private static final int REQUEST_PERMISSIONS = 2;
private Camera camera;
private ImageView imageView;
private TextView resultTextView;
private SurfaceView cameraPreview;
private String currentPhotoPath;
private MultiLayerNetwork neuralNetwork;
private Bitmap imageBitmap;
Button captureButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
Log.d("onCreateMethod", "Start the onCreate method");
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
captureButton = findViewById(R.id.capture_button);
imageView = findViewById(R.id.image_view);
resultTextView = findViewById(R.id.result_text_view);
cameraPreview = findViewById(R.id.camera_preview);
cameraPreview.getHolder().addCallback(this);
captureButton.setOnClickListener(v -> {
// Check if the camera permission is granted
if (ActivityCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
// Request the camera permission
ActivityCompat.requestPermissions(MainActivity.this,
new String[]{Manifest.permission.CAMERA}, REQUEST_PERMISSIONS);
} else {
// Launch the camera activity
Log.d("dispatchInButton", "Dispatch intent in button activity");
dispatchTakePictureIntent();
Log.d("Button", "Capture button clicked");
}
});
// Load the neural network from the saved model
try {
neuralNetwork = ModelSerializer.restoreMultiLayerNetwork(getAssets().open("model.zip"));
} catch (IOException e) {
e.printStackTrace();
}
}
private File createImageFile() throws IOException {
// Create an image file name
String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
File storageDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
File imageFile = File.createTempFile(
imageFileName, /* prefix */
".jpg", /* suffix */
storageDir /* directory */
);
// Save a file path for use with ACTION_VIEW intents
currentPhotoPath = imageFile.getAbsolutePath();
return imageFile;
}
private void dispatchTakePictureIntent() {
Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
// Ensure that there's a camera activity to handle the intent
if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
// Create the File where the photo should go
File photoFile = null;
try {
photoFile = createImageFile();
} catch (IOException ex) {
ex.printStackTrace();
}
// Continue only if the File was successfully created
if (photoFile != null) {
Uri photoURI = FileProvider.getUriForFile(MainActivity.this, "com.example.newact.fileprovider", photoFile);
takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
}
}
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
// Load the captured image from file path
Bundle extras = data.getExtras();
Bitmap capturedImage = (Bitmap) extras.get("data");
// Bitmap capturedImage = BitmapFactory.decodeFile(currentPhotoPath);
Log.d("onActivRes", "begin onActivRes method");
// Display the captured image
imageView.setImageBitmap(capturedImage);
// Recognize the facial expression
FacialExpressionRecognizer recognizer = new FacialExpressionRecognizer(neuralNetwork);
String expression = recognizer.recognize(capturedImage);
// Display the recognized expression
resultTextView.setText("Recognized expression: " + expression);
// Save the image and recognized expression to Firebase Storage and Firestore
FirebaseImageUploader.uploadImage(capturedImage, new FirebaseImageUploader.ImageUploadCallback() {
@Override
public void onImageUploaded(String imageUrl) {
// Save the timestamp and recognized expression to Firestore
FirebaseExpressionSaver expressionSaver = new FirebaseExpressionSaver();
expressionSaver.saveExpression(expression, new FirebaseExpressionSaver.ExpressionSaveCallback() {
@Override
public void onExpressionSaved() {
// Expression save successful
Toast.makeText(MainActivity.this, "Expression saved to Firebase", Toast.LENGTH_SHORT).show();
}
@Override
public void onExpressionSaveFailed(Exception e) {
// Expression save failed
Toast.makeText(MainActivity.this, "Failed to save expression to Firebase", Toast.LENGTH_SHORT).show();
}
@Override
public void onSuccess() {
// Handle the successful saving of the expression here
}
@Override
public void onFailure(Exception e) {
// Handle the failure of saving the expression here
}
});
}
@Override
public void onImageUploadFailed(Exception e) {
// Image upload failed
Toast.makeText(MainActivity.this, "Failed to upload image to Firebase", Toast.LENGTH_SHORT).show();
}
@Override
public void onSuccess(String imageUrl) {
// Handle the successful upload of the image here
}
@Override
public void onFailure(Exception e) {
// Handle the failure of the image upload here
}
});
}
}
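Since I already have the BitmapFactory.decodeFile(currentPhotoPath) line commented out in onActivityResult, here is roughly what that variant would look like (just a sketch of what I was considering; the recognition and Firebase code would stay the same as above):
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    Log.d("onActivRes", "begin onActivRes method");
    if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
        // Decode the full-size photo from the path saved in createImageFile()
        // instead of reading the "data" thumbnail extra
        // (needs android.graphics.BitmapFactory).
        Bitmap capturedImage = BitmapFactory.decodeFile(currentPhotoPath);
        if (capturedImage == null) {
            Log.d("onActivRes", "decodeFile returned null for " + currentPhotoPath);
            return;
        }
        // Display the captured image
        imageView.setImageBitmap(capturedImage);
        // ... recognition and Firebase upload code unchanged from above ...
    }
}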
Please let me know your thoughts on this!
I have been trying to use logs to see what works and where it stops, but this is as far as I have gotten:
Logcat screenshot: https://i.stack.imgur.com/9GJF8.png
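As a next step, I am thinking of adding more logging inside dispatchTakePictureIntent to narrow down exactly where it stops, something like this (the same method as above, just with extra Log.d calls and an else branch so I can see which step is reached):
private void dispatchTakePictureIntent() {
    Log.d("dispatchIntent", "entered dispatchTakePictureIntent");
    Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
        Log.d("dispatchIntent", "found an activity to handle ACTION_IMAGE_CAPTURE");
        File photoFile = null;
        try {
            photoFile = createImageFile();
            Log.d("dispatchIntent", "created temp file at " + photoFile.getAbsolutePath());
        } catch (IOException ex) {
            ex.printStackTrace();
            Log.d("dispatchIntent", "createImageFile threw an IOException");
        }
        if (photoFile != null) {
            Uri photoURI = FileProvider.getUriForFile(MainActivity.this,
                    "com.example.newact.fileprovider", photoFile);
            Log.d("dispatchIntent", "got content URI " + photoURI);
            takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
            startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
            Log.d("dispatchIntent", "startActivityForResult called");
        }
    } else {
        Log.d("dispatchIntent", "no activity found to handle ACTION_IMAGE_CAPTURE");
    }
}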
I have also added the other methods from MainActivity.java below:
@Override
public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_PERMISSIONS) {
if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// Permission granted, launch the camera activity
dispatchTakePictureIntent();
} else {
// Permission denied, show a message or handle the case accordingly
Toast.makeText(this, "Camera permission denied", Toast.LENGTH_SHORT).show();
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
try {
camera = Camera.open();
setCameraDisplayOrientation();
camera.setPreviewDisplay(holder);
camera.startPreview();
} catch (IOException e) {
e.printStackTrace();
}
}
private void setCameraDisplayOrientation() {
Camera.CameraInfo info = new Camera.CameraInfo();
Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, info);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate for the mirror effect
} else {
result = (info.orientation - degrees + 360) % 360;
}
camera.setDisplayOrientation(result);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
if (camera != null) {
// Stop the preview
camera.stopPreview();
// Set the camera parameters
Camera.Parameters parameters = camera.getParameters();
Camera.Size optimalSize = getOptimalPreviewSize(parameters.getSupportedPreviewSizes(), width, height);
parameters.setPreviewSize(optimalSize.width, optimalSize.height);
// Set the display orientation to 0 degrees
camera.setDisplayOrientation(0);
// Apply the updated parameters
camera.setParameters(parameters);
// Start the preview
camera.startPreview();
}
}
private Camera.Size getOptimalPreviewSize(List<Camera.Size> supportedPreviewSizes, int width, int height) {
final double ASPECT_TOLERANCE = 0.1;
double targetRatio = (double) width / height;
Camera.Size optimalSize = null;
double minDiff = Double.MAX_VALUE;
for (Camera.Size size : supportedPreviewSizes) {
double ratio = (double) size.width / size.height;
if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE)
continue;
double diff = Math.abs(size.width - width) + Math.abs(size.height - height);
if (diff < minDiff) {
optimalSize = size;
minDiff = diff;
}
}
if (optimalSize == null) {
minDiff = Double.MAX_VALUE;
for (Camera.Size size : supportedPreviewSizes) {
double diff = Math.abs(size.width - width) + Math.abs(size.height - height);
if (diff < minDiff) {
optimalSize = size;
minDiff = diff;
}
}
}
return optimalSize;
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
}
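One more thing I am unsure about with these camera methods: whether I should also release the in-app Camera preview when the activity goes into the background (for example while the system camera app from ACTION_IMAGE_CAPTURE is open), since only one app can hold the camera at a time with the old Camera API. A minimal sketch of what I mean, reusing the same camera and cameraPreview fields as above (this is not in my code yet):
@Override
protected void onPause() {
    super.onPause();
    // Release the camera so other apps can open it while this activity is paused.
    if (camera != null) {
        camera.stopPreview();
        camera.release();
        camera = null;
    }
}

@Override
protected void onResume() {
    super.onResume();
    // Re-open the camera and restart the preview when coming back,
    // but only if the SurfaceView's surface is already available.
    if (camera == null && cameraPreview.getHolder().getSurface().isValid()) {
        try {
            camera = Camera.open();
            setCameraDisplayOrientation();
            camera.setPreviewDisplay(cameraPreview.getHolder());
            camera.startPreview();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}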