I am having an issue with retrieving data from my database.
When I open the application, the application will get data from the firebase in onCreate()
and add this to the recycler view.
The data is structured like this:
"Items" {
"some category" {
"useruid" {
"some key" {
"Title" : "Some time",
"Info" : "Some info about the item",
"photoitem" : "A image that is compressed / encoded to a base 64 string"
"useruid" : "the user uid, which i use for something else"
}
}
}
}
Now, at the time I'm testing, I have 3 items ("some keys"). When I am retrieving the data, I can see in the Android Profiler that the application uses 16 MB of data to retrieve these three items, which is way too much. Do applications normally use this much data when retrieving images, or am I doing something wrong?
NB: The structure in the database makes it easy to get items from different categories, and only show items that the user does have / does not have.
Any tip/help would be really helpful. Since 26.11 until today, my application has used 9.4 GB of data just from testing (opening the app, then closing it, etc.).
This is how I save the images to the firebase:
/**
 * Callback for startActivityForResult (comment translated: "runs if the capture succeeded").
 * Saves the captured photo to the gallery, then downsamples/rotates it and hands it to Firebase.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    // FIX: forward the result to the framework so fragments/parents also see it.
    super.onActivityResult(requestCode, resultCode, data);
    Log.d(TAG, " Før requestCode check");
    if (requestCode == REQUEST_IMAGE_CAPTURE) {
        Log.d(TAG, " Før resultCode check");
        if (resultCode == RESULT_OK) {
            // Save the captured photo into the device gallery.
            galleryAddPic();
            Log.d(TAG, " Bilde lagt til i galleri");
            try {
                // Downscale/rotate the image, show it, and encode it for Firebase.
                handleSamplingAndRotationBitmap(getApplicationContext(), photoURI);
            } catch (IOException e) {
                // FIX: keep the full stack trace instead of a bare marker message.
                Log.e(TAG, "Fungerte ikke", e);
            }
        }
    }
}
//Gjør om bildet til base64 (en String), slik at det kan legges til i databasen som en string
public void addImageToFirebase(Bitmap bitmap) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.PNG, 100, baos);
imageEncoded = Base64.encodeToString(baos.toByteArray(), Base64.DEFAULT);
Log.d(TAG, "Image added to firebase");
try {
baos.close();
}
catch (IOException e) {
Log.d(TAG, e.getMessage());
}
}
/**
 * Loads the image at {@code selectedImage} downsampled to roughly 1024x1024, rotates it
 * according to its EXIF orientation, shows it in the preview ImageView, and encodes it
 * for Firebase via {@link #addImageToFirebase}.
 *
 * FIX: the second InputStream was never closed (resource leak); both decode streams are
 * now managed with try-with-resources.
 *
 * @throws IOException if the content resolver cannot open the image
 */
public void handleSamplingAndRotationBitmap(Context context, Uri selectedImage)
        throws IOException {
    final int MAX_HEIGHT = 1024;
    final int MAX_WIDTH = 1024;
    // First decode with inJustDecodeBounds=true to read the dimensions only.
    final BitmapFactory.Options options = new BitmapFactory.Options();
    options.inJustDecodeBounds = true;
    try (InputStream boundsStream = context.getContentResolver().openInputStream(selectedImage)) {
        BitmapFactory.decodeStream(boundsStream, null, options);
    }
    // Calculate inSampleSize for the real, downsampled decode.
    options.inSampleSize = calculateInSampleSize(options, MAX_WIDTH, MAX_HEIGHT);
    options.inJustDecodeBounds = false;
    Bitmap img;
    try (InputStream decodeStream = context.getContentResolver().openInputStream(selectedImage)) {
        img = BitmapFactory.decodeStream(decodeStream, null, options);
    }
    img = rotateImageIfRequired(context, img, selectedImage);
    // Show the processed image in the preview, then encode it for the database.
    ImageView imageview = (ImageView) findViewById(R.id.userImageInput);
    imageview.setImageBitmap(img);
    addImageToFirebase(img);
}
/**
 * Computes a {@code BitmapFactory.Options.inSampleSize} so the decoded bitmap is
 * roughly bounded by {@code reqWidth x reqHeight} (per the Android "Loading Large
 * Bitmaps Efficiently" guide).
 *
 * FIX: the sample size now starts at 1 instead of 2 — previously, images already
 * smaller than the requested size were halved unnecessarily. Pixel counts are also
 * computed in float to avoid int overflow on very large images.
 */
private static int calculateInSampleSize(BitmapFactory.Options options,
                                         int reqWidth, int reqHeight) {
    // Raw height and width of the image as reported by the bounds-only decode.
    final int height = options.outHeight;
    final int width = options.outWidth;
    int inSampleSize = 1;
    if (height > reqHeight || width > reqWidth) {
        // Ratio of each dimension to the requested size.
        final int heightRatio = Math.round((float) height / (float) reqHeight);
        final int widthRatio = Math.round((float) width / (float) reqWidth);
        // The smaller ratio guarantees both final dimensions are >= the request.
        inSampleSize = Math.min(heightRatio, widthRatio);
        // Cap total pixels at 2x the requested amount; sample down further if needed.
        final float totalPixels = (float) width * (float) height;
        final float totalReqPixelsCap = (float) reqWidth * (float) reqHeight * 2;
        while (totalPixels / (inSampleSize * inSampleSize) > totalReqPixelsCap) {
            inSampleSize++;
        }
    }
    return inSampleSize;
}
/**
 * Rotates {@code img} according to the EXIF orientation stored with the image at
 * {@code selectedImage}; returns the bitmap unchanged when no rotation is needed.
 *
 * FIX: the InputStream opened for EXIF reading was never closed (resource leak);
 * it is now closed in a finally block.
 *
 * @throws IOException if the EXIF data cannot be read
 */
private static Bitmap rotateImageIfRequired(Context context, Bitmap img, Uri selectedImage) throws IOException {
    InputStream input = context.getContentResolver().openInputStream(selectedImage);
    try {
        ExifInterface ei;
        // ExifInterface(InputStream) is only available from API 24 onward.
        if (Build.VERSION.SDK_INT > 23) {
            ei = new ExifInterface(input);
        } else {
            ei = new ExifInterface(selectedImage.getPath());
        }
        int orientation = ei.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
        switch (orientation) {
            case ExifInterface.ORIENTATION_ROTATE_90:
                return rotateImage(img, 90);
            case ExifInterface.ORIENTATION_ROTATE_180:
                return rotateImage(img, 180);
            case ExifInterface.ORIENTATION_ROTATE_270:
                return rotateImage(img, 270);
            default:
                return img;
        }
    } finally {
        if (input != null) {
            input.close();
        }
    }
}
And this is how the image string gets retrieved and decoded:
//Retrieving from firebase:
/**
 * Subscribes to /Items/category and fills {@code lowagedata} with every item that does
 * not belong to the current user, then refreshes the RecyclerView adapter.
 *
 * FIX: the {@code getKey().equals(thisUser)} check tests the per-user snapshot key, not
 * each child, so it is loop-invariant and is hoisted out of the loop; {@code onCancelled}
 * no longer swallows database errors silently.
 */
private void prepareItemData() {
    database = FirebaseDatabase.getInstance();
    DatabaseReference dbref = database.getReference("Items");
    dbref.child("category").addChildEventListener(new ChildEventListener() {
        @Override
        public void onChildAdded(DataSnapshot dataSnapshot, String s) {
            // Each child here is one user's node; skip the current user's own items.
            if (dataSnapshot.exists() && !dataSnapshot.getKey().equals(thisUser)) {
                for (DataSnapshot ds : dataSnapshot.getChildren()) {
                    Items item1 = ds.getValue(Items.class);
                    lowagedata.add(item1);
                }
                iAdapter.notifyDataSetChanged();
            }
        }
        @Override
        public void onChildChanged(DataSnapshot dataSnapshot, String s) {
            // Not needed for the initial load.
        }
        @Override
        public void onChildRemoved(DataSnapshot dataSnapshot) {
            // Not needed for the initial load.
        }
        @Override
        public void onChildMoved(DataSnapshot dataSnapshot, String s) {
            // Not needed for the initial load.
        }
        @Override
        public void onCancelled(DatabaseError databaseError) {
            // FIX: surface the error instead of ignoring it.
            Log.e(TAG, "prepareItemData listener cancelled", databaseError.toException());
        }
    });
}
//Here is the decoder:
/**
 * Decodes a Base64 image string loaded from Firebase back into a {@link Bitmap}.
 *
 * @param image the Base64-encoded image data
 * @return the decoded bitmap, or {@code null} if the bytes cannot be decoded
 */
public static Bitmap decodeFromFirebaseBase64(String image) throws IOException {
    final byte[] imageBytes = android.util.Base64.decode(image, Base64.DEFAULT);
    return BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.length);
}