I wrote a simple command-line program to figure out how long it will take to transfer a large file over a given interface. No matter which connection I choose, it always produces output like this:

It will take Infinity seconds or Infinity minutes to transfer 10.0 gigabytes/10240.0 megabytes

I don't understand why. Here is the code:
import java.util.Scanner;

public class TransferTime {
    public static void main(String[] args) {
        Scanner input = new Scanner(System.in);
        double speed = 0;
        double gigabytes = 10;

        System.out.println("This will help you find out how long it will take to transfer a file");
        System.out.println();
        System.out.println("How many gigabytes do you want to transfer?");
        gigabytes = input.nextDouble();

        System.out.println("Enter 2 for USB 2, 3 for USB 3, f for FireWire 800 or t for Thunderbolt");
        String connection = input.next();
        System.out.println();

        if (connection == "2") {
            speed = 480;
        } else if (connection == "f") {
            speed = 800;
        } else if (connection == "3") {
            speed = 625;
        } else if (connection == "f") {
            speed = 100;
        } else if (connection == "t") {
            speed = 1280;
        }

        double megabytes = gigabytes * 1024;
        double seconds = megabytes / speed;
        double minutes = seconds / 60;

        System.out.println("It will take " + seconds + " seconds or " + minutes
                + " minutes to transfer " + gigabytes + " gigabytes/"
                + megabytes + " megabytes");
        input.close();
    }
}
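
The likely culprit is the string comparison: in Java, == between two String values compares object references, not contents, and Scanner.next() returns a fresh String object, so none of the branches match, speed stays 0, and megabytes / speed is a double division by zero, which doesn't throw in Java but yields Infinity. Replacing each connection == "..." test with connection.equals("...") should fix it. Here is a minimal self-contained sketch (the class name StringCompareDemo is just for illustration) that reproduces both effects:

public class StringCompareDemo {
    public static void main(String[] args) {
        // A distinct String object, like the one Scanner.next() returns:
        String fromInput = new String("2");
        System.out.println(fromInput == "2");      // false: == compares references
        System.out.println(fromInput.equals("2")); // true: equals() compares contents
        System.out.println(10240.0 / 0);           // Infinity: double division by zero
    }
}

Separately, the second connection == "f" branch can never run even after the fix, because the earlier "f" branch is tested first.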