I'm trying to print out the integers from 11 to 30, each multiplied by 0.015:
for ( let i = 11; i <= 30; i++ ) {
    console.log( `${i} * 0.015 = ${i * 0.015}` );
}
I expect the output to look like this:
11 * 0.015 = 0.165
12 * 0.015 = 0.18
13 * 0.015 = 0.195
14 * 0.015 = 0.21
15 * 0.015 = 0.225
16 * 0.015 = 0.24
17 * 0.015 = 0.255
18 * 0.015 = 0.27
19 * 0.015 = 0.285
20 * 0.015 = 0.3
21 * 0.015 = 0.315
22 * 0.015 = 0.33
23 * 0.015 = 0.345
24 * 0.015 = 0.36
25 * 0.015 = 0.375
26 * 0.015 = 0.39
27 * 0.015 = 0.405
28 * 0.015 = 0.42
29 * 0.015 = 0.435
30 * 0.015 = 0.45
Instead I get this:
11 * 0.015 = 0.16499999999999998
12 * 0.015 = 0.18
13 * 0.015 = 0.195
14 * 0.015 = 0.21
15 * 0.015 = 0.22499999999999998
16 * 0.015 = 0.24
17 * 0.015 = 0.255
18 * 0.015 = 0.27
19 * 0.015 = 0.285
20 * 0.015 = 0.3
21 * 0.015 = 0.315
22 * 0.015 = 0.32999999999999996
23 * 0.015 = 0.345
24 * 0.015 = 0.36
25 * 0.015 = 0.375
26 * 0.015 = 0.39
27 * 0.015 = 0.40499999999999997
28 * 0.015 = 0.42
29 * 0.015 = 0.435
30 * 0.015 = 0.44999999999999996
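Logging a single product directly, outside the template literal, gives the same value, so it doesn't look like a string-formatting problem:

console.log( 11 * 0.015 ); // 0.16499999999999998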
I looked around on Stack Overflow, and it seems JavaScript has an issue with floating-point precision, so I tried one of the methods I saw for rounding a number to an arbitrary step:
// Round `number` to the nearest multiple of `step`.
function round( number, step ) {
    const inverseStep = 1 / step;
    return Math.round( number * inverseStep ) / inverseStep;
}
for ( let i = 11; i <= 30; i++ ) {
    const rounded = round( i * 0.015, 0.015 );
    console.log( `${i} * 0.015 = ${rounded}` );
}
And I still get the same results, with the same strange-looking long decimals.
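My guess is that the rounding helper can't fix this, because the inverse step itself doesn't come out exact, so dividing by it just reintroduces the error:

console.log( 1 / 0.015 ); // 66.66666666666667 for me, not an exact 66.666…
console.log( 0.1 + 0.2 ); // 0.30000000000000004, the classic example of the same issue

How can I get the products to print the way I expect, e.g. 0.165 instead of 0.16499999999999998?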