(I'm not a JS developer!) I have an array of scores that I multiply by a standard set of weightings to obtain a weighted overall score, which I then want to express to one decimal place.
After abandoning the battle with precision errors when calculating with the raw decimals, I took the approach of first multiplying the weightings by 100. This is all working, except for one bizarre behaviour: 2.55 is rounded to 2.5 instead of 2.6! WTF?
No doubt this is JS 101, but can someone enlighten me as to WHY this happens and how to address it?
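For context, this is the sort of thing I kept running into when working with the raw decimals (the classic binary floating point issue, as far as I can tell), which is why I switched to whole numbers:

console.log(0.1 + 0.2);         // 0.30000000000000004
console.log(0.1 + 0.2 === 0.3); // false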
const weightings = [0.15, 0.10, 0.10, 0.15, 0.15, 0.25, 0.10];
const scores1 = [2, 2, 2, 2, 2, 3, 2];
const scores2 = [2, 2, 2, 2, 2, 3, 3];
const scores3 = [3, 3, 1, 2, 3, 3, 2];
const adjWeightings = weightings.map(x => x * 100); // scale to integers to get rid of decimals altogether
const weightedResult1 = scores1.reduce((sum, score, i) => sum + score * adjWeightings[i], 0) / 100;
console.log(weightedResult1); // expected: 2.25 (pass)
console.log(weightedResult1.toPrecision(2)); // expected: "2.3" (pass)
const weightedResult2 = scores2.reduce((sum, score, i) => sum + score * adjWeightings[i], 0) / 100;
console.log(weightedResult2); // expected: 2.35 (pass)
console.log(weightedResult2.toPrecision(2)); // expected: "2.4" (pass)
const weightedResult3 = scores3.reduce((sum, score, i) => sum + score * adjWeightings[i], 0) / 100;
console.log(weightedResult3); // expected: 2.55 (pass)
console.log(weightedResult3.toPrecision(2)); // expected: "2.6" (fail! why is this not "2.6"?)
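One extra data point, in case it helps with the diagnosis. Printing more digits with toFixed(20) (just a diagnostic peek at the stored values, if I'm using it right) suggests the numbers aren't being stored as the exact decimals I think they are:

console.log(weightedResult2.toFixed(20)); // slightly ABOVE 2.35 (≈ 2.3500000000000000888), and toPrecision(2) rounds it up to "2.4"
console.log(weightedResult3.toFixed(20)); // slightly BELOW 2.55 (≈ 2.5499999999999998224), and toPrecision(2) rounds it down to "2.5"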