I just built a simple JavaScript counter in two versions. The first adds a constant (Clklid) to an initial value once every second; the second adds 1 to the same initial value at a proportionally shorter interval (1000 / Clklid ms), so both should grow at the same rate. I'm not rounding any numbers, yet if I let the counters run long enough, the two displayed values diverge considerably. Any idea why that is?
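Just to spell out the arithmetic I expect (this snippet is not part of the page, it only illustrates the intended rates):

const Clklid = 3.55988;
// Counter 1: Clklid added once per 1000 ms -> Clklid units per second.
var rate1 = Clklid / 1;
// Counter 2: 1 added once per (1000 / Clklid) ms ≈ 280.9 ms -> also Clklid units per second.
var rate2 = 1 / ((1000 / Clklid) / 1000);
console.log(rate1, rate2); // both 3.55988 in exact arithmetic

Here is the actual code: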
window.onload = function() {
    var Today = new Date();
    var Jan = new Date("January 1 2020 00:00");
    // Seconds elapsed since January 1 2020.
    var dif = (Today.getTime() - Jan.getTime()) / 1000;
    const Clklid = 3.55988;
    var new1 = Clklid * dif;
    var new2 = Clklid * dif;
    var text = document.getElementById("text");
    var text2 = document.getElementById("text2");

    // Counter 1: add Clklid once per second.
    setInterval(function() {
        new1 += Clklid;
        // new1 = Math.trunc(new1);
        text.innerHTML = new1 + " g emisí CO2";
    }, 1000);

    // Counter 2: add 1 every 1000 / Clklid ms.
    setInterval(function() {
        new2 += 1;
        // new2 = Math.trunc(new2);
        text2.innerHTML = new2 + " g emisí CO2";
    }, 1000 / Clklid);
};
<div id="text"></div>
<div id="text2"></div>