I created the following code to calculate numbers multiplied by eleven using the method I learnt in elementary school:
/**
 * Multiplies a non-negative integer by 11 using the schoolbook trick:
 * each digit of the product is the sum of two adjacent digits of the
 * input (with the ends padded by 0), plus any carry from the right.
 *
 * Because it works on the decimal digits as a string, the result stays
 * exact for values above Number.MAX_SAFE_INTEGER — provided the input is
 * passed as a digit STRING (a number literal that large is already
 * imprecise before this function ever sees it).
 *
 * @param {number|string} number - Non-negative integer or digit string.
 * @returns {string} The decimal digits of number * 11.
 */
function multiplyby11(number) {
  const digits = String(number).split('').map((d) => Number.parseInt(d, 10));
  const out = []; // product digits, accumulated least-significant first
  let carry = 0;

  // Build the product right-to-left in a single pass. The product has
  // digits.length + 1 positions; position i (from the left) is
  // digits[i - 1] + digits[i], treating out-of-range indices as 0.
  for (let i = digits.length; i >= 0; --i) {
    const left = i > 0 ? digits[i - 1] : 0;
    const right = i < digits.length ? digits[i] : 0;
    const sum = left + right + carry;
    out.push(sum % 10);
    carry = Math.floor(sum / 10); // at most 1, since sum <= 9 + 9 + 1
  }

  // The original carry loop dropped a carry out of the leading digit
  // (multiply[-1]++ on a plain array is a no-op for join), so e.g.
  // 99 * 11 came back as "089" instead of "1089". Keep that final carry.
  if (carry > 0) {
    out.push(carry);
  }

  return out.reverse().join('');
}
If I input a number like 15487548796454858
my code returns
170363036761003438
But if I just do a plain JavaScript calculation of 15487548796454858 * 11
it returns:
170363036761003420
which is completely incorrect.
Am I running into some sort of number overflow in JavaScript, or did I miss something?