Possible Duplicate:
Is JavaScript’s Floating-Point Math Broken?
I have run into a strange arithmetic problem with a multiplication in JavaScript.
$(parent).find('#id_deals-' + i + '-quantity').val()
result -> 10
$(parent).find('#id_deals-' + i + '-price').val()
result -> 3.99
Both of the above multiplied like this:
$(parent).find('#id_deals-' + i + '-price').val() * $(parent).find('#id_deals-' + i + '-quantity').val()
result -> 39.900000000000006
Why is this happening, and what can I do to limit the result to two decimal places?
Is it maybe because 10 has to be 10.0? And if so, how do I convert my value to that format automatically before the actual multiplication?
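For display purposes I can at least clip the raw result to two decimals with the built-in toFixed (note that it returns a string, so this is only a formatting workaround, not exact arithmetic):

var total = 3.99 * 10;          // 39.900000000000006
var display = total.toFixed(2); // "39.90" (a string, rounded to two places)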
Update: Following syazdani's answer, I have tried the suggested BigDecimal library:
It is not well documented, but I got it working like this:
function run(opts) {
    var bd = {"BigDecimal": BigDecimal, "BigInteger": BigInteger, "RoundingMode": RoundingMode};
    var ops = {'*': "multiply", '/': "divide", '+': "add", '-': "subtract"};
    // Construct exact decimals from the string form of each input,
    // so no precision is lost on the way in
    var a = new bd.BigDecimal("" + opts.a);
    var b = new bd.BigDecimal("" + opts.b);
    var op = ops[opts.op];
    if (op === "divide") {
        // Division needs an explicit scale and rounding mode, because
        // the quotient may not terminate (e.g. 1/3)
        return a.divide(b, 300, bd.RoundingMode.HALF_UP());
    } else {
        // Dispatch to multiply/add/subtract by method name
        return a[op].call(a, b);
    }
}

function multiply(a, b) {
    return run({"a": a, "b": b, "op": "*"});
}
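Called with the values from the question it gives the exact result; I read it out with toString() on the returned BigDecimal (the variable names here are just for illustration):

var price = $(parent).find('#id_deals-' + i + '-price').val();  // "3.99"
var qty = $(parent).find('#id_deals-' + i + '-quantity').val(); // "10"
var total = multiply(price, qty);
console.log(total.toString()); // "39.90" instead of 39.900000000000006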