I wrote the following Javascript function that is meant to return true
when exactly one of the arguments is truthy:
function onlyOne( a, b, c) {
return (a && !b && !c) ||
(b && !a && !c) ||
(c && !a && !b);
}
It works as expected, except in a couple of cases where it returns 0
:
EXPECT true:
true = onlyOne( 1, 0, 0)
true = onlyOne( 0, 1, 0)
true = onlyOne( 0, 0, 1)
EXPECT false:
0 = onlyOne( 0, 0, 0)
false = onlyOne( 0, 1, 1)
false = onlyOne( 1, 0, 1)
0 = onlyOne( 1, 1, 0)
false = onlyOne( 1, 1, 1)
WHY does it return 0
rather than false in the two cases above? I see that those are the only two negative cases where c
is 0, but I don't see how that makes a difference. Obviously there's something about Javascript logical operators and/or type conversion that I'm not understanding here.
Here is the full code that produces the above output. I'm running this from the Windows command line using node.js 4.3.0, if that makes any difference:
function testOnlyOne( a, b, c) {
result = onlyOne(a, b, c);
console.log("%j = onlyOne( %j, %j, %j)", result, a, b, c);
}
function onlyOne( a, b, c) {
return (a && !b && !c) ||
(b && !a && !c) ||
(c && !a && !b);
}
console.log("\nEXPECT true:");
testOnlyOne( 1, 0, 0);
testOnlyOne( 0, 1, 0);
testOnlyOne( 0, 0, 1);
console.log("\nEXPECT false:");
testOnlyOne( 0, 0, 0);
testOnlyOne( 0, 1, 1);
testOnlyOne( 1, 0, 1);
testOnlyOne( 1, 1, 0);
testOnlyOne( 1, 1, 1);