I was looking at the implementation of indexOf
from MDN. This is what I'm curious about:
n = (n > 0 || -1) * Math.floor(Math.abs(n));
It seems to me that (n > 0 || -1) is going to evaluate to true or false, but then it's being multiplied?
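To sanity-check what multiplying a boolean would do, I tried the coercion directly in the console:

    true * 5;   // 5, because true coerces to 1
    false * 5;  // 0, because false coerces to 0

So if (n > 0 || -1) really were just a boolean, this line could only ever produce a non-negative n, which doesn't seem right for a negative fromIndex.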
In case the link ever breaks, this is the indexOf
implementation from MDN:
if (!Array.prototype.indexOf) {
    Array.prototype.indexOf = function (searchElement /*, fromIndex */) {
        "use strict";
        if (this == null) {
            throw new TypeError();
        }
        var t = Object(this);
        var len = t.length >>> 0; // coerce length to an unsigned 32-bit integer
        if (len === 0) {
            return -1;
        }
        var n = 0; // default fromIndex
        if (arguments.length > 1) {
            n = Number(arguments[1]);
            if (n != n) { // shortcut for verifying if it's NaN
                n = 0;
            } else if (n != 0 && n != Infinity && n != -Infinity) {
                n = (n > 0 || -1) * Math.floor(Math.abs(n));
            }
        }
        if (n >= len) {
            return -1;
        }
        // a negative fromIndex counts back from the end of the array
        var k = n >= 0 ? n : Math.max(len - Math.abs(n), 0);
        for (; k < len; k++) {
            // the `k in t` check skips holes in sparse arrays
            if (k in t && t[k] === searchElement) {
                return k;
            }
        }
        return -1;
    };
}
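For reference, this is the behavior I see with a negative fromIndex, both with the native indexOf and with this polyfill:

    [1, 2, 3].indexOf(2, -2); // 1  -- search starts at len - 2 = index 1
    [1, 2, 3].indexOf(1, -2); // -1 -- index 0 is before the start position

So negative fromIndex values are clearly handled, and that (n > 0 || -1) line appears to be what handles them; I just don't follow how.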