Let's assume that I have:

var array = [1, 2, 3, 4, 4, 5, 5];

I want it to be:

var newArray = [1, 2, 3];

I want to remove the duplicated values completely rather than keeping one of each as a unique value. Is there a way to achieve that through the reduce method?
You could use Array#filter with Array#indexOf and Array#lastIndexOf and keep only the values whose first and last indices are the same, i.e. values that occur exactly once.
var array = [1, 2, 3, 4, 4, 5, 5],
    result = array.filter(function (v, _, a) {
        return a.indexOf(v) === a.lastIndexOf(v);
    });

console.log(result);
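Since the question asks specifically about reduce, the same first-index/last-index check can also be written with it. This is only a sketch along the lines of the answer above (the reduced variable name is mine), not part of the original answer:

var reduced = array.reduce(function (acc, v, _, a) {
    // keep only values whose first and last occurrence coincide
    if (a.indexOf(v) === a.lastIndexOf(v)) {
        acc.push(v);
    }
    return acc;
}, []);

console.log(reduced); // [1, 2, 3]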
Another approach takes a Map and sets the value to false if a key has been seen before. Then filter the array by looking up each element's value in the map.
var array = [1, 2, 3, 4, 4, 5, 5],
    result = array.filter(
        Map.prototype.get,                                    // callback: looks each value up ...
        array.reduce((m, v) => m.set(v, !m.has(v)), new Map)  // ... in this map, passed as thisArg
    );

console.log(result);
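If the thisArg trick reads as too terse, the same Map bookkeeping can be split into two explicit steps. This is only a readability sketch equivalent to the snippet above; the seen and expanded names are mine:

var seen = array.reduce((m, v) => m.set(v, !m.has(v)), new Map());
var expanded = array.filter(v => seen.get(v));

console.log(expanded); // [1, 2, 3]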
I guess it won't have remarkable performance, but I like the idea.
var array = [1, 2, 3, 4, 4, 5, 5],
    res = array.reduce(function (s, a) {
        // keep a only if removing every copy of it shrinks the array by exactly one
        if (array.filter(v => v !== a).length == array.length - 1) {
            s.push(a);
        }
        return s;
    }, []);

console.log(res);
A slightly more efficient solution is to loop over the array once, counting the number of occurrences of each value into an object with .reduce(), and then loop over the array again with .filter() to return only the items that occurred exactly once.
This method also preserves the order of the array, because the second pass iterates over the original array and only uses the object as a lookup table.
var array = [1, 2, 3, 4, 4, 5, 5];

var valueCounts = array.reduce((result, item) => {
    if (!result[item]) {
        result[item] = 0;
    }
    result[item]++;
    return result;
}, {});

var unique = array.filter(function (elem) {
    return !valueCounts[elem] || valueCounts[elem] <= 1;
});

console.log(unique);
Another option is to use an object to count how many times each element appears. This destroys the array order, but it should be much faster on very large arrays.
// Both versions destroy array order.

// ES6 version
function nukeDuplications(arr) {
    "use strict";
    const hash = {};
    arr.forEach(el => {
        const qty = hash[el] || 0;
        hash[el] = qty + 1;
    });
    const ret = [];
    Object.keys(hash).forEach(key => {
        if (hash[key] === 1) {
            ret.push(Number(key));
        }
    });
    return ret;
}
// ES5 version
function nukeDuplicationsEs5(arr) {
    "use strict";
    var hash = {};
    for (var i = 0; i < arr.length; i++) {
        var el = arr[i];
        var qty = hash[el] || 0;
        hash[el] = qty + 1;
    }
    var ret = [];
    for (var key in hash) {
        if (hash.hasOwnProperty(key)) {
            if (hash[key] === 1) {
                ret.push(Number(key));
            }
        }
    }
    return ret;
}
var array = [1, 2, 3, 4, 4, 5, 5];
console.log(nukeDuplications(array));
console.log(nukeDuplicationsEs5(array));
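Both versions also convert the keys back with Number(), so they assume a purely numeric array. If mixed value types matter, the same counting idea can be sketched with a Map, which keeps the original values and, by filtering the source array, its order as well; the nukeDuplicationsMap name is mine and this is not part of the answer above:

// Sketch: count occurrences in a Map, then keep values seen exactly once.
function nukeDuplicationsMap(arr) {
    const counts = new Map();
    arr.forEach(el => counts.set(el, (counts.get(el) || 0) + 1));
    return arr.filter(el => counts.get(el) === 1);
}

console.log(nukeDuplicationsMap([1, 2, 3, 4, 4, 5, 5])); // [1, 2, 3]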
There is a lot of over-complicated and slow-running code here. Here's my solution:
let numbers = [1, 2, 3, 4, 4, 4, 4, 5, 5];
let filtered = [];

numbers.map((n) => {
    if (numbers.indexOf(n) === numbers.lastIndexOf(n)) { // only one instance of n
        filtered.push(n);
    }
});

console.log(filtered);
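Since the array returned by map is discarded here, forEach expresses the same loop without building a throwaway array. A minor stylistic sketch only, using a hypothetical filteredAlt variable:

let filteredAlt = [];
numbers.forEach((n) => {
    if (numbers.indexOf(n) === numbers.lastIndexOf(n)) {
        filteredAlt.push(n);
    }
});

console.log(filteredAlt); // [1, 2, 3]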
You can use this function:
function isUniqueInArray(array, value) {
    let counter = 0;
    for (let index = 0; index < array.length; index++) {
        if (array[index] === value) {
            counter++;
        }
    }
    if (counter === 0) {
        return null; // value not present in the array at all
    }
    return counter === 1;
}
const array = [1, 2, 3, 4, 4, 5, 5];
let uniqueValues = [];

array.forEach(element => {
    if (isUniqueInArray(array, element)) {
        uniqueValues.push(element);
    }
});

console.log(`the unique values are ${uniqueValues}`);
If it helps you, you can install the isUniqueInArray function from my package https://www.npmjs.com/package/jotils or directly from Bit at https://bit.dev/joshk/jotils/is-unique-in-array.
My answer uses map and filter, as below:

const x = [1, 2, 3, 4, 2, 3];

x.map(d => x.filter(i => i == d).length < 2 ? d : null).filter(d => d != null);
// [1, 4]
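The intermediate null entries can also be avoided by doing the same count check directly inside filter. Just a sketch along the same lines, with a hypothetical filteredDirect name:

const filteredDirect = x.filter(d => x.filter(i => i === d).length < 2);
console.log(filteredDirect); // [1, 4]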
Object.values is supported since ES2017 (needless to say, not in IE). The accumulator is an object in which each key is a value from the array, so duplicates are removed because they overwrite the same key. This solution can be risky with misbehaving values (null, undefined, etc.), but may be useful for real-life scenarios.
let NukeDeps = (arr) => {
    return Object.values(arr.reduce((curr, i) => {
        curr[i] = i;
        return curr;
    }, {}));
};
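For reference, a quick call with the question's array (this usage line is an illustration, not part of the original answer); note that it keeps one copy of each value rather than dropping repeated values entirely:

console.log(NukeDeps([1, 2, 3, 4, 4, 5, 5])); // [1, 2, 3, 4, 5]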
I would like to answer my own question with a solution I came up with upon reading it again:
const array = [1, 2, 3, 4, 4, 5, 5];

const filtered = array.filter(item => {
    const { length } = array.filter(currentItem => currentItem === item);
    return length === 1;
});

console.log(filtered);
Try with this code:
var arr = [1, 2, 3, 3, 4, 5, 5, 5, 6, 6];

arr = arr.filter(function (item, index, inputArray) {
    return inputArray.indexOf(item) == index;
});
Also look into this link https://fiddle.jshell.net/5hshjxvr/