I am trying to build a unique array of arrays, such that whenever I have a new array to add, it should only be added if it doesn't already exist in the collection.
E.g. store all unique arrays from an input like [[1,1,2], [1,2,1], [1,1,2], [1,2,1], [2,1,1], [2,1,1]], which should result in [[1,1,2], [1,2,1], [2,1,1]].
You can subclass Set for more flexibility in storing objects, by storing the result of calling JSON.stringify on added objects.
class ObjectSet extends Set {
  // Store objects and arrays as JSON strings so they are compared by value rather than by reference.
  add(elem) {
    return super.add(typeof elem === 'object' ? JSON.stringify(elem) : elem);
  }
  has(elem) {
    return super.has(typeof elem === 'object' ? JSON.stringify(elem) : elem);
  }
}
let set = new ObjectSet([[1,1,2],[1,2,1],[1,1,2],[1,2,1],[2,1,1],[2,1,1]]);
console.log([...set]);
console.log([...set].map(s => JSON.parse(s))); // get the original arrays back
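If you need the incremental "only add it if it isn't already there" behaviour from the question, the same set can be checked before each insertion. A minimal sketch, assuming the ObjectSet class above (addIfNew is just an illustrative name):
let seen = new ObjectSet();
function addIfNew(arr) {
  if (seen.has(arr)) return false; // a deep-equal array is already stored
  seen.add(arr);
  return true;
}
console.log(addIfNew([1, 1, 2])); // true  - first occurrence, stored
console.log(addIfNew([1, 1, 2])); // false - duplicate, skipped
console.log([...seen].map(s => JSON.parse(s))); // [[1, 1, 2]]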
If you are OK with using a library, try lodash's uniqWith. It deduplicates arrays or objects using the comparator of your choice, which in your case is a deep-equality check (_.isEqual).
var arrayofarrays = [ [1,1,2], [1,2,1], [1,1,2], [1,2,1], [2,1,1], [2,1,1] ]
const uniqarray = _.uniqWith(arrayofarrays, _.isEqual);
console.log(uniqarray) //=> [[1, 1, 2], [1, 2, 1], [2, 1, 1]]
Bonus: it works on arrays of objects too
var objects = [{ 'x': 1, 'y': { b: 1 } }, { 'x': 1, 'y': { b: 1 } },
               { 'x': 2, 'y': { b: 1 } }, { 'x': 1, 'y': 2 }];
const uniqObjects = _.uniqWith(objects, _.isEqual);
console.log(uniqObjects)
// => [{x: 1, y: {b: 1}}, {x: 2, y: {b: 1}}, {x: 1, y: 2}]
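If you also want to check a single candidate array before pushing it (as in the question), _.isEqual can be combined with Array.prototype.some. This is only a sketch of that idea, not part of uniqWith itself, and pushIfUnique is an illustrative name:
var collection = [[1, 1, 2], [1, 2, 1]];
function pushIfUnique(candidate) {
  // push only if no deeply-equal array is already stored
  if (!collection.some(item => _.isEqual(item, candidate))) {
    collection.push(candidate);
  }
}
pushIfUnique([2, 1, 1]); // added
pushIfUnique([1, 1, 2]); // skipped, a deep-equal entry already exists
console.log(collection); // => [[1, 1, 2], [1, 2, 1], [2, 1, 1]]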
To get around the problem that each array is a distinct object (so two arrays with the same contents never compare as equal), you can stringify each array, filter out the duplicate strings, then map the strings back to arrays. This should do the trick:
var arr = [
  [1, 1, 2],
  [1, 2, 1],
  [1, 1, 2],
  [1, 2, 1],
  [2, 1, 1],
  [2, 1, 1]
];
var unique = arr.map(cur => JSON.stringify(cur))
  .filter(function (curr, index, self) {
    // keep only the first occurrence of each stringified array
    return self.indexOf(curr) == index;
  })
  .map(cur => JSON.parse(cur));
console.log(unique);
One way would be to convert the arrays to JSON strings, use a Set to keep only the unique strings, then convert back again:
var arr = [
  [1, 1, 2],
  [1, 2, 1],
  [1, 1, 2],
  [1, 2, 1],
  [2, 1, 1],
  [2, 1, 1]
];
let set = new Set(arr.map(a => JSON.stringify(a)));
let arr2 = Array.from(set).map(s => JSON.parse(s));
console.log(arr2);
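For the "add one at a time" case from the question, you can keep the Set of JSON strings around instead of rebuilding it each time, so every duplicate check is a single lookup. A rough sketch along the same lines (addUnique and the variable names are just for illustration):
var result = [];
var keys = new Set();
function addUnique(arr) {
  var key = JSON.stringify(arr);
  if (keys.has(key)) return; // duplicate, skip it
  keys.add(key);
  result.push(arr);
}
[[1, 1, 2], [1, 2, 1], [1, 1, 2], [2, 1, 1]].forEach(addUnique);
console.log(result); // [[1, 1, 2], [1, 2, 1], [2, 1, 1]]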