What's the simplest, library-free code for implementing array intersections in javascript? I want to write
intersection([1,2,3], [2,3,4,5])
and get
[2, 3]
Use a combination of Array.prototype.filter and Array.prototype.includes:
const filteredArray = array1.filter(value => array2.includes(value));
For older browsers, with Array.prototype.indexOf and without an arrow function:
var filteredArray = array1.filter(function(n) {
return array2.indexOf(n) !== -1;
});
NB! Both .includes and .indexOf internally compare elements in the array using ===, so if the array contains objects, only the object references are compared (not their contents). If you want to specify your own comparison logic, use Array.prototype.some instead.
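For example, here is a minimal sketch (my own, not from the original answer) that intersects two arrays of objects by comparing an assumed id property with some:
const users1 = [{ id: 1 }, { id: 2 }, { id: 3 }];
const users2 = [{ id: 2 }, { id: 3 }, { id: 4 }];
// Keep the objects from users1 whose id also appears somewhere in users2.
const common = users1.filter(a => users2.some(b => b.id === a.id));
console.log(common); // [{ id: 2 }, { id: 3 }]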
Destructive seems simplest, especially if we can assume the input is sorted:
/* destructively finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
* The state of the input arrays is undefined when
* the function returns; they should probably
* be discarded.
*
* Should take O(n) operations, where
* n = MIN(a.length, b.length)
*/
function intersection_destructive(a, b)
{
var result = [];
while( a.length > 0 && b.length > 0 )
{
if (a[0] < b[0] ){ a.shift(); }
else if (a[0] > b[0] ){ b.shift(); }
else /* they're equal */
{
result.push(a.shift());
b.shift();
}
}
return result;
}
Non-destructive has to be a hair more complicated, since we’ve got to track indices:
/* finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
*
* Should take O(n) operations, where
* n = MIN(a.length, b.length)
*/
function intersect_safe(a, b)
{
var ai=0, bi=0;
var result = [];
while( ai < a.length && bi < b.length )
{
if (a[ai] < b[bi] ){ ai++; }
else if (a[ai] > b[bi] ){ bi++; }
else /* they're equal */
{
result.push(a[ai]);
ai++;
bi++;
}
}
return result;
}
If your environment supports ECMAScript 6 Set, one simple and supposedly efficient (see specification link) way:
function intersect(a, b) {
var setA = new Set(a);
var setB = new Set(b);
var intersection = new Set([...setA].filter(x => setB.has(x)));
return Array.from(intersection);
}
Shorter, but less readable (also without creating the additional intersection Set):
function intersect(a, b) {
var setB = new Set(b);
return [...new Set(a)].filter(x => setB.has(x));
}
Note that when using sets you will only get distinct values, thus new Set([1, 2, 3, 3]).size evaluates to 3.
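For example, with the intersect function above, a value repeated in the first array appears only once in the result:
console.log(intersect([1, 1, 2, 3], [1, 2])); // [1, 2] (the repeated 1 is collapsed)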
Using Underscore.js or lodash.js
_.intersection( [0,345,324] , [1,0,324] ) // gives [0,324]
// Return elements of array a that are also in b in linear time:
function intersect(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
// Example:
console.log(intersect([1,2,3], [2,3,4,5]));
I recommend the succinct solution above, which outperforms the other implementations on large inputs. If performance on small inputs matters, check the alternatives below.
Alternatives and performance comparison:
See the following snippet for alternative implementations and check https://jsperf.com/array-intersection-comparison for performance comparisons.
function intersect_for(a, b) {
const result = [];
const alen = a.length;
const blen = b.length;
for (let i = 0; i < alen; ++i) {
const ai = a[i];
for (let j = 0; j < blen; ++j) {
if (ai === b[j]) {
result.push(ai);
break;
}
}
}
return result;
}
function intersect_filter_indexOf(a, b) {
return a.filter(el => b.indexOf(el) !== -1);
}
function intersect_filter_in(a, b) {
const map = b.reduce((map, el) => {map[el] = true; return map}, {});
return a.filter(el => el in map);
}
function intersect_for_in(a, b) {
const result = [];
const map = {};
for (let i = 0, length = b.length; i < length; ++i) {
map[b[i]] = true;
}
for (let i = 0, length = a.length; i < length; ++i) {
if (a[i] in map) result.push(a[i]);
}
return result;
}
function intersect_filter_includes(a, b) {
return a.filter(el => b.includes(el));
}
function intersect_filter_has_this(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
function intersect_filter_has_arrow(a, b) {
const set = new Set(b);
return a.filter(el => set.has(el));
}
function intersect_for_has(a, b) {
const result = [];
const set = new Set(b);
for (let i = 0, length = a.length; i < length; ++i) {
if (set.has(a[i])) result.push(a[i]);
}
return result;
}
Results in Firefox 53:
Ops/sec on large arrays (10,000 elements):
filter + has (this) 523 (this answer)
for + has 482
for-loop + in 279
filter + in 242
for-loops 24
filter + includes 14
filter + indexOf 10
Ops/sec on small arrays (100 elements):
for-loop + in 384,426
filter + in 192,066
for-loops 159,137
filter + includes 104,068
filter + indexOf 71,598
filter + has (this) 43,531 (this answer)
filter + has (arrow function) 35,588
My contribution in ES6 terms: in general, it finds the intersection of an array with an indefinite number of arrays provided as arguments.
Array.prototype.intersect = function(...a) {
return [this,...a].reduce((p,c) => p.filter(e => c.includes(e)));
}
var arrs = [[0,2,4,6,8],[4,5,6,7],[4,6]],
arr = [0,1,2,3,4,5,6,7,8,9];
document.write("<pre>" + JSON.stringify(arr.intersect(...arrs)) + "</pre>");
How about just using associative arrays?
function intersect(a, b) {
var d1 = {};
var d2 = {};
var results = [];
for (var i = 0; i < a.length; i++) {
d1[a[i]] = true;
}
for (var j = 0; j < b.length; j++) {
d2[b[j]] = true;
}
for (var k in d1) {
if (d2[k])
results.push(k);
}
return results;
}
edit:
// new version
function intersect(a, b) {
var d = {};
var results = [];
for (var i = 0; i < b.length; i++) {
d[b[i]] = true;
}
for (var j = 0; j < a.length; j++) {
if (d[a[j]])
results.push(a[j]);
}
return results;
}
The performance of @atk's implementation for sorted arrays of primitives can be improved by using .pop rather than .shift.
function intersect(array1, array2) {
var result = [];
// Don't destroy the original arrays
var a = array1.slice(0);
var b = array2.slice(0);
var aLast = a.length - 1;
var bLast = b.length - 1;
while (aLast >= 0 && bLast >= 0) {
if (a[aLast] > b[bLast] ) {
a.pop();
aLast--;
} else if (a[aLast] < b[bLast] ){
b.pop();
bLast--;
} else /* they're equal */ {
result.push(a.pop());
b.pop();
aLast--;
bLast--;
}
}
return result;
}
I created a benchmark using jsPerf. It's about three times faster to use .pop.
If you need to have it handle intersecting multiple arrays:
const intersect = (a1, a2, ...rest) => {
const a12 = a1.filter(value => a2.includes(value))
if (rest.length === 0) { return a12; }
return intersect(a12, ...rest);
};
console.log(intersect([1,2,3,4,5], [1,2], [1, 2, 3,4,5], [2, 10, 1]))
Sort both arrays, then check them element by element from index 0, building a new array from the matches. Something like this (not tested thoroughly, though):
function intersection(x,y){
x.sort();y.sort();
var i = 0, j = 0, ret = [];
while(i<x.length && j<y.length){
if(x[i]<y[j])i++;
else if(y[j]<x[i])j++;
else {
ret.push(x[i]);
i++,j++;
}
}
return ret;
}
alert(intersection([1,2,3], [2,3,4,5]));
PS: The algorithm is only intended for numbers and normal strings; intersection of arbitrary object arrays may not work.
Using jQuery:
var a = [1,2,3];
var b = [2,3,4,5];
var c = $(b).not($(b).not(a));
alert(c);
A tiny tweak to the smallest one here (the filter/indexOf solution), namely creating an index of the values in one of the arrays using a JavaScript object, will reduce it from O(N*M) to "probably" linear time. source1 source2
function intersect(a, b) {
var aa = {};
a.forEach(function(v) { aa[v]=1; });
return b.filter(function(v) { return v in aa; });
}
This isn't the very simplest solution (it's more code than filter+indexOf), nor is it the very fastest (probably slower by a constant factor than intersect_safe()), but seems like a pretty good balance. It is on the very simple side, while providing good performance, and it doesn't require pre-sorted inputs.
For arrays containing only strings or numbers you can do something with sorting, as per some of the other answers. For the general case of arrays of arbitrary objects I don't think you can avoid doing it the long way. The following will give you the intersection of any number of arrays provided as parameters to arrayIntersection:
var arrayContains = Array.prototype.indexOf ?
function(arr, val) {
return arr.indexOf(val) > -1;
} :
function(arr, val) {
var i = arr.length;
while (i--) {
if (arr[i] === val) {
return true;
}
}
return false;
};
function arrayIntersection() {
var val, arrayCount, firstArray, i, j, intersection = [], missing;
var arrays = Array.prototype.slice.call(arguments); // Convert arguments into a real array
// Search for common values
firstArray = arrays.pop();
if (firstArray) {
j = firstArray.length;
arrayCount = arrays.length;
while (j--) {
val = firstArray[j];
missing = false;
// Check val is present in each remaining array
i = arrayCount;
while (!missing && i--) {
if ( !arrayContains(arrays[i], val) ) {
missing = true;
}
}
if (!missing) {
intersection.push(val);
}
}
}
return intersection;
}
arrayIntersection( [1, 2, 3, "a"], [1, "a", 2], ["a", 1] ); // Gives [1, "a"];
Simplest, fastest O(n) and shortest way:
function intersection (a, b) {
const setA = new Set(a);
return b.filter(value => setA.has(value));
}
console.log(intersection([1,2,3], [2,3,4,5]))
@nbarbosa has almost the same answer, but it casts both arrays to Set and then back to an array. There is no need for the extra conversion.
Another indexed approach able to process any number of arrays at once:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = 0;
index[v]++;
};
};
var retv = [];
for (var i in index) {
if (index[i] == arrLength) retv.push(i);
};
return retv;
};
It works only for values that can be evaluated as strings and you should pass them as an array like:
intersect ([arr1, arr2, arr3...]);
...but it transparently accepts an object as the parameter, or as any of the elements to be intersected (always returning an array of common values). Examples:
intersect ({foo: [1, 2, 3, 4], bar: {a: 2, j:4}}); // [2, 4]
intersect ([{x: "hello", y: "world"}, ["hello", "user"]]); // ["hello"]
EDIT: I just noticed that this is, in a way, slightly buggy.
That is: I coded it assuming the input arrays cannot themselves contain repetitions (as the provided examples don't).
But if the input arrays do contain repetitions, this produces wrong results. Example (using the implementation above):
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]);
// Expected: [ '1' ]
// Actual: [ '1', '3' ]
Fortunately this is easy to fix by simply adding second level indexing. That is:
Change:
if (index[v] === undefined) index[v] = 0;
index[v]++;
by:
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
...and:
if (index[i] == arrLength) retv.push(i);
by:
if (Object.keys(index[i]).length == arrLength) retv.push(i);
Complete example:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
};
};
var retv = [];
for (var i in index) {
if (Object.keys(index[i]).length == arrLength) retv.push(i);
};
return retv;
};
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]); // [ '1' ]
With some restrictions on your data, you can do it in linear time!
For positive integers: use an array mapping the values to a "seen/not seen" boolean.
function intersectIntegers(array1,array2) {
var seen=[],
result=[];
for (var i = 0; i < array1.length; i++) {
seen[array1[i]] = true;
}
for (var i = 0; i < array2.length; i++) {
if ( seen[array2[i]])
result.push(array2[i]);
}
return result;
}
There is a similar technique for objects: take a dummy key, set it to "true" for each element in array1, then look for this key in elements of array2. Clean up when you're done.
function intersectObjects(array1,array2) {
var result=[];
var key="tmpKey_intersect"
for (var i = 0; i < array1.length; i++) {
array1[i][key] = true;
}
for (var i = 0; i < array2.length; i++) {
if (array2[i][key])
result.push(array2[i]);
}
for (var i = 0; i < array1.length; i++) {
delete array1[i][key];
}
return result;
}
Of course you need to be sure the key didn't appear before, otherwise you'll be destroying your data...
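One way to avoid that risk (my own sketch, not part of the original answer; the intersectObjectsSafe name is mine) is to use a Symbol as the temporary marker, since a symbol key cannot collide with any existing string property:
function intersectObjectsSafe(array1, array2) {
  var result = [];
  var key = Symbol('intersect marker'); // unique; cannot clash with existing properties
  for (var i = 0; i < array1.length; i++) {
    array1[i][key] = true;
  }
  for (var i = 0; i < array2.length; i++) {
    if (array2[i][key]) result.push(array2[i]);
  }
  for (var i = 0; i < array1.length; i++) {
    delete array1[i][key];
  }
  return result;
}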
function intersection(A,B){
var result = new Array();
for (i=0; i<A.length; i++) {
for (j=0; j<B.length; j++) {
if (A[i] == B[j] && $.inArray(A[i],result) == -1) {
result.push(A[i]);
}
}
}
return result;
}
For simplicity:
// Usage
const intersection = allLists
.reduce(intersect, allValues)
.reduce(removeDuplicates, []);
// Implementation
const intersect = (intersection, list) =>
intersection.filter(item =>
list.some(x => x === item));
const removeDuplicates = (uniques, item) =>
uniques.includes(item) ? uniques : uniques.concat(item);
// Example Data
const somePeople = [bob, doug, jill];
const otherPeople = [sarah, bob, jill];
const morePeople = [jack, jill];
const allPeople = [...somePeople, ...otherPeople, ...morePeople];
const allGroups = [somePeople, otherPeople, morePeople];
// Example Usage
const intersection = allGroups
.reduce(intersect, allPeople)
.reduce(removeDuplicates, []);
intersection; // [jill]
Benefits:
dirt simple
data-centric
works for arbitrary number of lists
works for arbitrary lengths of lists
works for arbitrary types of values
works for arbitrary sort order
retains shape (order of first appearance in any array)
exits early where possible
memory safe, short of tampering with Function / Array prototypes
Drawbacks:
higher memory usage
higher CPU usage
requires an understanding of reduce
requires understanding of data flow
You wouldn't want to use this for 3D engine or kernel work, but if you have problems getting this to run in an event-based app, your design has bigger problems.
I'll contribute with what has been working out best for me:
if (!Array.prototype.intersect){
Array.prototype.intersect = function (arr1) {
var r = [], o = {}, l = this.length, i, v;
for (i = 0; i < l; i++) {
o[this[i]] = true;
}
l = arr1.length;
for (i = 0; i < l; i++) {
v = arr1[i];
if (v in o) {
r.push(v);
}
}
return r;
};
}
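For example, with the method installed (a usage sketch, not from the original answer):
console.log([1, 2, 3].intersect([2, 3, 4, 5])); // [2, 3]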
A functional approach with ES2015
A functional approach must consider using only pure functions without side effects, each of which is only concerned with a single job.
These restrictions enhance the composability and reusability of the functions involved.
// small, reusable auxiliary functions
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const apply = f => x => f(x);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// run it
console.log( intersect(xs) (ys) );
Please note that the native Set type is used, which has an advantageous
lookup performance.
Avoid duplicates
Obviously, repeatedly occurring items from the first Array are preserved, while the second Array is de-duplicated. This may or may not be the desired behavior. If you need a unique result, just apply dedupe to the first argument:
// auxiliary functions
const apply = f => x => f(x);
const comp = f => g => x => f(g(x));
const afrom = apply(Array.from);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// de-duplication
const dedupe = comp(afrom) (createSet);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// unique result
console.log( intersect(dedupe(xs)) (ys) );
Compute the intersection of any number of Arrays
If you want to compute the intersection of an arbitrary number of Arrays, just compose intersect with foldl. Here is a convenience function:
// auxiliary functions
const apply = f => x => f(x);
const uncurry = f => (x, y) => f(x) (y);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const foldl = f => acc => xs => xs.reduce(uncurry(f), acc);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// intersection of an arbitrary number of Arrays
const intersectn = (head, ...tail) => foldl(intersect) (head) (tail);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
const zs = [0,1,2,3,4,5,6];
// run
console.log( intersectn(xs, ys, zs) );
Use .reduce to build a map and .filter to find the intersection. The delete inside the .filter callback lets us treat the second array as though it were a unique set.
function intersection (a, b) {
var seen = a.reduce(function (h, k) {
h[k] = true;
return h;
}, {});
return b.filter(function (k) {
var exists = seen[k];
delete seen[k];
return exists;
});
}
I find this approach pretty easy to reason about. It runs in linear time overall, with constant-time lookups.
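For example (assuming the function above), the delete keeps a value that repeats in the second array from appearing twice in the result:
console.log(intersection([1, 2, 3], [2, 2, 3, 4])); // [2, 3] (the second 2 is filtered out)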
I have written an intersection function which can even detect the intersection of arrays of objects based on a particular property of those objects.
For instance,
if arr1 = [{id: 10}, {id: 20}]
and arr2 = [{id: 20}, {id: 25}]
and we want intersection based on the id property, then the output should be :
[{id: 20}]
As such, the function (note: ES6 code) is:
const intersect = (arr1, arr2, accessors = [v => v, v => v]) => {
const [fn1, fn2] = accessors;
const set = new Set(arr2.map(v => fn2(v)));
return arr1.filter(value => set.has(fn1(value)));
};
and you can call the function as:
intersect(arr1, arr2, [elem => elem.id, elem => elem.id])
Also note: this function treats the first array as the primary array, so the intersection result consists of elements of the primary array.
This function avoids the N^2 problem by taking advantage of the power of dictionaries. It loops through each array only once, plus a third, shorter loop to build the final result.
It also supports numbers, strings, and objects.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [];
// Returns a unique reference string for the type and value of the element
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
array1.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
}
});
array2.forEach(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
mergedElems[key].inArray2 = true;
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
return result;
}
If there is a special case this does not handle, it can likely be solved just by modifying the generateStrKey function. The trick of this function is that it represents each distinct datum uniquely, according to its type and value.
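For illustration (generateStrKey is an inner function, so this standalone copy of its logic, under the exampleKey name I chose, just shows the key format it produces), distinct types map to distinct keys:
// Standalone copy of the key logic from array_intersect, for illustration only.
function exampleKey(elem) {
  var typeOfElem = typeof elem;
  if (typeOfElem === 'object') {
    typeOfElem += Object.prototype.toString.call(elem);
  }
  return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
console.log(exampleKey(1));        // number__1__1
console.log(exampleKey('1'));      // string__1__"1"
console.log(exampleKey([1, 2]));   // object[object Array]__1,2__[1,2]
console.log(exampleKey({ a: 1 })); // object[object Object]__[object Object]__{"a":1}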
This variant adds some performance improvements: it avoids the loops entirely when either array is empty, and it walks the shorter array first, so that once all of its values have been found in the second array the loop exits early.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [],
firstArray, secondArray,
firstN = 0,
secondN = 0;
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
// Executes the loops only if both arrays have values
if (array1.length && array2.length)
{
// Begins with the shortest array to optimize the algorithm
if (array1.length < array2.length) {
firstArray = array1;
secondArray = array2;
} else {
firstArray = array2;
secondArray = array1;
}
firstArray.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
// Increases the counter of unique values in the first array
firstN++;
}
});
secondArray.some(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
if (!mergedElems[key].inArray2) {
mergedElems[key].inArray2 = true;
// Increases the counter of matches
secondN++;
// If all elements of first array have coincidence, then exits the loop
return (secondN === firstN);
}
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
}
return result;
}
Here is the underscore.js implementation:
_.intersection = function(array) {
if (array == null) return [];
var result = [];
var argsLength = arguments.length;
for (var i = 0, length = array.length; i < length; i++) {
var item = array[i];
if (_.contains(result, item)) continue;
for (var j = 1; j < argsLength; j++) {
if (!_.contains(arguments[j], item)) break;
}
if (j === argsLength) result.push(item);
}
return result;
};
Source: http://underscorejs.org/docs/underscore.html#section-62
Create an object using one array, then loop through the second array to check whether each value exists as a key.
function intersection(arr1, arr2) {
var myObj = {};
var myArr = [];
for (var i = 0, len = arr1.length; i < len; i += 1) {
if(myObj[arr1[i]]) {
myObj[arr1[i]] += 1;
} else {
myObj[arr1[i]] = 1;
}
}
for (var j = 0, len = arr2.length; j < len; j += 1) {
if(myObj[arr2[j]] && myArr.indexOf(arr2[j]) === -1) {
myArr.push(arr2[j]);
}
}
return myArr;
}
I think using an object internally can help with computations and could be performant too.
// Approach maintains a count of each element and works for negative elements too
function intersect(a,b){
const A = {};
a.forEach((v)=>{A[v] ? ++A[v] : A[v] = 1});
const B = {};
b.forEach((v)=>{B[v] ? ++B[v] : B[v] = 1});
const C = {};
Object.entries(A).map((x)=>C[x[0]] = Math.min(x[1], B[x[0]] || 0)); // use 0 when the value is absent from b, so Array() below gets a valid length
return Object.entries(C).map((x)=>Array(x[1]).fill(Number(x[0]))).flat();
}
const x = [1,1,-1,-1,0,0,2,2];
const y = [2,0,1,1,1,1,0,-1,-1,-1];
const result = intersect(x,y);
console.log(result); // (7) [0, 0, 1, 1, 2, -1, -1]
I am using a Map here, though a plain object could be used as well.
// find the intersection of 2 arrays (assumes neither array contains duplicates itself)
const intersections = (arr1, arr2) => {
  let arrf = arr1.concat(arr2);
  let map = new Map();
  let intersection = [];
  for (let i = 0; i < arrf.length; i++) {
    if (map.get(arrf[i])) {
      // seen before, so the value occurs in both arrays
      map.set(arrf[i], false);
    } else {
      map.set(arrf[i], true);
    }
  }
  map.forEach((v, k) => { if (!v) { intersection.push(k); } });
  return intersection;
}
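For example (given the no-internal-duplicates assumption noted in the comment):
console.log(intersections([1, 2, 3], [2, 3, 4, 5])); // [2, 3]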
This is a proposed standard: with the current Stage 2 proposal https://github.com/tc39/proposal-set-methods, you could use
mySet.intersection(mySet2);
Until then, you could use Immutable.js's Set, which inspired that proposal
Immutable.Set(mySet).intersect(mySet2)
I extended tarulen's answer to work with any number of arrays. It also should work with non-integer values.
function intersect() {
const last = arguments.length - 1;
var seen={};
var result=[];
for (var i = 0; i < last; i++) {
for (var j = 0; j < arguments[i].length; j++) {
if (seen[arguments[i][j]]) {
seen[arguments[i][j]] += 1;
}
else if (!i) {
seen[arguments[i][j]] = 1;
}
}
}
for (var i = 0; i < arguments[last].length; i++) {
if ( seen[arguments[last][i]] === last)
result.push(arguments[last][i]);
}
return result;
}
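For example, a quick check of the multi-array version:
console.log(intersect([1, 2, 3, 4], [2, 3, 4], [3, 4, 5])); // [3, 4]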
If your arrays are sorted, this should run in O(n), where n is min( a.length, b.length )
function intersect_1d( a, b ){
var out=[], ai=0, bi=0, acurr, bcurr, last=Number.MIN_SAFE_INTEGER;
while( ( acurr=a[ai] )!==undefined && ( bcurr=b[bi] )!==undefined ){
if( acurr < bcurr){
if( last===acurr ){
out.push( acurr );
}
last=acurr;
ai++;
}
else if( acurr > bcurr){
if( last===bcurr ){
out.push( bcurr );
}
last=bcurr;
bi++;
}
else {
out.push( acurr );
last=acurr;
ai++;
bi++;
}
}
return out;
}
Related
This question already has answers here:
Cartesian product of multiple arrays in JavaScript
(35 answers)
Closed 1 year ago.
I'm having trouble coming up with code to generate combinations from n number of arrays with m number of elements in them, in JavaScript. I've seen similar questions about this for other languages, but the answers incorporate syntactic or library magic that I'm unsure how to translate.
Consider this data:
[[0,1], [0,1,2,3], [0,1,2]]
3 arrays, with a different number of elements in them. What I want to do is get all combinations by combining an item from each array.
For example:
0,0,0 // item 0 from array 0, item 0 from array 1, item 0 from array 2
0,0,1
0,0,2
0,1,0
0,1,1
0,1,2
0,2,0
0,2,1
0,2,2
And so on.
If the number of arrays were fixed, it would be easy to make a hard coded implementation. But the number of arrays may vary:
[[0,1], [0,1]]
[[0,1,3,4], [0,1], [0], [0,1]]
Any help would be much appreciated.
Here is a quite simple and short one using a recursive helper function:
function cartesian(...args) {
var r = [], max = args.length-1;
function helper(arr, i) {
for (var j=0, l=args[i].length; j<l; j++) {
var a = arr.slice(0); // clone arr
a.push(args[i][j]);
if (i==max)
r.push(a);
else
helper(a, i+1);
}
}
helper([], 0);
return r;
}
Usage:
cartesian([0,1], [0,1,2,3], [0,1,2]);
To make the function take an array of arrays, just change the signature to function cartesian(args) instead of using rest parameter syntax.
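A minimal sketch of that array-of-arrays variant (same logic as the answer above, just a different signature; the cartesianOf name is mine):
function cartesianOf(args) { // args is an array of arrays, e.g. [[0,1], [0,1,2,3], [0,1,2]]
  var r = [], max = args.length - 1;
  function helper(arr, i) {
    for (var j = 0, l = args[i].length; j < l; j++) {
      var a = arr.slice(0); // clone arr
      a.push(args[i][j]);
      if (i == max) r.push(a);
      else helper(a, i + 1);
    }
  }
  helper([], 0);
  return r;
}
cartesianOf([[0, 1], [0, 1, 2, 3], [0, 1, 2]]);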
I suggest a simple recursive generator function:
// JS
function* cartesianIterator(head, ...tail) {
const remainder = tail.length ? cartesianIterator(...tail) : [[]];
for (let r of remainder) for (let h of head) yield [h, ...r];
}
// get values:
const cartesian = items => [...cartesianIterator(...items)]; // spread the array of arrays into head/tail
console.log(cartesian(input)); // input: an array of arrays, e.g. [[0,1], [0,1,2,3], [0,1,2]]
// TS
function* cartesianIterator<T>(items: T[][]): Generator<T[]> {
const remainder = items.length > 1 ? cartesianIterator(items.slice(1)) : [[]];
for (let r of remainder) for (let h of items.at(0)!) yield [h, ...r];
}
// get values:
const cartesian = <T>(items: T[][]) => [...cartesianIterator(items)];
console.log(cartesian(input));
You could take an iterative approach by building sub arrays.
var parts = [[0, 1], [0, 1, 2, 3], [0, 1, 2]],
result = parts.reduce((a, b) => a.reduce((r, v) => r.concat(b.map(w => [].concat(v, w))), []));
console.log(result.map(a => a.join(', ')));
After doing a little research I discovered a previous related question:
Finding All Combinations of JavaScript array values
I've adapted some of the code from there so that it returns an array of arrays containing all of the permutations:
function getCombinations(arraysToCombine) {
var divisors = [];
for (var i = arraysToCombine.length - 1; i >= 0; i--) {
divisors[i] = divisors[i + 1] ? divisors[i + 1] * arraysToCombine[i + 1].length : 1;
}
function getPermutation(n, arraysToCombine) {
var result = [],
curArray;
for (var i = 0; i < arraysToCombine.length; i++) {
curArray = arraysToCombine[i];
result.push(curArray[Math.floor(n / divisors[i]) % curArray.length]);
}
return result;
}
var numPerms = arraysToCombine[0].length;
for(var i = 1; i < arraysToCombine.length; i++) {
numPerms *= arraysToCombine[i].length;
}
var combinations = [];
for(var i = 0; i < numPerms; i++) {
combinations.push(getPermutation(i, arraysToCombine));
}
return combinations;
}
I've put a working copy at http://jsfiddle.net/7EakX/ that takes the array you gave earlier ([[0,1], [0,1,2,3], [0,1,2]]) and outputs the result to the browser console.
const charSet = [["A", "B"],["C", "D", "E"],["F", "G", "H", "I"]];
console.log(charSet.reduce((a,b)=>a.flatMap(x=>b.map(y=>x+y)),['']))
Just for fun, here's a more functional variant of the solution in my first answer:
function cartesian() {
var r = [], args = Array.from(arguments);
args.reduceRight(function(cont, factor, i) {
return function(arr) {
for (var j=0, l=factor.length; j<l; j++) {
var a = arr.slice(); // clone arr
a[i] = factor[j];
cont(a);
}
};
}, Array.prototype.push.bind(r))(new Array(args.length));
return r;
}
Alternatively, for full speed, we can dynamically compile our own loops:
function cartesian() {
return (cartesian.cache[arguments.length] || cartesian.compile(arguments.length)).apply(null, arguments);
}
cartesian.cache = [];
cartesian.compile = function compile(n) {
var args = [],
indent = "",
up = "",
down = "";
for (var i=0; i<n; i++) {
var arr = "$"+String.fromCharCode(97+i),
ind = String.fromCharCode(105+i);
args.push(arr);
up += indent+"for (var "+ind+"=0, l"+arr+"="+arr+".length; "+ind+"<l"+arr+"; "+ind+"++) {\n";
down = indent+"}\n"+down;
indent += " ";
up += indent+"arr["+i+"] = "+arr+"["+ind+"];\n";
}
var body = "var res=[],\n arr=[];\n"+up+indent+"res.push(arr.slice());\n"+down+"return res;";
return cartesian.cache[n] = new Function(args, body);
}
var f = function(arr){
if(typeof arr !== 'object'){
return false;
}
arr = arr.filter(function(elem){ return (elem !== null); }); // remove empty elements - make sure length is correct
var len = arr.length;
var nextPerm = function(){ // increase the counter(s)
var i = 0;
while(i < len)
{
arr[i].counter++;
if(arr[i].counter >= arr[i].length){
arr[i].counter = 0;
i++;
}else{
return false;
}
}
return true;
};
var getPerm = function(){ // get the current permutation
var perm_arr = [];
for(var i = 0; i < len; i++)
{
perm_arr.push(arr[i][arr[i].counter]);
}
return perm_arr;
};
var new_arr = [];
for(var i = 0; i < len; i++) // set up a counter property inside the arrays
{
arr[i].counter = 0;
}
while(true)
{
new_arr.push(getPerm()); // add current permutation to the new array
if(nextPerm() === true){ // get next permutation, if returns true, we got them all
break;
}
}
return new_arr;
};
Here's another way of doing it. I treat the indices of all of the arrays like a number whose digits are all different bases (like time and dates), using the length of the array as the radix.
So, using your first set of data, the first digit is base 2, the second is base 4, and the third is base 3. The counter starts 000, then goes 001, 002, then 010. The digits correspond to indices in the arrays, and since order is preserved, this is no problem.
I have a fiddle with it working here: http://jsfiddle.net/Rykus0/DS9Ea/1/
and here is the code:
// Arbitrary base x number class
var BaseX = function(initRadix){
this.radix = initRadix ? initRadix : 1;
this.value = 0;
this.increment = function(){
return( (this.value = (this.value + 1) % this.radix) === 0);
}
}
function combinations(input){
var output = [], // Array containing the resulting combinations
counters = [], // Array of counters corresponding to our input arrays
remainder = false, // Did adding one cause the previous digit to rollover?
temp; // Holds one combination to be pushed into the output array
// Initialize the counters
for( var i = input.length-1; i >= 0; i-- ){
counters.unshift(new BaseX(input[i].length));
}
// Get all possible combinations
// Loop through until the first counter rolls over
while( !remainder ){
temp = []; // Reset the temporary value collection array
remainder = true; // Always increment the last array counter
// Process each of the arrays
for( i = input.length-1; i >= 0; i-- ){
temp.unshift(input[i][counters[i].value]); // Add this array's value to the result
// If the counter to the right rolled over, increment this one.
if( remainder ){
remainder = counters[i].increment();
}
}
output.push(temp); // Collect the results.
}
return output;
}
// Input is an array of arrays
console.log(combinations([[0,1], [0,1,2,3], [0,1,2]]));
You can use a recursive function to get all combinations
const charSet = [["A", "B"],["C", "D", "E"],["F", "G", "H", "I"]];
let loopOver = (arr, str = '', final = []) => {
if (arr.length > 1) {
arr[0].forEach(v => loopOver(arr.slice(1), str + v, final))
} else {
arr[0].forEach(v => final.push(str + v))
}
return final
}
console.log(loopOver(charSet))
This code can still be shortened using a ternary, but I prefer the first version for readability 😊
const charSet = [["A", "B"],["C", "D", "E"],["F", "G", "H", "I"]];
let loopOver = (arr, str = '') => arr[0].map(v => arr.length > 1 ? loopOver(arr.slice(1), str + v) : str + v).flat()
console.log(loopOver(charSet))
Another implementation with ES6 recursive style
Array.prototype.cartesian = function(a,...as){
return a ? this.reduce((p,c) => (p.push(...a.cartesian(...as).map(e => as.length ? [c,...e] : [c,e])),p),[])
: this;
};
console.log(JSON.stringify([0,1].cartesian([0,1,2,3], [[0],[1],[2]])));
Let's assume that I have ;
var array = [1,2,3,4,4,5,5];
I want it to be;
var newArray = [1,2,3];
I want to remove the duplicates completely rather than keeping them as unique values. Is there a way to achieve that through the reduce method?
You could use Array#filter with Array#indexOf and Array#lastIndexOf and return only the values which share the same index.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(function (v, _, a) {
return a.indexOf(v) === a.lastIndexOf(v);
});
console.log(result);
Another approach: take a Map and set the value to false if a key has been seen before, then filter the array by the values stored in the map.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(
Map.prototype.get,
array.reduce((m, v) => m.set(v, !m.has(v)), new Map)
);
console.log(result);
I guess it won't have remarkable performance, but I like the idea.
var array = [1,2,3,4,4,5,5],
res = array.reduce(function(s,a) {
if (array.filter(v => v !== a).length == array.length-1) {
s.push(a);
}
return s;
}, []);
console.log(res);
A slightly more efficient solution is to loop over the array once, counting the occurrences of each value and storing the counts in an object using .reduce(), and then loop over the array again with .filter() to return only the items that occurred once.
This method will also preserve the order of the array, as it merely uses the object keys as references - it does not iterate over them.
var array = [1,2,3,4,4,5,5];
var valueCounts = array.reduce((result, item) => {
if (!result[item]) {
result[item] = 0;
}
result[item]++;
return result;
}, {});
var unique = array.filter(function (elem) {
return !valueCounts[elem] || valueCounts[elem] <= 1;
});
console.log(unique)
Another option is to use an object to track how many times an element is used. This will destroy the array order, but it should be much faster on very large arrays.
// Both versions destroy array order.
// ES6 version
function nukeDuplications(arr) {
"use strict";
const hash = {};
arr.forEach(el => {
const qty = hash[el] || 0;
hash[el] = qty + 1;
});
const ret = [];
Object.keys(hash).forEach(key => {
if (hash[key] === 1) {
ret.push(Number(key));
}
})
return ret;
}
// ES5 version
function nukeDuplicationsEs5(arr) {
"use strict";
var hash = {};
for (var i = 0; i < arr.length; i++) {
var el = arr[i];
var qty = hash[el] || 0;
hash[el] = qty + 1;
};
var ret = [];
for (let key in hash) {
if (hash.hasOwnProperty(key)) {
if (hash[key] === 1) {
ret.push(Number(key));
}
}
}
return ret;
}
var array = [1, 2, 3, 4, 4, 5, 5];
console.log(nukeDuplications(array));
console.log(nukeDuplicationsEs5(array));
There is a lot of over-complicated, slow-running code here. Here's my solution:
let numbers = [1,2,3,4,4,4,4,5,5]
let filtered = []
numbers.map((n) => {
if(numbers.indexOf(n) === numbers.lastIndexOf(n)) // If only 1 instance of n
filtered.push(n)
})
console.log(filtered)
You can use this function:
function isUniqueInArray(array, value) {
let counter = 0;
for (let index = 0; index < array.length; index++) {
if (array[index] === value) {
counter++;
}
}
if (counter === 0) {
return null;
}
return counter === 1 ? true : false;
}
const array = [1,2,3,4,4,5,5];
let uniqueValues = [];
array.forEach(element => {
if(isUniqueInArray(array ,element)){
uniqueValues.push(element);
}
});
console.log(`the unique values is ${uniqueValues}`);
If it helps you, you can install the isUniqueInArray function from my package https://www.npmjs.com/package/jotils or directly from Bit at https://bit.dev/joshk/jotils/is-unique-in-array.
My answer uses map and filter, as below:
x = [1,2,3,4,2,3]
x.map(d => x.filter(i => i == d).length < 2 ? d : null).filter(d => d != null)
// [1, 4]
Object.values has been supported since ES2017 (needless to say, not on IE).
The accumulator is an object in which each key is a value, so duplicates are removed as they overwrite the same key.
However, this solution can be risky with misbehaving values (null, undefined, etc.), but it may be useful for real-life scenarios.
let NukeDeps = (arr) => {
return Object.values(arr.reduce((curr, i) => {
curr[i] = i;
return curr;
}, {}))
}
I would like to answer my own question with an answer I came up with upon reading it again:
const array = [1, 2, 3, 4, 4, 5, 5];
const filtered = array.filter(item => {
const { length } = array.filter(currentItem => currentItem === item)
if (length === 1) {
return true;
}
});
console.log(filtered)
//Try with this code
var arr = [1,2, 3,3,4,5,5,5,6,6];
arr = arr.filter( function( item, index, inputArray ) {
return inputArray.indexOf(item) == index;
});
Also look into this link https://fiddle.jshell.net/5hshjxvr/
Trying to get two data sets to intersect but I can't do it. For example, in my code below, intersecting mySet and mySet2 should yield "1" since they both have a value of "1" in their set.
var mySet = new Set();
var mySet2=new Set();
mySet.add(1);
mySet.add(2);
mySet.add("HELLOOOOO");
mySet2.add("hi");
mySet2.add(1);
var a = Array(mySet, mySet2);
console.log(a);
mySet.forEach(function(value) {
console.log(value);
});
mySet2.forEach(function(value) {
console.log(value);
});
function intersection_destructive(a, b)
{
var result = new Array();
while( mySet.length > 0 && mySet2.length > 0 )
{
if (mySet[0] < mySet2[0] ){ mySet.shift(); }
else if (mySet[0] > mySet2[0] ){ mySet2.shift(); }
else /* they're equal */
{
result.push(mySet.shift());
mySet2.shift();
}
}
return result;
}
Set 1 and Set 2 both have "1" in them, but my function (intersection_destructive) doesn't return it. I'm not sure how to intersect them. I searched Stack Overflow and found intersection_destructive, but it didn't work for me. I also tried:
array1.filter(function(n) {
return array2.indexOf(n) != -1
});
as per this: Simplest code for array intersection in javascript
but I get an error on filter when I try to run it.
Sadly, as you've figured out, there are no native intersection or union operations. It's not terribly complex to find the intersection though:
let a = new Set([1,2,3])
let b = new Set([1,2,4])
let intersect = new Set([...a].filter(i => b.has(i)));
console.log(...intersect)
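A union, which the answer notes is also missing natively, can be built the same way (my addition, not part of the original answer, reusing a and b from above):
let union = new Set([...a, ...b]); // spread both sets into one new Set
console.log(...union); // 1 2 3 4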
To get the intersection, you can iterate the items of a set and check if they belong to the other one:
var intersect = new Set();
for(var x of mySet1) if(mySet2.has(x)) intersect.add(x);
Array comprehensions and generator comprehensions were once proposed for ES7 but never standardized; in engines that implemented the draft syntax, this could have been written as:
var intersect = new Set((for (x of mySet1) if (mySet2.has(x)) x));
You have been trying array intersection methods. You cannot use them on ES6 Sets. Instead, use:
function intersect(...sets) {
if (!sets.length) return new Set();
const i = sets.reduce((m, s, i) => s.size < sets[m].size ? i : m, 0);
const [smallest] = sets.splice(i, 1);
const res = new Set();
for (let val of smallest)
if (sets.every(s => s.has(val)))
res.add(val);
return res;
}
One technique for intersecting sets is to take the smallest set and check for each of its values in the other sets, removing the value if any set does not contain it. Although runtime is O(n x m), where n is the set length and m is the number of sets, the n is likely to be much smaller if the sets are not all the same length.
function setsIntersection(...sets) {
if (sets.length < 1) {
return new Set();
}
let min_size = sets[0].size;
let min_set_index = 0;
for (let i = 1; i < sets.length; i++) {
const size = sets[i].size;
if (size < min_size) {
min_size = size;
min_set_index = i;
}
}
const temp_swap_set = sets[0];
sets[0] = sets[min_set_index];
sets[min_set_index] = temp_swap_set;
const result = new Set(sets[min_set_index]);
for (let i = 1; i < sets.length; i++) {
for (const v of result) {
if (!sets[i].has(v)) {
result.delete(v);
}
}
}
return result;
}
You could iterate the set and create a new set for the common values.
const
intersectSets = (a, b) => {
const c = new Set;
a.forEach(v => b.has(v) && c.add(v));
return c;
},
a = new Set([1, 2, 3]),
b = new Set([1, 2, 4]),
c = new Set([1, 2, 4, 5]),
n = new Set([1, 2]);
console.log(...[a, b, c, n].reduce(intersectSets));
This will work on both Sets and Arrays. It returns whatever type set1 is. Arguments can be of mixed types.
/**
* Returns the intersection of ES6 Set objects. i.e., returns a new set with only elements contained in all the given sets.
*
* @param {Set|Array} set1 First set
* @param {Array<Array|Set>} sets Other sets
* @returns {Set|Array} Intersection
*/
export function intersection(set1, ...sets) {
if(!sets.length) {
return set1;
}
const tmp = [...set1].filter(x => sets.every(y => Array.isArray(y) ? y.includes(x) : y.has(x)));
return Array.isArray(set1) ? tmp : new Set(tmp);
}
I built it for working with Sets, but then I realized it's probably more expensive to convert an array into a set just so I can use .has on it once than to just use .includes.
With the currently Stage 3 proposal https://github.com/tc39/proposal-set-methods, you could use:
mySet.intersection(mySet2);
const set1 = new Set([1, 2, 3]);
const set2 = new Set([1, 2, 4]);
const intersect = set1.intersection(set2);
console.log(...intersect);
<script src="https://unpkg.com/core-js-bundle@3.27.1/minified.js"></script>
Until then, you could use Immutable.js's Set, which inspired that proposal:
Immutable.Set(mySet).intersect(mySet2);
From the example: intersection_destructive takes 2 ARRAYS not 2 SETS. Here is your code working with the intersection_destructive example.
// These must be sorted arrays!
var mySet = [1,2,6];
var mySet2 = [1,2,5];
// Takes 2 Arrays
// array properties: shift
function intersection_destructive(a, b)
{
var result = [];
while( a.length > 0 && b.length > 0 )
{
if (a[0] < b[0] ){ a.shift(); }
else if (a[0] > b[0] ){ b.shift(); }
else /* they're equal */
{
result.push(a.shift());
b.shift();
}
}
return result;
};
var result = intersection_destructive(mySet, mySet2);
console.log(result); // Output: [1,2]
Using Array.filter() is a useful pattern. You need to convert your Sets to arrays using Array.from(). Also make sure that you filter over the smaller set and look the values up in the bigger one.
see fiddle
var mySet = new Set();
var mySet2=new Set();
mySet.add(1);
mySet.add(2);
mySet.add("HELLOOOOO");
mySet2.add("hi");
mySet2.add(1);
console.log(intersection_destructive(mySet, mySet2));
function intersection_destructive(a, b)
{
// Filter over the smaller set; convert the Sets to arrays so we can use filter/indexOf
if (a.size < b.size) {
var array1 = Array.from(a);
var array2 = Array.from(b);
} else {
var array1 = Array.from(b);
var array2 = Array.from(a);
}
var result = array1.filter(function(n) {
return array2.indexOf(n) != -1
});
return result;
}
var arr = ['test0','test2','test0'];
Like the above, there are two identical entries with the value "test0". How can I check for this most efficiently?
If you sort the array, the duplicates are next to each other so that they are easy to find:
arr.sort();
var last = arr[0];
for (var i=1; i<arr.length; i++) {
if (arr[i] == last) alert('Duplicate : '+last);
last = arr[i];
}
This will do the job on any array and is probably about as optimized as possible for handling the general case (finding a duplicate in any possible array). For more specific cases (e.g. arrays containing only strings) you could do better than this.
function hasDuplicate(arr) {
var i = arr.length, j, val;
while (i--) {
val = arr[i];
j = i;
while (j--) {
if (arr[j] === val) {
return true;
}
}
}
return false;
}
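For the more specific case mentioned above (arrays containing only strings or other primitives), a minimal sketch using an ES6 Set runs in roughly linear time (my addition, assuming Set is available; the hasDuplicatePrimitive name is mine):
function hasDuplicatePrimitive(arr) {
  const seen = new Set();
  for (const value of arr) {
    if (seen.has(value)) return true; // stop at the first duplicate
    seen.add(value);
  }
  return false;
}
console.log(hasDuplicatePrimitive(['test0', 'test2', 'test0'])); // true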
There are lots of answers here but not all of them "feel" nice... So I'll throw my hat in.
If you are using lodash:
function containsDuplicates(array) {
return _.uniq(array).length !== array.length;
}
If you can use ES6 Sets, it simply becomes:
function containsDuplicates(array) {
return array.length !== new Set(array).size
}
With vanilla javascript:
function containsDuplicates(array) {
return array
.sort()
.some(function (item, i, items) {
return item === items[i + 1]
})
}
However, sometimes you may want to check if the items are duplicated on a certain field.
This is how I'd handle that:
containsDuplicates([{country: 'AU'}, {country: 'UK'}, {country: 'AU'}], 'country')
function containsDuplicates(array, attribute) {
return array
.map(function (item) { return item[attribute] })
.sort()
.some(function (item, i, items) {
return item === items[i + 1]
})
}
The loop stops when the first duplicate is found:
function has_duplicates(arr) {
var x = {}, len = arr.length;
for (var i = 0; i < len; i++) {
if (x[arr[i]]) {
return true;
}
x[arr[i]] = true;
}
return false;
}
Edit (fix 'toString' issue):
function has_duplicates(arr) {
var x = {}, len = arr.length;
for (var i = 0; i < len; i++) {
if (x[arr[i]] === true) {
return true;
}
x[arr[i]] = true;
}
return false;
}
This corrects for cases like has_duplicates(['toString']), where the value collides with a property inherited from Object.prototype.
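A quick illustration of the problem the strict check fixes: a plain object inherits keys such as toString from Object.prototype, so a truthiness test alone gives a false positive.
var x = {};
console.log(Boolean(x['toString'])); // true  (inherited from Object.prototype)
console.log(x['toString'] === true); // false (the strict check is not fooled)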
var index = myArray.indexOf(strElement);
if (index < 0) {
myArray.push(strElement);
console.log("Added Into Array" + strElement);
} else {
console.log("Already Exists at " + index);
}
You can convert the array to a Set instance and check whether the array's length equals the Set's size; if not, the array contained duplicates.
const hasDuplicates = (array) => {
const uniqueItems = new Set(array);
return array.length !== uniqueItems.size;
};
console.log(`Has duplicates : ${hasDuplicates(['test0','test2','test0'])}`);
console.log(`Has duplicates : ${hasDuplicates(['test0','test2','test3'])}`);
Sorting is O(n log n) and not O(n). Building a hash map is O(n). It costs more memory than an in-place sort but you asked for the "fastest." (I'm positive this can be optimized but it is optimal up to a constant factor.)
function hasDuplicate(arr) {
var hash = {};
var hasDuplicate = false;
arr.forEach(function(val) {
if (hash[val]) {
hasDuplicate = true;
return;
}
hash[val] = true;
});
return hasDuplicate;
}
It depends on the input array size. I've done some performance tests with Node.js performance hooks and found out that for really small arrays (1,000 to 10,000 entries) the Set solution might be faster. But if your array is bigger (like 100,000 elements) the plain Object (i.e. hash) solution becomes faster. Here's the code so you can try it out for yourself:
const { performance } = require('perf_hooks');
function objectSolution(nums) {
let testObj = {};
for (var i = 0; i < nums.length; i++) {
let aNum = nums[i];
if (testObj[aNum]) {
return true;
} else {
testObj[aNum] = true;
}
}
return false;
}
function setSolution(nums) {
let testSet = new Set(nums);
return testSet.size !== nums.length;
}
function sortSomeSolution(nums) {
return nums
.sort()
.some(function (item, i, items) {
return item === items[i + 1]
})
}
function runTest(testFunction, testArray) {
console.log(' Running test:', testFunction.name);
let start = performance.now();
let result = testFunction(testArray);
let end = performance.now();
console.log(' Duration:', end - start, 'ms');
}
let arr = [];
let setSize = 100000;
for (var i = 0; i < setSize; i++) {
arr.push(i);
}
console.log('Set size:', setSize);
runTest(objectSolution, arr);
runTest(setSolution, arr);
runTest(sortSomeSolution, arr);
On my Lenovo IdeaPad with an i3-8130U, Node.js v16.6.2 gives me the following results for an array of 1,000 entries:
Results for an array of 100,000 entries:
Assuming all you want is to detect how many duplicates of 'test0' are in the array, I guess an easy way to do that is to use the join method to transform the array into a string, and then use the match method.
var arr= ['test0','test2','test0'];
var str = arr.join();
console.log(str) //"test0,test2,test0"
var duplicates = str.match(/test0/g);
var duplicateNumber = duplicates.length;
console.log(duplicateNumber); //2