Let's assume that I have:
var array = [1,2,3,4,4,5,5];
I want it to be:
var newArray = [1,2,3];
I want to remove the duplicated values completely rather than keeping them as unique values. Is there a way to achieve that through the reduce method?
You could use Array#filter with Array#indexOf and Array#lastIndexOf and keep only the values whose first and last index are the same, i.e. values that occur only once.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(function (v, _, a) {
return a.indexOf(v) === a.lastIndexOf(v);
});
console.log(result);
Another approach: take a Map and set the value to false if a key has been seen before, then filter the array by looking up each value in the Map.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(
Map.prototype.get,
array.reduce((m, v) => m.set(v, !m.has(v)), new Map)
);
console.log(result);
I guess it won't have remarkable performance, but I like the idea.
var array = [1,2,3,4,4,5,5],
res = array.reduce(function(s,a) {
if (array.filter(v => v !== a).length == array.length-1) {
s.push(a);
}
return s;
}, []);
console.log(res);
A slightly more efficient solution is to loop over the array once, counting the number of occurrences of each value and storing them in an object using .reduce(), and then loop over the array again with .filter() to return only the items that occurred once.
This method will also preserve the order of the array, as it merely uses the object keys as references - it does not iterate over them.
var array = [1,2,3,4,4,5,5];
var valueCounts = array.reduce((result, item) => {
if (!result[item]) {
result[item] = 0;
}
result[item]++;
return result;
}, {});
var unique = array.filter(function (elem) {
return !valueCounts[elem] || valueCounts[elem] <= 1;
});
console.log(unique)
Another option is to use an object to track how many times an element is used. This will destroy the array order, but it should be much faster on very large arrays.
// Both versions destroy array order.
// ES6 version
function nukeDuplications(arr) {
"use strict";
const hash = {};
arr.forEach(el => {
const qty = hash[el] || 0;
hash[el] = qty + 1;
});
const ret = [];
Object.keys(hash).forEach(key => {
if (hash[key] === 1) {
ret.push(Number(key));
}
})
return ret;
}
// ES5 version
function nukeDuplicationsEs5(arr) {
"use strict";
var hash = {};
for (var i = 0; i < arr.length; i++) {
var el = arr[i];
var qty = hash[el] || 0;
hash[el] = qty + 1;
};
var ret = [];
for (let key in hash) {
if (hash.hasOwnProperty(key)) {
if (hash[key] === 1) {
ret.push(Number(key));
}
}
}
return ret;
}
var array = [1, 2, 3, 4, 4, 5, 5];
console.log(nukeDuplications(array));
console.log(nukeDuplicationsEs5(array));
There is a lot of over-complicated, slow-running code here. Here's my solution:
let numbers = [1,2,3,4,4,4,4,5,5]
let filtered = []
numbers.map((n) => {
if(numbers.indexOf(n) === numbers.lastIndexOf(n)) // If only 1 instance of n
filtered.push(n)
})
console.log(filtered)
You can use this function:
function isUniqueInArray(array, value) {
let counter = 0;
for (let index = 0; index < array.length; index++) {
if (array[index] === value) {
counter++;
}
}
if (counter === 0) {
return null;
}
return counter === 1 ? true : false;
}
const array = [1,2,3,4,4,5,5];
let uniqueValues = [];
array.forEach(element => {
if(isUniqueInArray(array ,element)){
uniqueValues.push(element);
}
});
console.log(`the unique values are ${uniqueValues}`);
If it helps you, you can install the isUniqueInArray function from my package https://www.npmjs.com/package/jotils or directly from bit: https://bit.dev/joshk/jotils/is-unique-in-array.
My answer uses map and filter, as below:
x = [1,2,3,4,2,3]
x.map(d => x.filter(i => i == d).length < 2 ? d : null).filter(d => d != null)
// [1, 4]
Object.values is supported since ES2017 (Needless to say - not on IE).
The accumulator is an object for which each key is a value, so duplicates are removed as they override the same key.
However, this solution can be risky with misbehaving values (null, undefined etc.), but maybe useful for real life scenarios.
let NukeDeps = (arr) => {
return Object.values(arr.reduce((curr, i) => {
curr[i] = i;
return curr;
}, {}))
}
I would like to answer my own question with a solution I came up with upon reading it again:
const array = [1, 2, 3, 4, 4, 5, 5];
const filtered = array.filter(item => {
const { length } = array.filter(currentItem => currentItem === item)
if (length === 1) {
return true;
}
});
console.log(filtered)
//Try with this code
var arr = [1,2, 3,3,4,5,5,5,6,6];
arr = arr.filter( function( item, index, inputArray ) {
return inputArray.indexOf(item) == index;
});
Also look into this link https://fiddle.jshell.net/5hshjxvr/
Related
It is a simple exercise that I am doing for practice and leisure. I have done it in various ways, but I was wondering if there is an even more practical way, or a way to reduce the lines of code, making use of JavaScript's many array methods.
The exercise is about receiving an array (arr) and a number (target) and returning another array with a pair of numbers found in 'arr' whose sum is equal to 'target'.
function targetSum3(arr, target) {
let newArr = [];
let copyArray = arr;
for (let i of copyArray) {
let x = Math.abs(i - target);
copyArray.pop(copyArray[i]);
if (copyArray.includes(x) && (copyArray.indexOf(x) != copyArray.indexOf(i))) {
newArr.push(i);
newArr.push(x);
return newArr;
}
}
return newArr;
}
If you are fine with a function that just returns a pair of numbers (the first match, so to speak) whose sum equals the target value, this might be enough:
function sumPair (arr, target) {
while(arr.length) {
let sum1 = arr.shift();
let sum2 = arr.find(val => sum1 + val === target);
    if (sum2 !== undefined) return [sum2, sum1]; // explicit check so a matching 0 is not skipped
}
return null;
}
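A quick usage sketch (this call and its output are my own illustration, not part of the answer); note that sumPair consumes the array it is given, since it shifts elements off while searching:
console.log(sumPair([1, 2, 3, 4, 5], 7)); // [5, 2]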
const targetSum = (arr, target) => {
const first = arr.find((v,i,a) => arr.includes(target-v) && (arr.indexOf(target-v) !== i));
return first ? [first, target - first] : null;
};
const values = [1,2,3,4,5,6,7,8,9];
console.log(targetSum(values, 1)); // null
console.log(targetSum(values, 2)); // null
console.log(targetSum(values, 3)); // [1, 2]
console.log(targetSum(values, 15)); // [6, 9]
console.log(targetSum(values, 20)); // null
I changed the for loop to forEach and removed the copyArray variable, since there is no need for it. I also changed pop() to shift(); I think you want to shift the array and not pop it (if I understand the task correctly).
function targetSum3(arr, target) {
let newArr = [];
arr.forEach(element => {
let x = Math.abs(element - target); // calc x
arr.shift(); // removes first element from arr (current element)
if (arr.includes(x) && (arr.indexOf(x) != arr.indexOf(element))) {
newArr.push(element);
newArr.push(x);
return;
}
});
return newArr;
}
Use Array.filter to find the target sum for all values in a given array. See comments in the snippet.
sumsForTargetInArray();
document.addEventListener(`click`,
evt => evt.target.id === `redo` && sumsForTargetInArray());
function sumsInArray(arr, target) {
// clone the array
const clone = arr.slice();
let result = [];
while (clone.length) {
// retrieve the current value (shifting it from the clone)
const current = clone.shift();
// filter arr: all values where value + sum = target
const isTarget = arr.filter(v => current + v === target);
// add to result.
// Sorting is to prevent duplicates later
if (isTarget.length) {
result = [...result, ...isTarget.map(v => [current, v].sort())];
}
}
// weed out duplicates (e.g. 0 + 3, 3 + 0)
const unique = new Set();
result.forEach(r => unique.add(`${r[0]},${r[1]}`));
// return array of array(2)
return [...unique].map(v => v.split(`,`).map(Number));
}
function sumsForTargetInArray() {
const testArr = [...Array(20)].map((_, i) => i);
const target = Math.floor(Math.random() * 30);
document.querySelector(`pre`).textContent = `testArray: ${
JSON.stringify(testArr)}\ntarget: ${target}\nResult: ${
JSON.stringify(sumsInArray(testArr, target))}`;
}
<pre></pre>
<button id="redo">Again</button>
I am trying to find the indexes of all the instances of an element, say, "Nano", in a JavaScript array.
var Cars = ["Nano", "Volvo", "BMW", "Nano", "VW", "Nano"];
I tried jQuery.inArray, or similarly, .indexOf(), but it only gave the index of the last instance of the element, i.e. 5 in this case.
How do I get it for all instances?
The .indexOf() method has an optional second parameter that specifies the index to start searching from, so you can call it in a loop to find all instances of a particular value:
function getAllIndexes(arr, val) {
var indexes = [], i = -1;
while ((i = arr.indexOf(val, i+1)) != -1){
indexes.push(i);
}
return indexes;
}
var indexes = getAllIndexes(Cars, "Nano");
You don't really make it clear how you want to use the indexes, so my function returns them as an array (or returns an empty array if the value isn't found), but you could do something else with the individual index values inside the loop.
UPDATE: As per VisioN's comment, a simple for loop would get the same job done more efficiently, and it is easier to understand and therefore easier to maintain:
function getAllIndexes(arr, val) {
var indexes = [], i;
for(i = 0; i < arr.length; i++)
if (arr[i] === val)
indexes.push(i);
return indexes;
}
Another alternative solution is to use Array.prototype.reduce():
["Nano","Volvo","BMW","Nano","VW","Nano"].reduce(function(a, e, i) {
if (e === 'Nano')
a.push(i);
return a;
}, []); // [0, 3, 5]
N.B.: Check the browser compatibility for the reduce method and use a polyfill if required.
Another approach using Array.prototype.map() and Array.prototype.filter():
var indices = array.map((e, i) => e === value ? i : '').filter(String)
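Using the Cars array from the question, this would look like (the concrete call is my own worked example):
var indices = Cars.map((e, i) => e === "Nano" ? i : '').filter(String);
console.log(indices); // [0, 3, 5]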
A simpler way, in ES6 style:
const indexOfAll = (arr, val) => arr.reduce((acc, el, i) => (el === val ? [...acc, i] : acc), []);
//Examples:
var cars = ["Nano", "Volvo", "BMW", "Nano", "VW", "Nano"];
indexOfAll(cars, "Nano"); //[0, 3, 5]
indexOfAll([1, 2, 3, 1, 2, 3], 1); // [0,3]
indexOfAll([1, 2, 3], 4); // []
You can write a simple readable solution to this by using both map and filter:
const nanoIndexes = Cars
.map((car, i) => car === 'Nano' ? i : -1)
.filter(index => index !== -1);
EDIT: If you don't need to support IE/Edge (or are transpiling your code), ES2019 gave us flatMap, which lets you do this in a simple one-liner:
const nanoIndexes = Cars.flatMap((car, i) => car === 'Nano' ? i : []);
I just want to add another easy method: you can also use the forEach method.
var Cars = ["Nano", "Volvo", "BMW", "Nano", "VW", "Nano"];
var result = [];
Cars.forEach((car, index) => car === 'Nano' ? result.push(index) : null)
Note: MDN gives a method using a while loop:
var indices = [];
var array = ['a', 'b', 'a', 'c', 'a', 'd'];
var element = 'a';
var idx = array.indexOf(element);
while (idx != -1) {
indices.push(idx);
idx = array.indexOf(element, idx + 1);
}
I wouldn't say it's any better than other answers. Just interesting.
const indexes = cars
.map((car, i) => car === "Nano" ? i : null)
.filter(i => i !== null)
This worked for me:
let array1 = [5, 12, 8, 130, 44, 12, 45, 12, 56];
let numToFind = 12 // the number whose occurrences in the array we want to find
let indexesOf12 = []
array1.forEach(function(elem, index, array) {
if (elem === numToFind) {indexesOf12.push(index)}
return indexesOf12
})
console.log(indexesOf12) // outputs [1, 5, 7]
Just to share another method, you can use generator functions to achieve the result as well:
function findAllIndexOf(target, needle) {
return [].concat(...(function*(){
for (var i = 0; i < target.length; i++) if (target[i] === needle) yield [i];
})());
}
var target = "hellooooo";
var target2 = ['w','o',1,3,'l','o'];
console.log(findAllIndexOf(target, 'o'));
console.log(findAllIndexOf(target2, 'o'));
["a", "b", "a", "b"]
.map((val, index) => ({ val, index }))
.filter(({val, index}) => val === "a")
.map(({val, index}) => index)
=> [0, 2]
You can use a polyfill:
if (!Array.prototype.filterIndex)
{
Array.prototype.filterIndex = function (func, thisArg) {
'use strict';
if (!((typeof func === 'Function' || typeof func === 'function') && this))
throw new TypeError();
let len = this.length >>> 0,
res = new Array(len), // preallocate array
t = this, c = 0, i = -1;
let kValue;
if (thisArg === undefined) {
while (++i !== len) {
// checks to see if the key was set
if (i in this) {
kValue = t[i]; // in case t is changed in callback
if (func(t[i], i, t)) {
res[c++] = i;
}
}
}
}
else {
while (++i !== len) {
// checks to see if the key was set
if (i in this) {
kValue = t[i];
if (func.call(thisArg, t[i], i, t)) {
res[c++] = i;
}
}
}
}
res.length = c; // shrink down array to proper size
return res;
};
}
Use it like this:
[2,23,1,2,3,4,52,2].filterIndex(element => element === 2)
result: [0, 3, 7]
findIndex retrieves only the first index that matches the callback. You can implement your own findIndexes by extending Array, then casting your arrays to the new structure.
class EnhancedArray extends Array {
findIndexes(where) {
return this.reduce((a, e, i) => (where(e, i) ? a.concat(i) : a), []);
}
}
/*----Working with simple data structure (array of numbers) ---*/
//existing array
let myArray = [1, 3, 5, 5, 4, 5];
//cast it :
myArray = new EnhancedArray(...myArray);
//run
console.log(
myArray.findIndexes((e) => e===5)
)
/*----Working with Array of complex items structure-*/
let arr = [{name: 'Ahmed'}, {name: 'Rami'}, {name: 'Abdennour'}];
arr= new EnhancedArray(...arr);
console.log(
arr.findIndexes((o) => o.name.startsWith('A'))
)
We can use a Stack and push i onto the stack every time we encounter arr[i] == value (shown here in Java):
Check this:
static void getindex(int arr[], int value)
{
Stack<Integer>st= new Stack<Integer>();
int n= arr.length;
for(int i=n-1; i>=0 ;i--)
{
if(arr[i]==value)
{
st.push(i);
}
}
while(!st.isEmpty())
{
System.out.println(st.peek()+" ");
st.pop();
}
}
When both parameters are passed as arrays:
function getIndexes(arr, val) {
var indexes = [], i;
for(i = 0; i < arr.length; i++){
for (var j = 0; j < val.length; j++) {
if (arr[i] === val[j])
indexes.push(i);
}
}
return indexes;
}
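A small usage sketch (the sample arrays are my own, not from the answer):
var indexes = getIndexes([1, 2, 3, 1], [1, 3]);
console.log(indexes); // [0, 2, 3]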
Also, findIndex() will be useful:
var cars = ['Nano', 'Volvo', 'BMW', 'Nano', 'VW', 'Nano'];
const indexes = [];
const searchedItem = 'NaNo';
cars.findIndex((value, index) => {
if (value.toLowerCase() === searchedItem.toLowerCase()) {
indexes.push(index);
}
});
console.log(indexes); //[ 0, 3, 5 ]
Bonus:
A custom solution using Object.entries() and forEach():
var cars = ['Nano', 'Volvo', 'BMW', 'Nano', 'VW', 'Nano'];
const indexes = [];
const searchableItem = 'Nano';
Object.entries(cars).forEach((item, index) => {
if (item[1].toLowerCase() === searchableItem.toLowerCase())
indexes.push(index);
});
console.log(indexes);
Note: I did not run all tests.
I have an array of objects, and want to:
Remove certain objects from the array
Treat the removed objects in a second step
I don't know in advance where these objects are. To recognize them, I need to use a function that queries their properties. It makes sense to retrieve the removed objects in a second array.
I had hoped to find a native method like filter or splice that would do this. Here's what I've come up with as a solution:
if (!Array.prototype.cherrypick) {
Array.prototype.cherrypick = function(fn) {
let basket = []
let ii = this.length
let item
for ( ; ii-- ; ) {
item = this[ii]
if (fn(item)) {
basket.unshift(item)
this.splice(ii, 1)
}
}
return basket
}
}
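For reference, a quick usage sketch of the method above (the sample data is my own, not from the question):
let nums = [1, 2, 3, 4, 5, 6]
let evens = nums.cherrypick(n => n % 2 === 0)
console.log(evens) // [2, 4, 6]
console.log(nums)  // [1, 3, 5] - the source array is mutated in place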
Have I missed something? Is there a native method that does this already? Is my solution unsound in some way?
Have I missed something? Is there a native method that does this already?
No, most native utility methods try not to mutate the array and instead return a new one.
Is my solution unsound in some way?
Using splice and unshift repeatedly like you do is very inefficient. Better write
if (typeof Array.prototype.cherrypick == "function")
console.warn("something already defines Array#cherrypick!");
Array.prototype.cherrypick = function(predicate) {
let removed = [];
for (let i=0, j=0; i<this.length; i++) {
const item = this[i];
    if (predicate(item)) {
removed.push(item);
} else {
this[j++] = item; // keep in array, but at new position
}
}
this.length = j; // removes rest
return removed;
};
Methods such as Array.filter() return a new array instead of changing the original array.
You can create a partition method using Array.reduce() that will return two arrays - those that passed the predicate, and those that failed:
const partition = (predicate, arr) =>
arr.reduce((r, o) => {
r[+!!predicate(o)].push(o);
return r;
}, [[], []]);
const arr = [4, 8, 3, 10, 12];
const result = partition(n => n > 5, arr);
console.log(result);
And you can use the partition logic with Array.splice() to create the cherrypick method:
if (!Array.prototype.cherrypick) {
Array.prototype.cherrypick = function(predicate) {
    const [items, removedItems] = this.reduce((r, o) => {
r[+!!predicate(o)].push(o);
return r;
}, [[], []]);
    this.splice(0, this.length, ...items);
return removedItems;
}
}
const arr = [4, 8, 3, 10, 12];
const removed = arr.cherrypick(n => n > 5);
console.log('arr ', arr);
console.log('removed ', removed);
Just filter twice:
const picked = array.filter(fn);
array = array.filter((el, i, a) => !fn(el, i, a));
Use reduce as follows:
array = [1,2,3,4,5,6,7];
fn = n => n % 3 == 0;
[array, picked] = array.reduce ( (r, el) => (r[+fn(el)].push (el), r), [[], []] )
Do you want something like this?
const basket = ['apple', 'banana', 'car'];
const filterMapBasket = basket
.filter(item => item !== 'car')
.map(item => {return { name: item }});
This will cause the initial basket array of strings to be filtered and transformed into an array of objects.
This will alter the source array in place, removing items that meet some test, and return those items:
Array.prototype.removeIf = function(fn) {
let i = this.length;
let removed = [];
while (i--) {
if (fn(this[i], i)) {
removed.push(...this.splice(i, 1));
}
}
return removed;
};
let a = [0,1,2,3,4,5];
let removed = a.removeIf(i => i%2);
console.log(a);
console.log(removed);
What's the simplest, library-free code for implementing array intersections in javascript? I want to write
intersection([1,2,3], [2,3,4,5])
and get
[2, 3]
Use a combination of Array.prototype.filter and Array.prototype.includes:
const filteredArray = array1.filter(value => array2.includes(value));
For older browsers, with Array.prototype.indexOf and without an arrow function:
var filteredArray = array1.filter(function(n) {
return array2.indexOf(n) !== -1;
});
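A quick check with the arrays from the question:
const array1 = [1, 2, 3];
const array2 = [2, 3, 4, 5];
console.log(array1.filter(value => array2.includes(value))); // [2, 3]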
NB! Both .includes and .indexOf internally compare elements in the array by using ===, so if the array contains objects it will only compare object references (not their content). If you want to specify your own comparison logic, use Array.prototype.some instead.
Destructive seems simplest, especially if we can assume the input is sorted:
/* destructively finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
* State of input arrays is undefined when
 * the function returns. They should
 * (prolly) be dumped.
*
* Should have O(n) operations, where n is
* n = MIN(a.length, b.length)
*/
function intersection_destructive(a, b)
{
var result = [];
while( a.length > 0 && b.length > 0 )
{
if (a[0] < b[0] ){ a.shift(); }
else if (a[0] > b[0] ){ b.shift(); }
else /* they're equal */
{
result.push(a.shift());
b.shift();
}
}
return result;
}
Non-destructive has to be a hair more complicated, since we’ve got to track indices:
/* finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
*
* Should have O(n) operations, where n is
 * n = MIN(a.length, b.length)
*/
function intersect_safe(a, b)
{
var ai=0, bi=0;
var result = [];
while( ai < a.length && bi < b.length )
{
if (a[ai] < b[bi] ){ ai++; }
else if (a[ai] > b[bi] ){ bi++; }
else /* they're equal */
{
result.push(a[ai]);
ai++;
bi++;
}
}
return result;
}
If your environment supports ECMAScript 6 Set, one simple and supposedly efficient (see specification link) way:
function intersect(a, b) {
var setA = new Set(a);
var setB = new Set(b);
var intersection = new Set([...setA].filter(x => setB.has(x)));
return Array.from(intersection);
}
Shorter, but less readable (also without creating the additional intersection Set):
function intersect(a, b) {
var setB = new Set(b);
return [...new Set(a)].filter(x => setB.has(x));
}
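A quick check with the arrays from the question (either variant gives the same result here):
console.log(intersect([1, 2, 3], [2, 3, 4, 5])); // [2, 3]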
Note that when using sets you will only get distinct values, thus new Set([1, 2, 3, 3]).size evaluates to 3.
Using Underscore.js or lodash.js
_.intersection( [0,345,324] , [1,0,324] ) // gives [0,324]
// Return elements of array a that are also in b in linear time:
function intersect(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
// Example:
console.log(intersect([1,2,3], [2,3,4,5]));
I recommend the succinct solution above, which outperforms other implementations on large inputs. If performance on small inputs matters, check the alternatives below.
Alternatives and performance comparison:
See the following snippet for alternative implementations and check https://jsperf.com/array-intersection-comparison for performance comparisons.
function intersect_for(a, b) {
const result = [];
const alen = a.length;
const blen = b.length;
for (let i = 0; i < alen; ++i) {
const ai = a[i];
for (let j = 0; j < blen; ++j) {
if (ai === b[j]) {
result.push(ai);
break;
}
}
}
return result;
}
function intersect_filter_indexOf(a, b) {
return a.filter(el => b.indexOf(el) !== -1);
}
function intersect_filter_in(a, b) {
const map = b.reduce((map, el) => {map[el] = true; return map}, {});
return a.filter(el => el in map);
}
function intersect_for_in(a, b) {
const result = [];
const map = {};
for (let i = 0, length = b.length; i < length; ++i) {
map[b[i]] = true;
}
for (let i = 0, length = a.length; i < length; ++i) {
if (a[i] in map) result.push(a[i]);
}
return result;
}
function intersect_filter_includes(a, b) {
return a.filter(el => b.includes(el));
}
function intersect_filter_has_this(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
function intersect_filter_has_arrow(a, b) {
const set = new Set(b);
return a.filter(el => set.has(el));
}
function intersect_for_has(a, b) {
const result = [];
const set = new Set(b);
for (let i = 0, length = a.length; i < length; ++i) {
if (set.has(a[i])) result.push(a[i]);
}
return result;
}
Results in Firefox 53:
Ops/sec on large arrays (10,000 elements):
filter + has (this) 523 (this answer)
for + has 482
for-loop + in 279
filter + in 242
for-loops 24
filter + includes 14
filter + indexOf 10
Ops/sec on small arrays (100 elements):
for-loop + in 384,426
filter + in 192,066
for-loops 159,137
filter + includes 104,068
filter + indexOf 71,598
filter + has (this) 43,531 (this answer)
filter + has (arrow function) 35,588
My contribution in ES6 terms. In general it finds the intersection of an array with an indefinite number of arrays provided as arguments.
Array.prototype.intersect = function(...a) {
return [this,...a].reduce((p,c) => p.filter(e => c.includes(e)));
}
var arrs = [[0,2,4,6,8],[4,5,6,7],[4,6]],
arr = [0,1,2,3,4,5,6,7,8,9];
document.write("<pre>" + JSON.stringify(arr.intersect(...arrs)) + "</pre>");
How about just using associative arrays?
function intersect(a, b) {
var d1 = {};
var d2 = {};
var results = [];
for (var i = 0; i < a.length; i++) {
d1[a[i]] = true;
}
for (var j = 0; j < b.length; j++) {
d2[b[j]] = true;
}
for (var k in d1) {
if (d2[k])
results.push(k);
}
return results;
}
edit:
// new version
function intersect(a, b) {
var d = {};
var results = [];
for (var i = 0; i < b.length; i++) {
d[b[i]] = true;
}
for (var j = 0; j < a.length; j++) {
if (d[a[j]])
results.push(a[j]);
}
return results;
}
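A quick check of the new version with the arrays from the question (my own addition):
console.log(intersect([1, 2, 3], [2, 3, 4, 5])); // [2, 3]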
The performance of #atk's implementation for sorted arrays of primitives can be improved by using .pop rather than .shift.
function intersect(array1, array2) {
var result = [];
// Don't destroy the original arrays
var a = array1.slice(0);
var b = array2.slice(0);
var aLast = a.length - 1;
var bLast = b.length - 1;
while (aLast >= 0 && bLast >= 0) {
if (a[aLast] > b[bLast] ) {
a.pop();
aLast--;
} else if (a[aLast] < b[bLast] ){
b.pop();
bLast--;
} else /* they're equal */ {
result.push(a.pop());
b.pop();
aLast--;
bLast--;
}
}
return result;
}
I created a benchmark using jsPerf. It's about three times faster to use .pop.
If you need to have it handle intersecting multiple arrays:
const intersect = (a1, a2, ...rest) => {
const a12 = a1.filter(value => a2.includes(value))
if (rest.length === 0) { return a12; }
return intersect(a12, ...rest);
};
console.log(intersect([1,2,3,4,5], [1,2], [1, 2, 3,4,5], [2, 10, 1]))
Sort both arrays, then check them element by element from index 0, building a new array from the matches. Something like this (not well tested, though):
function intersection(x,y){
x.sort();y.sort();
var i = 0, j = 0, ret = [];
while(i<x.length && j<y.length){
if(x[i]<y[j])i++;
else if(y[j]<x[i])j++;
else {
ret.push(x[i]);
i++,j++;
}
}
return ret;
}
alert(intersection([1,2,3], [2,3,4,5]));
PS: The algorithm is only intended for numbers and normal strings; intersection of arbitrary object arrays may not work.
Using jQuery:
var a = [1,2,3];
var b = [2,3,4,5];
var c = $(b).not($(b).not(a));
alert(c);
A tiny tweak to the smallest one here (the filter/indexOf solution), namely creating an index of the values in one of the arrays using a JavaScript object, will reduce it from O(N*M) to "probably" linear time.
function intersect(a, b) {
var aa = {};
a.forEach(function(v) { aa[v]=1; });
return b.filter(function(v) { return v in aa; });
}
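A quick check with the arrays from the question (note that object keys are strings, so this is meant for primitive values):
console.log(intersect([1, 2, 3], [2, 3, 4, 5])); // [2, 3]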
This isn't the very simplest solution (it's more code than filter+indexOf), nor is it the very fastest (probably slower by a constant factor than intersect_safe()), but seems like a pretty good balance. It is on the very simple side, while providing good performance, and it doesn't require pre-sorted inputs.
For arrays containing only strings or numbers you can do something with sorting, as per some of the other answers. For the general case of arrays of arbitrary objects I don't think you can avoid doing it the long way. The following will give you the intersection of any number of arrays provided as parameters to arrayIntersection:
var arrayContains = Array.prototype.indexOf ?
function(arr, val) {
return arr.indexOf(val) > -1;
} :
function(arr, val) {
var i = arr.length;
while (i--) {
if (arr[i] === val) {
return true;
}
}
return false;
};
function arrayIntersection() {
var val, arrayCount, firstArray, i, j, intersection = [], missing;
var arrays = Array.prototype.slice.call(arguments); // Convert arguments into a real array
// Search for common values
firstArray = arrays.pop();
if (firstArray) {
j = firstArray.length;
arrayCount = arrays.length;
while (j--) {
val = firstArray[j];
missing = false;
// Check val is present in each remaining array
i = arrayCount;
while (!missing && i--) {
if ( !arrayContains(arrays[i], val) ) {
missing = true;
}
}
if (!missing) {
intersection.push(val);
}
}
}
return intersection;
}
arrayIntersection( [1, 2, 3, "a"], [1, "a", 2], ["a", 1] ); // Gives [1, "a"];
Simplest, fastest O(n) and shortest way:
function intersection (a, b) {
const setA = new Set(a);
return b.filter(value => setA.has(value));
}
console.log(intersection([1,2,3], [2,3,4,5]))
#nbarbosa has almost the same answer but he cast both arrays to Set and then back to array. There is no need for any extra casting.
Another indexed approach able to process any number of arrays at once:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = 0;
index[v]++;
};
};
var retv = [];
for (var i in index) {
if (index[i] == arrLength) retv.push(i);
};
return retv;
};
It works only for values that can be evaluated as strings and you should pass them as an array like:
intersect ([arr1, arr2, arr3...]);
...but it transparently accepts objects as the parameter or as any of the elements to be intersected (always returning an array of common values). Examples:
intersect ({foo: [1, 2, 3, 4], bar: {a: 2, j:4}}); // [2, 4]
intersect ([{x: "hello", y: "world"}, ["hello", "user"]]); // ["hello"]
EDIT: I just noticed that this is, in a way, slightly buggy.
That is: I coded it thinking that the input arrays cannot themselves contain repetitions (as the provided example doesn't).
But if the input arrays happen to contain repetitions, that would produce wrong results. Example (using the original implementation above):
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]);
// Expected: [ '1' ]
// Actual: [ '1', '3' ]
Fortunately this is easy to fix by simply adding a second level of indexing. That is:
Change:
if (index[v] === undefined) index[v] = 0;
index[v]++;
by:
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
...and:
if (index[i] == arrLength) retv.push(i);
by:
if (Object.keys(index[i]).length == arrLength) retv.push(i);
Complete example:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
};
};
var retv = [];
for (var i in index) {
if (Object.keys(index[i]).length == arrLength) retv.push(i);
};
return retv;
};
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]); // [ '1' ]
With some restrictions on your data, you can do it in linear time!
For positive integers: use an array mapping the values to a "seen/not seen" boolean.
function intersectIntegers(array1,array2) {
var seen=[],
result=[];
for (var i = 0; i < array1.length; i++) {
seen[array1[i]] = true;
}
for (var i = 0; i < array2.length; i++) {
if ( seen[array2[i]])
result.push(array2[i]);
}
return result;
}
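A quick check with the arrays from the question (positive integers only, as noted above):
console.log(intersectIntegers([1, 2, 3], [2, 3, 4, 5])); // [2, 3]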
There is a similar technique for objects: take a dummy key, set it to "true" for each element in array1, then look for this key in elements of array2. Clean up when you're done.
function intersectObjects(array1,array2) {
var result=[];
var key="tmpKey_intersect"
for (var i = 0; i < array1.length; i++) {
array1[i][key] = true;
}
for (var i = 0; i < array2.length; i++) {
if (array2[i][key])
result.push(array2[i]);
}
for (var i = 0; i < array1.length; i++) {
delete array1[i][key];
}
return result;
}
Of course you need to be sure the key didn't appear before, otherwise you'll be destroying your data...
function intersection(A,B){
var result = new Array();
    for (var i = 0; i < A.length; i++) {
        for (var j = 0; j < B.length; j++) {
if (A[i] == B[j] && $.inArray(A[i],result) == -1) {
result.push(A[i]);
}
}
}
return result;
}
For simplicity:
// Usage
const intersection = allLists
.reduce(intersect, allValues)
.reduce(removeDuplicates, []);
// Implementation
const intersect = (intersection, list) =>
intersection.filter(item =>
list.some(x => x === item));
const removeDuplicates = (uniques, item) =>
uniques.includes(item) ? uniques : uniques.concat(item);
// Example Data
const somePeople = [bob, doug, jill];
const otherPeople = [sarah, bob, jill];
const morePeople = [jack, jill];
const allPeople = [...somePeople, ...otherPeople, ...morePeople];
const allGroups = [somePeople, otherPeople, morePeople];
// Example Usage
const intersection = allGroups
.reduce(intersect, allPeople)
.reduce(removeDuplicates, []);
intersection; // [jill]
Benefits:
dirt simple
data-centric
works for arbitrary number of lists
works for arbitrary lengths of lists
works for arbitrary types of values
works for arbitrary sort order
retains shape (order of first appearance in any array)
exits early where possible
memory safe, short of tampering with Function / Array prototypes
Drawbacks:
higher memory usage
higher CPU usage
requires an understanding of reduce
requires understanding of data flow
You wouldn't want to use this for 3D engine or kernel work, but if you have problems getting this to run in an event-based app, your design has bigger problems.
I'll contribute with what has been working out best for me:
if (!Array.prototype.intersect){
Array.prototype.intersect = function (arr1) {
var r = [], o = {}, l = this.length, i, v;
for (i = 0; i < l; i++) {
o[this[i]] = true;
}
l = arr1.length;
for (i = 0; i < l; i++) {
v = arr1[i];
if (v in o) {
r.push(v);
}
}
return r;
};
}
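Usage sketch with the arrays from the question (my own addition):
console.log([1, 2, 3].intersect([2, 3, 4, 5])); // [2, 3]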
A functional approach with ES2015
A functional approach must consider using only pure functions without side effects, each of which is only concerned with a single job.
These restrictions enhance the composability and reusability of the functions involved.
// small, reusable auxiliary functions
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const apply = f => x => f(x);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// run it
console.log( intersect(xs) (ys) );
Please note that the native Set type is used, which has an advantageous
lookup performance.
Avoid duplicates
Obviously repeatedly occurring items from the first Array are preserved, while the second Array is de-duplicated. This may or may not be the desired behavior. If you need a unique result, just apply dedupe to the first argument:
// auxiliary functions
const apply = f => x => f(x);
const comp = f => g => x => f(g(x));
const afrom = apply(Array.from);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// de-duplication
const dedupe = comp(afrom) (createSet);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// unique result
console.log( intersect(dedupe(xs)) (ys) );
Compute the intersection of any number of Arrays
If you want to compute the intersection of an arbitrary number of Arrays, just compose intersect with foldl. Here is a convenience function:
// auxiliary functions
const apply = f => x => f(x);
const uncurry = f => (x, y) => f(x) (y);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const foldl = f => acc => xs => xs.reduce(uncurry(f), acc);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// intersection of an arbitrary number of Arrays
const intersectn = (head, ...tail) => foldl(intersect) (head) (tail);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
const zs = [0,1,2,3,4,5,6];
// run
console.log( intersectn(xs, ys, zs) );
Use .reduce to build a map, and .filter to find the intersection. The delete within the .filter allows us to treat the second array as though it's a unique set.
function intersection (a, b) {
var seen = a.reduce(function (h, k) {
h[k] = true;
return h;
}, {});
return b.filter(function (k) {
var exists = seen[k];
delete seen[k];
return exists;
});
}
I find this approach pretty easy to reason about. The map lookups are constant time, so the whole intersection runs in linear time.
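A quick check with the arrays from the question (my own addition):
console.log(intersection([1, 2, 3], [2, 3, 4, 5])); // [2, 3]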
I have written an intersection function which can even detect the intersection of arrays of objects, based on a particular property of those objects.
For instance,
if arr1 = [{id: 10}, {id: 20}]
and arr2 = [{id: 20}, {id: 25}]
and we want intersection based on the id property, then the output should be :
[{id: 20}]
As such, the function for the same (note: ES6 code) is :
const intersect = (arr1, arr2, accessors = [v => v, v => v]) => {
const [fn1, fn2] = accessors;
const set = new Set(arr2.map(v => fn2(v)));
return arr1.filter(value => set.has(fn1(value)));
};
and you can call the function as:
intersect(arr1, arr2, [elem => elem.id, elem => elem.id])
Also note: this function treats the first array as the primary array, so the intersection result consists of elements taken from the primary array.
This function avoids the N^2 problem by taking advantage of the power of dictionaries. It loops through each array only once, plus a third, shorter loop to build the final result.
It also supports numbers, strings, and objects.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [];
// Returns a unique reference string for the type and value of the element
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
array1.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
}
});
array2.forEach(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
mergedElems[key].inArray2 = true;
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
return result;
}
If there is a special case that cannot be solved, just by modifying the generateStrKey function, it could surely be solved. The trick of this function is that it uniquely represents each different data according to type and value.
This variant has some performance improvements. It avoids the loops entirely if either array is empty. It also walks through the shorter array first, so if it finds all the values of the shorter array in the longer one, it exits the loop early.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [],
firstArray, secondArray,
firstN = 0,
secondN = 0;
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
// Executes the loops only if both arrays have values
if (array1.length && array2.length)
{
// Begins with the shortest array to optimize the algorithm
if (array1.length < array2.length) {
firstArray = array1;
secondArray = array2;
} else {
firstArray = array2;
secondArray = array1;
}
firstArray.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
// Increases the counter of unique values in the first array
firstN++;
}
});
secondArray.some(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
if (!mergedElems[key].inArray2) {
mergedElems[key].inArray2 = true;
// Increases the counter of matches
secondN++;
// If all elements of first array have coincidence, then exits the loop
return (secondN === firstN);
}
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
}
return result;
}
Here is underscore.js implementation:
_.intersection = function(array) {
if (array == null) return [];
var result = [];
var argsLength = arguments.length;
for (var i = 0, length = array.length; i < length; i++) {
var item = array[i];
if (_.contains(result, item)) continue;
for (var j = 1; j < argsLength; j++) {
if (!_.contains(arguments[j], item)) break;
}
if (j === argsLength) result.push(item);
}
return result;
};
Source: http://underscorejs.org/docs/underscore.html#section-62
Create an Object using one array and loop through the second array to check whether each value exists as a key.
function intersection(arr1, arr2) {
var myObj = {};
var myArr = [];
for (var i = 0, len = arr1.length; i < len; i += 1) {
if(myObj[arr1[i]]) {
myObj[arr1[i]] += 1;
} else {
myObj[arr1[i]] = 1;
}
}
for (var j = 0, len = arr2.length; j < len; j += 1) {
if(myObj[arr2[j]] && myArr.indexOf(arr2[j]) === -1) {
myArr.push(arr2[j]);
}
}
return myArr;
}
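A quick check with the arrays from the question (my own addition):
console.log(intersection([1, 2, 3], [2, 3, 4, 5])); // [2, 3]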
I think using an object internally can help with computations and could be performant too.
// Approach maintains a count of each element and works for negative elements too
function intersect(a,b){
const A = {};
a.forEach((v)=>{A[v] ? ++A[v] : A[v] = 1});
const B = {};
b.forEach((v)=>{B[v] ? ++B[v] : B[v] = 1});
const C = {};
Object.entries(A).map((x)=>C[x[0]] = Math.min(x[1],B[x[0]]))
return Object.entries(C).map((x)=>Array(x[1]).fill(Number(x[0]))).flat();
}
const x = [1,1,-1,-1,0,0,2,2];
const y = [2,0,1,1,1,1,0,-1,-1,-1];
const result = intersect(x,y);
console.log(result); // (7) [0, 0, 1, 1, 2, -1, -1]
I am using a Map here, even though a plain object could be used.
//find intersection of 2 arrs
const intersections = (arr1,arr2) => {
let arrf = arr1.concat(arr2)
let map = new Map();
let union = [];
for(let i=0; i<arrf.length; i++){
if(map.get(arrf[i])){
map.set(arrf[i],false);
}else{
map.set(arrf[i],true);
}
}
map.forEach((v,k)=>{if(!v){union.push(k);}})
return union;
}
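A quick check with the arrays from the question (note that this toggling approach assumes neither input array contains duplicate values of its own):
console.log(intersections([1, 2, 3], [2, 3, 4, 5])); // [2, 3]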
This is a proposed standard: with the current stage 2 proposal https://github.com/tc39/proposal-set-methods, you could use
mySet.intersection(mySet2);
Until then, you could use Immutable.js's Set, which inspired that proposal
Immutable.Set(mySet).intersect(mySet2)
I extended tarulen's answer to work with any number of arrays. It also should work with non-integer values.
function intersect() {
const last = arguments.length - 1;
var seen={};
var result=[];
for (var i = 0; i < last; i++) {
for (var j = 0; j < arguments[i].length; j++) {
if (seen[arguments[i][j]]) {
seen[arguments[i][j]] += 1;
}
else if (!i) {
seen[arguments[i][j]] = 1;
}
}
}
for (var i = 0; i < arguments[last].length; i++) {
if ( seen[arguments[last][i]] === last)
result.push(arguments[last][i]);
}
return result;
}
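A usage sketch with three arrays (the sample data is my own, not from the answer):
console.log(intersect([1, 2, 3], [2, 3, 4], [3, 2, 9])); // [3, 2]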
If your arrays are sorted, this should run in O(n), where n is min( a.length, b.length )
function intersect_1d( a, b ){
var out=[], ai=0, bi=0, acurr, bcurr, last=Number.MIN_SAFE_INTEGER;
while( ( acurr=a[ai] )!==undefined && ( bcurr=b[bi] )!==undefined ){
if( acurr < bcurr){
if( last===acurr ){
out.push( acurr );
}
last=acurr;
ai++;
}
else if( acurr > bcurr){
if( last===bcurr ){
out.push( bcurr );
}
last=bcurr;
bi++;
}
else {
out.push( acurr );
last=acurr;
ai++;
bi++;
}
}
return out;
}