How do I access this data - javascript

I'm currently going through Codesmith's CSX and I came across this problem. The idea is to create a function that takes two callbacks as arguments and returns a single callback that passes an input if that input passes either of the original callbacks. I can't figure out how to access the input data. Sorry if I'm not clear enough.
function eitherCallback(cb1, cb2) {
// ADD CODE HERE
}
//please try to solve without editing the code underneath
function filterArray(array, callback) {
const newArray = [];
for (let i = 0; i < array.length; i += 1) {
if (callback(array[i], i, array)) newArray.push(array[i]);
}
return newArray;
}
const arrOfNums = [10, 35, 105, 9];
const integerSquareRoot = n => Math.sqrt(n) % 1 === 0;
const over100 = n => n > 100;
const intSqRtOrOver100 = eitherCallback(integerSquareRoot, over100);
console.log(filterArray(arrOfNums, intSqRtOrOver100)); // should log: [105, 9]

You just need to make eitherCallback return a function which takes an array item and tests whether it passes cb1's test or cb2's test:
function eitherCallback(cb1, cb2) {
return item => cb1(item) || cb2(item);
}
//please try to solve without editing the code underneath
function filterArray(array, callback) {
const newArray = [];
for (let i = 0; i < array.length; i += 1) {
if (callback(array[i], i, array)) newArray.push(array[i]);
}
return newArray;
}
const arrOfNums = [10, 35, 105, 9];
const integerSquareRoot = n => Math.sqrt(n) % 1 === 0;
const over100 = n => n > 100;
const intSqRtOrOver100 = eitherCallback(integerSquareRoot, over100);
console.log(filterArray(arrOfNums, intSqRtOrOver100)); // should log: [105, 9]
Another way to think of it - if you have two callbacks and you want to filter an array by whether an item passes either callback, you would do:
arr.filter(
item => cb1(item) || cb2(item)
);
This is the same sort of thing, except that the function is funneled through some slightly confusing levels of additional abstraction.
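To make those layers concrete, here is a rough trace of the call chain, using the names defined in the snippet above:
const combined = eitherCallback(integerSquareRoot, over100); // item => cb1(item) || cb2(item)
// Inside filterArray, each element is handed to the combined callback:
combined(10);  // both tests fail, so 10 is dropped
combined(105); // over100(105) is true, so 105 is kept
combined(9);   // integerSquareRoot(9) is true (Math.sqrt(9) % 1 === 0), so 9 is kept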

Related

Combining 2 Callback Functions into One

Here is the question...
"Add code to the function eitherCallback in the place marked "ADD CODE HERE" in order to achieve the desired console logs. The result of using eitherCallback to combine two callbacks into one callback and then passing that one callback into filterArray should match the results of simply passing the two callbacks into eitherFilter in the previous challenge."
Here is the previous challenge's solution which I know works...
function eitherFilter(array, callback1, callback2) {
// ADD CODE HERE
const newArr = [];
for (let i = 0; i < array.length; i++) {
if (callback1(array[i]) || callback2(array[i])) {
newArr.push(array[i]);
}
}
return newArr;
}
// Uncomment these to check your work!
const arrOfNums = [10, 35, 105, 9];
const integerSquareRoot = n => Math.sqrt(n) % 1 === 0;
const over100 = n => n > 100;
console.log(eitherFilter(arrOfNums, integerSquareRoot, over100)); // should log: [105, 9]
Here is the code given...
function eitherCallback(callback1, callback2) {
// ADD CODE HERE
}
// Uncomment these to check your work!
function filterArray(array, callback) {
const newArray = [];
for (let i = 0; i < array.length; i += 1) {
if (callback(array[i], i, array)) newArray.push(array[i]);
}
return newArray;
}
const arrOfNums = [10, 35, 105, 9];
const integerSquareRoot = n => Math.sqrt(n) % 1 === 0;
const over100 = n => n > 100;
const intSqRtOrOver100 = eitherCallback(integerSquareRoot, over100);
console.log(filterArray(arrOfNums, intSqRtOrOver100)); // should log: [105, 9]
I am confused as to what to do. Can anyone give me some tips? I do not know how to even start to answer it!
Thanks in advance...
You don't need much - all you need is to make eitherCallback a higher-order function that takes the two callbacks as its initial arguments and returns a function that executes the logic you're already carrying out here:
callback1(array[i]) || callback2(array[i])
Like:
const eitherCallback = (callback1, callback2) => x => callback1(x) || callback2(x);
// Uncomment these to check your work!
function filterArray(array, callback) {
const newArray = [];
for (let i = 0; i < array.length; i += 1) {
if (callback(array[i], i, array)) newArray.push(array[i]);
}
return newArray;
}
const arrOfNums = [10, 35, 105, 9];
const integerSquareRoot = n => Math.sqrt(n) % 1 === 0;
const over100 = n => n > 100;
const intSqRtOrOver100 = eitherCallback(integerSquareRoot, over100);
console.log(filterArray(arrOfNums, intSqRtOrOver100)); // should log: [105, 9]
You could also try:
function eitherCallback(callback1, callback2) {
// ADD CODE HERE
return (element, i, array) => {
//element representing array[i]
return callback1(element, i, array) || callback2(element, i, array);
}
}
The idea is for eitherCallback to return a callback that yields a truthy value when either of the original callbacks does. Also, the x in the previous solution represents array[i] (the element); the i and array arguments that filterArray passes along are simply not used. Writing it with a single x is just a cleaner way to express the same thing and keeps things DRY.
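To illustrate the point about the extra arguments with the callbacks from the challenge: since integerSquareRoot and over100 each declare only one parameter, the i and array values that filterArray passes along are simply ignored.
// filterArray calls callback(array[i], i, array); arguments beyond the first are ignored:
over100(105, 2, [10, 35, 105, 9]); // true - only the first argument, 105, is used
over100(35, 1, [10, 35, 105, 9]);  // false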

How to remove a value if it has duplicates in an array? [duplicate]

Let's assume that I have ;
var array = [1,2,3,4,4,5,5];
I want it to be;
var newArray = [1,2,3];
I want to remove the duplicates completely rather than keeping them as unique values. Is there a way to achieve that through the reduce method?
You could use Array#filter with Array#indexOf and Array#lastIndexOf and return only the values which share the same index.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(function (v, _, a) {
return a.indexOf(v) === a.lastIndexOf(v);
});
console.log(result);
Another approach: take a Map and set the value for a key to false once that key has been seen before, then filter the array by looking up each element's value in the map.
var array = [1, 2, 3, 4, 4, 5, 5],
result = array.filter(
Map.prototype.get,
array.reduce((m, v) => m.set(v, !m.has(v)), new Map)
);
console.log(result);
I guess it won't have remarkable performance, but I like the idea.
var array = [1,2,3,4,4,5,5],
res = array.reduce(function(s,a) {
if (array.filter(v => v !== a).length == array.length-1) {
s.push(a);
}
return s;
}, []);
console.log(res);
A slightly more efficient solution would be to loop over the array once, counting the number of occurrences of each value and storing them in an object using .reduce(), and then loop over the array again with .filter() to return only the items that occur once.
This method will also preserve the order of the array, as it merely uses the object keys as references - it does not iterate over them.
var array = [1,2,3,4,4,5,5];
var valueCounts = array.reduce((result, item) => {
if (!result[item]) {
result[item] = 0;
}
result[item]++;
return result;
}, {});
var unique = array.filter(function (elem) {
return !valueCounts[elem] || valueCounts[elem] <= 1;
});
console.log(unique)
Another option is to use an object to track how many times an element is used. This will destroy the array order, but it should be much faster on very large arrays.
// Both versions destroy array order.
// ES6 version
function nukeDuplications(arr) {
"use strict";
const hash = {};
arr.forEach(el => {
const qty = hash[el] || 0;
hash[el] = qty + 1;
});
const ret = [];
Object.keys(hash).forEach(key => {
if (hash[key] === 1) {
ret.push(Number(key));
}
})
return ret;
}
// ES5 version
function nukeDuplicationsEs5(arr) {
"use strict";
var hash = {};
for (var i = 0; i < arr.length; i++) {
var el = arr[i];
var qty = hash[el] || 0;
hash[el] = qty + 1;
};
var ret = [];
for (let key in hash) {
if (hash.hasOwnProperty(key)) {
if (hash[key] === 1) {
ret.push(Number(key));
}
}
}
return ret;
}
var array = [1, 2, 3, 4, 4, 5, 5];
console.log(nukeDuplications(array));
console.log(nukeDuplicationsEs5(array));
There is a lot of over-complicated, slow-running code here. Here's my solution:
let numbers = [1,2,3,4,4,4,4,5,5]
let filtered = []
numbers.map((n) => {
if(numbers.indexOf(n) === numbers.lastIndexOf(n)) // If only 1 instance of n
filtered.push(n)
})
console.log(filtered)
you can use this function:
function isUniqueInArray(array, value) {
let counter = 0;
for (let index = 0; index < array.length; index++) {
if (array[index] === value) {
counter++;
}
}
if (counter === 0) {
return null;
}
return counter === 1 ? true : false;
}
const array = [1,2,3,4,4,5,5];
let uniqueValues = [];
array.forEach(element => {
if(isUniqueInArray(array ,element)){
uniqueValues.push(element);
}
});
console.log(`the unique values is ${uniqueValues}`);
If it helps you, you can install the isUniqueInArray function from my package https://www.npmjs.com/package/jotils or directly from bit https://bit.dev/joshk/jotils/is-unique-in-array.
My answer uses map and filter, as below:
x = [1,2,3,4,2,3]
x.map(d => x.filter(i => i == d).length < 2 ? d : null).filter(d => d != null)
// [1, 4]
Object.values is supported since ES2017 (needless to say, not on IE).
The accumulator is an object whose keys are the array values, so duplicates are removed as they overwrite the same key.
However, this solution can be risky with misbehaving values (null, undefined, etc.), but it may be useful for real-life scenarios.
let NukeDeps = (arr) => {
return Object.values(arr.reduce((curr, i) => {
curr[i] = i;
return curr;
}, {}))
}
I would like to answer my own question with a solution I came up with after reading it again:
const array = [1, 2, 3, 4, 4, 5, 5];
const filtered = array.filter(item => {
const { length } = array.filter(currentItem => currentItem === item)
if (length === 1) {
return true;
}
});
console.log(filtered)
//Try with this code
var arr = [1,2, 3,3,4,5,5,5,6,6];
arr = arr.filter( function( item, index, inputArray ) {
return inputArray.indexOf(item) == index;
});
Also look into this link https://fiddle.jshell.net/5hshjxvr/

How to get unique sub arrays within an array in Javascript

So I have this function that finds pairs of numbers within an array whose sum equals a given target number.
The function gives out all the possible combinations that achieve the target number, but I only want unique sub-arrays.
let targetNum = 10
const array = [5,2,3,3,7,1,5]
const sumTwoNums = (array,num) => {
let newArray = [];
for (var i=0;i<array.length;i++) {
for (var j=0;j<array.length;j++) {
if(i!==j && array[i]+array[j]===num){
newArray.push([array[i],array[j]]);
}
}
}
return newArray
}// returns [[5,5],[3,7],[3,7],[7,3],[7,3],[5,5]]
What can I do to solve this issue?
I think this answer solves your problem.
function allPairs(ints, s) {
var hash = Object.create(null),
i,
value,
pairs = [];
for (i = 0; i < ints.length; i++) {
value = ints[i];
if (hash[value]) pairs.push([s - value, value]);
hash[s - value] = true;
}
return pairs;
}
console.log(allPairs([7, 2, 5, 8, 4, 3], 7));
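Applied to the data from the question, this avoids the repeated pairs for that input (a quick check, run as-is):
console.log(allPairs([5, 2, 3, 3, 7, 1, 5], 10)); // [[3, 7], [5, 5]]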

Compare multiple arrays for common values [duplicate]

What's the simplest, library-free code for implementing array intersections in javascript? I want to write
intersection([1,2,3], [2,3,4,5])
and get
[2, 3]
Use a combination of Array.prototype.filter and Array.prototype.includes:
const filteredArray = array1.filter(value => array2.includes(value));
For older browsers, with Array.prototype.indexOf and without an arrow function:
var filteredArray = array1.filter(function(n) {
return array2.indexOf(n) !== -1;
});
NB! Both .includes and .indexOf internally compare elements in the array by using ===, so if the array contains objects it will only compare object references (not their content). If you want to specify your own comparison logic, use Array.prototype.some instead.
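For example, a minimal sketch of intersecting arrays of objects by a chosen property with Array.prototype.some (the id field here is just an assumed example property):
const users1 = [{ id: 1 }, { id: 2 }, { id: 3 }];
const users2 = [{ id: 2 }, { id: 3 }, { id: 4 }];
const common = users1.filter(a => users2.some(b => b.id === a.id));
console.log(common); // [{ id: 2 }, { id: 3 }]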
Destructive seems simplest, especially if we can assume the input is sorted:
/* destructively finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
* State of input arrays is undefined when
* the function returns. They should
* probably be discarded.
*
* Should have O(n) operations, where n is
* n = MIN(a.length, b.length)
*/
function intersection_destructive(a, b)
{
var result = [];
while( a.length > 0 && b.length > 0 )
{
if (a[0] < b[0] ){ a.shift(); }
else if (a[0] > b[0] ){ b.shift(); }
else /* they're equal */
{
result.push(a.shift());
b.shift();
}
}
return result;
}
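For instance, with the arrays from the question (note how the inputs are consumed):
var a = [1, 2, 3], b = [2, 3, 4, 5];
console.log(intersection_destructive(a, b)); // [2, 3]
console.log(a, b); // [] and [4, 5] - the originals have been shifted away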
Non-destructive has to be a hair more complicated, since we’ve got to track indices:
/* finds the intersection of
* two arrays in a simple fashion.
*
* PARAMS
* a - first array, must already be sorted
* b - second array, must already be sorted
*
* NOTES
*
* Should have O(n) operations, where n is
* n = MIN(a.length, b.length)
*/
function intersect_safe(a, b)
{
var ai=0, bi=0;
var result = [];
while( ai < a.length && bi < b.length )
{
if (a[ai] < b[bi] ){ ai++; }
else if (a[ai] > b[bi] ){ bi++; }
else /* they're equal */
{
result.push(a[ai]);
ai++;
bi++;
}
}
return result;
}
If your environment supports the ECMAScript 6 Set, one simple and supposedly efficient way:
function intersect(a, b) {
var setA = new Set(a);
var setB = new Set(b);
var intersection = new Set([...setA].filter(x => setB.has(x)));
return Array.from(intersection);
}
Shorter, but less readable (also without creating the additional intersection Set):
function intersect(a, b) {
var setB = new Set(b);
return [...new Set(a)].filter(x => setB.has(x));
}
Note that when using sets you will only get distinct values, thus new Set([1, 2, 3, 3]).size evaluates to 3.
Using Underscore.js or lodash.js
_.intersection( [0,345,324] , [1,0,324] ) // gives [0,324]
// Return elements of array a that are also in b in linear time:
function intersect(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
// Example:
console.log(intersect([1,2,3], [2,3,4,5]));
I recommend the succinct solution above, which outperforms the other implementations on large inputs. If performance on small inputs matters, check the alternatives below.
Alternatives and performance comparison:
See the following snippet for alternative implementations and check https://jsperf.com/array-intersection-comparison for performance comparisons.
function intersect_for(a, b) {
const result = [];
const alen = a.length;
const blen = b.length;
for (let i = 0; i < alen; ++i) {
const ai = a[i];
for (let j = 0; j < blen; ++j) {
if (ai === b[j]) {
result.push(ai);
break;
}
}
}
return result;
}
function intersect_filter_indexOf(a, b) {
return a.filter(el => b.indexOf(el) !== -1);
}
function intersect_filter_in(a, b) {
const map = b.reduce((map, el) => {map[el] = true; return map}, {});
return a.filter(el => el in map);
}
function intersect_for_in(a, b) {
const result = [];
const map = {};
for (let i = 0, length = b.length; i < length; ++i) {
map[b[i]] = true;
}
for (let i = 0, length = a.length; i < length; ++i) {
if (a[i] in map) result.push(a[i]);
}
return result;
}
function intersect_filter_includes(a, b) {
return a.filter(el => b.includes(el));
}
function intersect_filter_has_this(a, b) {
return a.filter(Set.prototype.has, new Set(b));
}
function intersect_filter_has_arrow(a, b) {
const set = new Set(b);
return a.filter(el => set.has(el));
}
function intersect_for_has(a, b) {
const result = [];
const set = new Set(b);
for (let i = 0, length = a.length; i < length; ++i) {
if (set.has(a[i])) result.push(a[i]);
}
return result;
}
Results in Firefox 53:
Ops/sec on large arrays (10,000 elements):
filter + has (this) 523 (this answer)
for + has 482
for-loop + in 279
filter + in 242
for-loops 24
filter + includes 14
filter + indexOf 10
Ops/sec on small arrays (100 elements):
for-loop + in 384,426
filter + in 192,066
for-loops 159,137
filter + includes 104,068
filter + indexOf 71,598
filter + has (this) 43,531 (this answer)
filter + has (arrow function) 35,588
My contribution in ES6 terms. In general, it finds the intersection of an array with an indefinite number of arrays provided as arguments.
Array.prototype.intersect = function(...a) {
return [this,...a].reduce((p,c) => p.filter(e => c.includes(e)));
}
var arrs = [[0,2,4,6,8],[4,5,6,7],[4,6]],
arr = [0,1,2,3,4,5,6,7,8,9];
document.write("<pre>" + JSON.stringify(arr.intersect(...arrs)) + "</pre>");
How about just using associative arrays?
function intersect(a, b) {
var d1 = {};
var d2 = {};
var results = [];
for (var i = 0; i < a.length; i++) {
d1[a[i]] = true;
}
for (var j = 0; j < b.length; j++) {
d2[b[j]] = true;
}
for (var k in d1) {
if (d2[k])
results.push(k);
}
return results;
}
edit:
// new version
function intersect(a, b) {
var d = {};
var results = [];
for (var i = 0; i < b.length; i++) {
d[b[i]] = true;
}
for (var j = 0; j < a.length; j++) {
if (d[a[j]])
results.push(a[j]);
}
return results;
}
The performance of #atk's implementation for sorted arrays of primitives can be improved by using .pop rather than .shift.
function intersect(array1, array2) {
var result = [];
// Don't destroy the original arrays
var a = array1.slice(0);
var b = array2.slice(0);
var aLast = a.length - 1;
var bLast = b.length - 1;
while (aLast >= 0 && bLast >= 0) {
if (a[aLast] > b[bLast] ) {
a.pop();
aLast--;
} else if (a[aLast] < b[bLast] ){
b.pop();
bLast--;
} else /* they're equal */ {
result.push(a.pop());
b.pop();
aLast--;
bLast--;
}
}
return result;
}
I created a benchmark using jsPerf. It's about three times faster to use .pop.
If you need to have it handle intersecting multiple arrays:
const intersect = (a1, a2, ...rest) => {
const a12 = a1.filter(value => a2.includes(value))
if (rest.length === 0) { return a12; }
return intersect(a12, ...rest);
};
console.log(intersect([1,2,3,4,5], [1,2], [1, 2, 3,4,5], [2, 10, 1]))
Sort both arrays, then check them one by one from index 0, creating a new array from the matches.
Something like this (not tested well, though):
function intersection(x,y){
x.sort();y.sort();
var i = 0, j = 0, ret = [];
while(i<x.length && j<y.length){
if(x[i]<y[j])i++;
else if(y[j]<x[i])j++;
else {
ret.push(x[i]);
i++,j++;
}
}
return ret;
}
alert(intersection([1,2,3], [2,3,4,5]));
PS: The algorithm is only intended for numbers and normal strings; intersection of arbitrary object arrays may not work.
Using jQuery:
var a = [1,2,3];
var b = [2,3,4,5];
var c = $(b).not($(b).not(a));
alert(c);
A tiny tweak to the smallest one here (the filter/indexOf solution), namely creating an index of the values in one of the arrays using a JavaScript object, will reduce it from O(N*M) to "probably" linear time.
function intersect(a, b) {
var aa = {};
a.forEach(function(v) { aa[v]=1; });
return b.filter(function(v) { return v in aa; });
}
This isn't the very simplest solution (it's more code than filter+indexOf), nor is it the very fastest (probably slower by a constant factor than intersect_safe()), but seems like a pretty good balance. It is on the very simple side, while providing good performance, and it doesn't require pre-sorted inputs.
For arrays containing only strings or numbers you can do something with sorting, as per some of the other answers. For the general case of arrays of arbitrary objects I don't think you can avoid doing it the long way. The following will give you the intersection of any number of arrays provided as parameters to arrayIntersection:
var arrayContains = Array.prototype.indexOf ?
function(arr, val) {
return arr.indexOf(val) > -1;
} :
function(arr, val) {
var i = arr.length;
while (i--) {
if (arr[i] === val) {
return true;
}
}
return false;
};
function arrayIntersection() {
var val, arrayCount, firstArray, i, j, intersection = [], missing;
var arrays = Array.prototype.slice.call(arguments); // Convert arguments into a real array
// Search for common values
firstArray = arrays.pop();
if (firstArray) {
j = firstArray.length;
arrayCount = arrays.length;
while (j--) {
val = firstArray[j];
missing = false;
// Check val is present in each remaining array
i = arrayCount;
while (!missing && i--) {
if ( !arrayContains(arrays[i], val) ) {
missing = true;
}
}
if (!missing) {
intersection.push(val);
}
}
}
return intersection;
}
arrayIntersection( [1, 2, 3, "a"], [1, "a", 2], ["a", 1] ); // Gives [1, "a"];
Simplest, fastest O(n) and shortest way:
function intersection (a, b) {
const setA = new Set(a);
return b.filter(value => setA.has(value));
}
console.log(intersection([1,2,3], [2,3,4,5]))
#nbarbosa has almost the same answer but he cast both arrays to Set and then back to array. There is no need for any extra casting.
Another indexed approach able to process any number of arrays at once:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = 0;
index[v]++;
};
};
var retv = [];
for (var i in index) {
if (index[i] == arrLength) retv.push(i);
};
return retv;
};
It works only for values that can be evaluated as strings and you should pass them as an array like:
intersect ([arr1, arr2, arr3...]);
...but it transparently accepts objects as parameter or as any of the elements to be intersected (always returning array of common values). Examples:
intersect ({foo: [1, 2, 3, 4], bar: {a: 2, j:4}}); // [2, 4]
intersect ([{x: "hello", y: "world"}, ["hello", "user"]]); // ["hello"]
EDIT: I just noticed that this is, in a way, slightly buggy.
That is: I coded it assuming that the input arrays cannot themselves contain repetitions (as the provided example doesn't).
But if the input arrays happen to contain repetitions, that would produce wrong results. Example (using the implementation below):
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]);
// Expected: [ '1' ]
// Actual: [ '1', '3' ]
Fortunately this is easy to fix by simply adding a second level of indexing. That is:
Change:
if (index[v] === undefined) index[v] = 0;
index[v]++;
by:
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
...and:
if (index[i] == arrLength) retv.push(i);
by:
if (Object.keys(index[i]).length == arrLength) retv.push(i);
Complete example:
// Calculate intersection of multiple array or object values.
function intersect (arrList) {
var arrLength = Object.keys(arrList).length;
// (Also accepts regular objects as input)
var index = {};
for (var i in arrList) {
for (var j in arrList[i]) {
var v = arrList[i][j];
if (index[v] === undefined) index[v] = {};
index[v][i] = true; // Mark as present in i input.
};
};
var retv = [];
for (var i in index) {
if (Object.keys(index[i]).length == arrLength) retv.push(i);
};
return retv;
};
intersect ([[1, 3, 4, 6, 3], [1, 8, 99]]); // [ '1' ]
With some restrictions on your data, you can do it in linear time!
For positive integers: use an array mapping the values to a "seen/not seen" boolean.
function intersectIntegers(array1,array2) {
var seen=[],
result=[];
for (var i = 0; i < array1.length; i++) {
seen[array1[i]] = true;
}
for (var i = 0; i < array2.length; i++) {
if ( seen[array2[i]])
result.push(array2[i]);
}
return result;
}
There is a similar technique for objects: take a dummy key, set it to "true" for each element in array1, then look for this key in elements of array2. Clean up when you're done.
function intersectObjects(array1,array2) {
var result=[];
var key="tmpKey_intersect"
for (var i = 0; i < array1.length; i++) {
array1[i][key] = true;
}
for (var i = 0; i < array2.length; i++) {
if (array2[i][key])
result.push(array2[i]);
}
for (var i = 0; i < array1.length; i++) {
delete array1[i][key];
}
return result;
}
Of course you need to be sure the key didn't appear before, otherwise you'll be destroying your data...
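A small usage sketch; note that this relies on both arrays containing the same object references, since the temporary key is written onto the objects themselves:
var bob = { name: "Bob" }, jill = { name: "Jill" }, doug = { name: "Doug" };
console.log(intersectObjects([bob, jill], [jill, doug])); // [{ name: "Jill" }]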
function intersection(A,B){
var result = new Array();
for (i=0; i<A.length; i++) {
for (j=0; j<B.length; j++) {
if (A[i] == B[j] && $.inArray(A[i],result) == -1) {
result.push(A[i]);
}
}
}
return result;
}
For simplicity:
// Usage
const intersection = allLists
.reduce(intersect, allValues)
.reduce(removeDuplicates, []);
// Implementation
const intersect = (intersection, list) =>
intersection.filter(item =>
list.some(x => x === item));
const removeDuplicates = (uniques, item) =>
uniques.includes(item) ? uniques : uniques.concat(item);
// Example Data
const somePeople = [bob, doug, jill];
const otherPeople = [sarah, bob, jill];
const morePeople = [jack, jill];
const allPeople = [...somePeople, ...otherPeople, ...morePeople];
const allGroups = [somePeople, otherPeople, morePeople];
// Example Usage
const intersection = allGroups
.reduce(intersect, allPeople)
.reduce(removeDuplicates, []);
intersection; // [jill]
Benefits:
dirt simple
data-centric
works for arbitrary number of lists
works for arbitrary lengths of lists
works for arbitrary types of values
works for arbitrary sort order
retains shape (order of first appearance in any array)
exits early where possible
memory safe, short of tampering with Function / Array prototypes
Drawbacks:
higher memory usage
higher CPU usage
requires an understanding of reduce
requires understanding of data flow
You wouldn't want to use this for 3D engine or kernel work, but if you have problems getting this to run in an event-based app, your design has bigger problems.
I'll contribute with what has been working out best for me:
if (!Array.prototype.intersect){
Array.prototype.intersect = function (arr1) {
var r = [], o = {}, l = this.length, i, v;
for (i = 0; i < l; i++) {
o[this[i]] = true;
}
l = arr1.length;
for (i = 0; i < l; i++) {
v = arr1[i];
if (v in o) {
r.push(v);
}
}
return r;
};
}
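Usage, with the arrays from the question:
console.log([1, 2, 3].intersect([2, 3, 4, 5])); // [2, 3]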
A functional approach with ES2015
A functional approach must consider using only pure functions without side effects, each of which is only concerned with a single job.
These restrictions enhance the composability and reusability of the functions involved.
// small, reusable auxiliary functions
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const apply = f => x => f(x);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// run it
console.log( intersect(xs) (ys) );
Please note that the native Set type is used, which has an advantageous
lookup performance.
Avoid duplicates
Obviously, repeatedly occurring items from the first Array are preserved, while the second Array is de-duplicated. This may or may not be the desired behavior. If you need a unique result, just apply dedupe to the first argument:
// auxiliary functions
const apply = f => x => f(x);
const comp = f => g => x => f(g(x));
const afrom = apply(Array.from);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// de-duplication
const dedupe = comp(afrom) (createSet);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
// unique result
console.log( intersect(dedupe(xs)) (ys) );
Compute the intersection of any number of Arrays
If you want to compute the intersection of an arbitrary number of Arrays, just compose intersect with foldl. Here is a convenience function:
// auxiliary functions
const apply = f => x => f(x);
const uncurry = f => (x, y) => f(x) (y);
const createSet = xs => new Set(xs);
const filter = f => xs => xs.filter(apply(f));
const foldl = f => acc => xs => xs.reduce(uncurry(f), acc);
// intersection
const intersect = xs => ys => {
const zs = createSet(ys);
return filter(x => zs.has(x)
? true
: false
) (xs);
};
// intersection of an arbitrarily number of Arrays
const intersectn = (head, ...tail) => foldl(intersect) (head) (tail);
// mock data
const xs = [1,2,2,3,4,5];
const ys = [0,1,2,3,3,3,6,7,8,9];
const zs = [0,1,2,3,4,5,6];
// run
console.log( intersectn(xs, ys, zs) );
.reduce to build a map, and .filter to find the intersection. delete within the .filter allows us to treat the second array as though it's a unique set.
function intersection (a, b) {
var seen = a.reduce(function (h, k) {
h[k] = true;
return h;
}, {});
return b.filter(function (k) {
var exists = seen[k];
delete seen[k];
return exists;
});
}
I find this approach pretty easy to reason about. The lookups are constant time, so the whole intersection runs in linear time.
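Because each key is deleted the first time it matches, repeated values in the second array only count once:
console.log(intersection([1, 2, 3], [2, 2, 3, 4])); // [2, 3] - the second 2 is ignored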
I have written an intersection function which can even detect the intersection of arrays of objects based on a particular property of those objects.
For instance,
if arr1 = [{id: 10}, {id: 20}]
and arr2 = [{id: 20}, {id: 25}]
and we want the intersection based on the id property, then the output should be:
[{id: 20}]
As such, the function (note: ES6 code) is:
const intersect = (arr1, arr2, accessors = [v => v, v => v]) => {
const [fn1, fn2] = accessors;
const set = new Set(arr2.map(v => fn2(v)));
return arr1.filter(value => set.has(fn1(value)));
};
and you can call the function as:
intersect(arr1, arr2, [elem => elem.id, elem => elem.id])
Also note: this function treats the first array as the primary array, so the intersection result consists of elements of the primary array.
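Putting it together with the example data above:
const arr1 = [{ id: 10 }, { id: 20 }];
const arr2 = [{ id: 20 }, { id: 25 }];
console.log(intersect(arr1, arr2, [elem => elem.id, elem => elem.id])); // [{ id: 20 }]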
This function avoids the N^2 problem by taking advantage of the power of dictionaries. It loops through each array only once, plus a third, shorter loop to build the final result.
It also supports numbers, strings, and objects.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [];
// Returns a unique reference string for the type and value of the element
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
array1.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
}
});
array2.forEach(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
mergedElems[key].inArray2 = true;
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
return result;
}
If there is a special case this does not handle, it can surely be solved just by modifying the generateStrKey function. The trick of this function is that it represents each distinct piece of data uniquely according to its type and value.
This variant has some performance improvements. It skips the loops entirely if either array is empty, and it walks through the shorter array first, so if all of the shorter array's values are found in the other array it exits that loop early.
function array_intersect(array1, array2)
{
var mergedElems = {},
result = [],
firstArray, secondArray,
firstN = 0,
secondN = 0;
function generateStrKey(elem) {
var typeOfElem = typeof elem;
if (typeOfElem === 'object') {
typeOfElem += Object.prototype.toString.call(elem);
}
return [typeOfElem, elem.toString(), JSON.stringify(elem)].join('__');
}
// Executes the loops only if both arrays have values
if (array1.length && array2.length)
{
// Begins with the shortest array to optimize the algorithm
if (array1.length < array2.length) {
firstArray = array1;
secondArray = array2;
} else {
firstArray = array2;
secondArray = array1;
}
firstArray.forEach(function(elem) {
var key = generateStrKey(elem);
if (!(key in mergedElems)) {
mergedElems[key] = {elem: elem, inArray2: false};
// Increases the counter of unique values in the first array
firstN++;
}
});
secondArray.some(function(elem) {
var key = generateStrKey(elem);
if (key in mergedElems) {
if (!mergedElems[key].inArray2) {
mergedElems[key].inArray2 = true;
// Increases the counter of matches
secondN++;
// If all elements of first array have coincidence, then exits the loop
return (secondN === firstN);
}
}
});
Object.values(mergedElems).forEach(function(elem) {
if (elem.inArray2) {
result.push(elem.elem);
}
});
}
return result;
}
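A quick check (either version gives the same result) showing that the type-aware key keeps the number 3 and the string "3" distinct:
console.log(array_intersect([1, 2, 3, "3"], [2, "3", 4])); // [2, "3"]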
Here is underscore.js implementation:
_.intersection = function(array) {
if (array == null) return [];
var result = [];
var argsLength = arguments.length;
for (var i = 0, length = array.length; i < length; i++) {
var item = array[i];
if (_.contains(result, item)) continue;
for (var j = 1; j < argsLength; j++) {
if (!_.contains(arguments[j], item)) break;
}
if (j === argsLength) result.push(item);
}
return result;
};
Source: http://underscorejs.org/docs/underscore.html#section-62
Create an Object using one array and loop through the second array to check if the value exists as key.
function intersection(arr1, arr2) {
var myObj = {};
var myArr = [];
for (var i = 0, len = arr1.length; i < len; i += 1) {
if(myObj[arr1[i]]) {
myObj[arr1[i]] += 1;
} else {
myObj[arr1[i]] = 1;
}
}
for (var j = 0, len = arr2.length; j < len; j += 1) {
if(myObj[arr2[j]] && myArr.indexOf(arr2[j]) === -1) {
myArr.push(arr2[j]);
}
}
return myArr;
}
I think using an object internally can help with computations and could be performant too.
// Approach maintains a count of each element and works for negative elements too
function intersect(a,b){
const A = {};
a.forEach((v)=>{A[v] ? ++A[v] : A[v] = 1});
const B = {};
b.forEach((v)=>{B[v] ? ++B[v] : B[v] = 1});
const C = {};
Object.entries(A).map((x)=>C[x[0]] = Math.min(x[1],B[x[0]]))
return Object.entries(C).map((x)=>Array(x[1]).fill(Number(x[0]))).flat();
}
const x = [1,1,-1,-1,0,0,2,2];
const y = [2,0,1,1,1,1,0,-1,-1,-1];
const result = intersect(x,y);
console.log(result); // (7) [0, 0, 1, 1, 2, -1, -1]
I am using a Map, though a plain object could be used.
//find intersection of 2 arrs
const intersections = (arr1,arr2) => {
let arrf = arr1.concat(arr2)
let map = new Map();
let union = [];
for(let i=0; i<arrf.length; i++){
if(map.get(arrf[i])){
map.set(arrf[i],false);
}else{
map.set(arrf[i],true);
}
}
map.forEach((v,k)=>{if(!v){union.push(k);}})
return union;
}
This is a proposed standard: with the (currently Stage 2) proposal https://github.com/tc39/proposal-set-methods, you could use
mySet.intersection(mySet2);
Until then, you could use Immutable.js's Set, which inspired that proposal
Immutable.Set(mySet).intersect(mySet2)
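A minimal sketch, assuming an environment where the proposal's Set.prototype.intersection has landed (it returns a Set, so spread it back into an array if an array is needed):
// Assumes Set.prototype.intersection from the tc39 proposal is available.
const result = [...new Set([1, 2, 3]).intersection(new Set([2, 3, 4, 5]))];
console.log(result); // [2, 3]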
I extended tarulen's answer to work with any number of arrays. It also should work with non-integer values.
function intersect() {
const last = arguments.length - 1;
var seen={};
var result=[];
for (var i = 0; i < last; i++) {
for (var j = 0; j < arguments[i].length; j++) {
if (seen[arguments[i][j]]) {
seen[arguments[i][j]] += 1;
}
else if (!i) {
seen[arguments[i][j]] = 1;
}
}
}
for (var i = 0; i < arguments[last].length; i++) {
if ( seen[arguments[last][i]] === last)
result.push(arguments[last][i]);
}
return result;
}
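For example (assuming each input array is itself duplicate-free, since repeated values within the earlier arrays would inflate the counts):
console.log(intersect([1, 2, 3], [2, 3, 4], [3, 4, 5])); // [3]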
If your arrays are sorted, this should run in O(n), where n is min( a.length, b.length )
function intersect_1d( a, b ){
var out=[], ai=0, bi=0, acurr, bcurr, last=Number.MIN_SAFE_INTEGER;
while( ( acurr=a[ai] )!==undefined && ( bcurr=b[bi] )!==undefined ){
if( acurr < bcurr){
if( last===acurr ){
out.push( acurr );
}
last=acurr;
ai++;
}
else if( acurr > bcurr){
if( last===bcurr ){
out.push( bcurr );
}
last=bcurr;
bi++;
}
else {
out.push( acurr );
last=acurr;
ai++;
bi++;
}
}
return out;
}
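A quick check on sorted inputs; note that repeated matches from the first array are carried through:
console.log(intersect_1d([1, 2, 2, 3], [2, 3, 4])); // [2, 2, 3]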
