I need help creating a function that returns the elements that are present in only one of 3 arrays, for example:
let arr1 = ['a', 'b', 'c', 'a', 'b']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a']
In the three arrays above, 'd' and 'f' are each found in only one of the arrays ('d' in arr2, 'f' in arr3), and I need to return them:
['d','f']
The arrays can be of different sizes and the returned elements must not be duplicated.
I tried to find a better alternative but failed, so I went with the brute-force approach: looping through each array and checking whether the element exists in the other two. Obviously, it's really slow and hard to read.
function elementsInOnlyOneArr(a1, a2, a3) {
  let myArr = [];
  for (let el of a1) {
    if (a2.includes(el) == false && a3.includes(el) == false && myArr.includes(el) == false) {
      myArr.push(el);
    }
  }
  for (let el of a2) {
    if (a1.includes(el) == false && a3.includes(el) == false && myArr.includes(el) == false) {
      myArr.push(el);
    }
  }
  for (let el of a3) {
    if (a2.includes(el) == false && a1.includes(el) == false && myArr.includes(el) == false) {
      myArr.push(el);
    }
  }
  return myArr;
}
Assuming there are fewer than 32 arrays, you can do this efficiently with bitmaps. Basically, build an index key -> number where the number has the Nth bit set if the key is in the Nth array. Finally, return the keys whose numbers have only a single bit set (i.e. are powers of two):
function difference(...arrays) {
  let items = {}
  for (let [n, a] of arrays.entries())
    for (let x of a) {
      items[x] = (items[x] ?? 0) | (1 << n)
    }
  return Object.keys(items).filter(x =>
    Number.isInteger(Math.log2(items[x])))
}
let arr1 = ['a', 'b', 'c', 'a', 'b', 'z', 'z', 'z']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a']
console.log(difference(arr1, arr2, arr3))
(As noted in the comments, (x & (x - 1)) === 0 would be a more idiomatic way to check whether x is a power of two; the outer parentheses are needed because === binds more tightly than & in JavaScript. See How does the formula x & (x - 1) works? for explanations.)
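For example, the filter line above could be swapped for the bitwise check like this (just a sketch of the substitution; the behavior is the same):
return Object.keys(items).filter(x => (items[x] & (items[x] - 1)) === 0)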
Here's a more general approach that doesn't limit the number of arrays and doesn't require keys to be strings:
function difference(...arrays) {
  let items = new Map
  for (let [n, a] of arrays.entries())
    for (let x of a) {
      if (!items.has(x))
        items.set(x, new Set)
      items.get(x).add(n)
    }
  let result = []
  for (let [x, ns] of items)
    if (ns.size === 1)
      result.push(x)
  return result
}
let arr1 = ['a', 'b', 'c', 'a', 'b', 'z', 'z', 'z']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a']
console.log(difference(arr1, arr2, arr3))
EDIT: I misunderstood the OP; it's not an intersection but the opposite: extracting the values that are unique to a single array. For that, this might work:
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
const thereCanOnlyBeOne = function(...arrs) {
  return Array.from(
    arrs.reduce((map, arr) => {
      new Set(arr).forEach((v) => map.set(v, map.has(v) ? map.get(v) + 1 : 1));
      return map;
    }, new Map())
  )
  .filter(([value, count]) => count === 1)
  .map(([value, count]) => value);
};
console.log(thereCanOnlyBeOne(arr1, arr2, arr3));
I would think #gog's answer is way more sophisticated and probably much faster, but I had a slightly hard time wrapping my head around it (call me stupid, I take it =D; EDIT: I had to do some research and read up on bitsets here and here), so here's the breakdown of the slightly convoluted way of doing this with a Map and array methods:
Pass all arrays to be analyzed into the function; order doesn't matter.
Loop (I chose reduce, but any loop structure works) through all input arrays and their values, counting occurrences in a Map. Since each array is first de-duplicated via new Set(arr), the Map will look as follows at the end:
0: {"a" => 4}
1: {"b" => 3}
2: {"c" => 3}
3: {"d" => 1}
4: {"f" => 1}
Once done with that, we convert the Map back into an array via Array.from(), creating an array of tuples:
[
  ["a", 3],
  ["b", 2],
  ["c", 3],
  ["d", 1],
  ["f", 1],
]
Filter that resulting array of tuples (now in the form of [<value>, <count>]) so we are only left with the values that occurred exactly once, leaving us with:
[
  ["d", 1],
  ["f", 1],
]
Map over the filtered array to "dumb" it down into a one-dimensional array again and return the result:
["d", "f"]
WARNING: Internally this code still does a ****load of loops, so call it brute force as well; it just looks "shorter" thanks to the "sexy" ES6 array syntax sugar.
For completeness, a slightly modified version: the Array.filter() step can be omitted (although the filter variant seems to be faster) by iterating the counter Map once it is finalized and simply deleting the entries whose value is not 1.
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
const thereCanOnlyBeOne = function(...arrs) {
  let result;
  arrs
    .reduce((map, arr) => {
      new Set(arr).forEach((v) => map.set(v, map.has(v) ? map.get(v) + 1 : 1));
      return map;
    }, new Map())
    // the result of .reduce will be a Map!
    .forEach((value, key, map) => { value !== 1 && map.delete(key); result = map; });
  return Array.from(result).map(([value, count]) => value);
};
console.log(thereCanOnlyBeOne(arr1, arr2, arr3));
UPDATE: As #Nick Parsons pointed out, the previous version of the code would not output elements that were present in only one array but appeared there multiple times:
This will produce an incorrect output if one array contains the same value multiple times and that element isn't present in any other array. E.g., if you remove b from arr2, then only arr1 has b in it but no others do, so b should be included in the final result.
This can easily be solved by turning each array into a Set() before counting (thereby reducing each array's values to unique ones).
If anyone (besides me) wonders, here's a benchmark comparing gog's options and mine; the bitset approach is clearly the fastest, so if you are comparing fewer than 32 arrays, that's the most performant solution by far: https://jsben.ch/YkKSu
And if anyone prefers an ES6-ified version of gog's bitset implementation (improved by #ralphmerridew's suggestion), here you go:
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
function onlyone(...arrays) {
  return Object.entries(
    arrays.reduce((map, arr, n) => {
      arr.forEach((v) => map[v] = (map[v] ?? 0) | (1 << n));
      return map;
    }, {})
  )
  .filter(([value, bitmap]) => (bitmap & (bitmap - 1)) == 0)
  .map(([value, bitmap]) => value);
}
console.log(onlyone(arr1, arr2, arr3));
I updated the benchmark with this as well. Interestingly (or unexpectedly), this "slower"-looking ES6 implementation somehow beats gog's for-loop implementation by a tad; I tested in Chrome and Firefox multiple times because I couldn't believe it myself, having assumed those syntax-sugar methods would be slightly slower than plain for loops. Good to know =)
I also tried implementing the bitset approach with BigInt() to remove the 32-array limit (depending on the engine, BigInt should make it possible to handle somewhere between 1 million and 1 billion arrays). Unfortunately, that seems to make it the slowest of all the solutions (benchmark updated):
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
function onlyoneBigInt(...arrays) {
  return Object.entries(
    arrays.reduce((map, arr, n) => {
      arr.forEach((v) => map[v] = (map[v] ?? 0n) | (1n << BigInt(n)));
      return map;
    }, {})
  )
  .filter(([value, bitmap]) => (bitmap & (bitmap - 1n)) == 0)
  .map(([value, bitmap]) => value);
}
console.log(onlyoneBigInt(arr1, arr2, arr3));
Maybe someone sees something that can be improved to make this faster?
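One small thing that might help (untested, just a sketch; the onlyoneBigIntHoisted name is only for illustration): the inner callback above recomputes 1n << BigInt(n) for every single element, so the mask could be hoisted out and computed once per array instead:
function onlyoneBigIntHoisted(...arrays) {
  return Object.entries(
    arrays.reduce((map, arr, n) => {
      const mask = 1n << BigInt(n); // computed once per array instead of once per element
      arr.forEach((v) => map[v] = (map[v] ?? 0n) | mask);
      return map;
    }, {})
  )
  .filter(([value, bitmap]) => (bitmap & (bitmap - 1n)) == 0)
  .map(([value, bitmap]) => value);
}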
This is really just Set operations. The singles method below finds any entry in a test array that does not appear in the other arrays in the collection. It's deliberately implemented so that you can test individual arrays, since it's not clear in the question whether you need to return the letters or the arrays.
let arr1 = ['a', 'b', 'c', 'a', 'b']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a']
// The set of arrays
let arrays = [ arr1, arr2, arr3 ]
// Finds any entries in the test array that don't appear in the arrays other than the test array
let singles = (test) => {
  // others is the Set of all values in the other arrays
  const others = arrays.reduce(( accum, elem ) => {
    if (elem != test) { elem.forEach(accum.add, accum) }
    return accum
  }, new Set())
  // find anything in the test array that the others do not have
  return [...new Set(test.filter( value => ! others.has(value) ))]
}
// collect results from testing all arrays
const result = []
for (const array of arrays) {
  result.push(...singles(array))
}
console.log(result)
Borrowing the parameter construction from #gog's excellent answer, you could also define it so that it takes a test array and an arbitrary collection of arrays to test against:
let singles = (test, ...arrays) => {
  // others is the Set of all values in the other arrays
  const others = arrays.reduce(( accum, elem ) => {
    if (elem != test) { elem.forEach(accum.add, accum) }
    return accum
  }, new Set())
  // find anything in the test array that the others do not have
  return [...new Set(test.filter( value => ! others.has(value) ))]
}
console.log(singles(arr2, arr1, arr2, arr3))
The advantage here is that this should work with any number of arrays, while gog's answer is probably faster for a collection of fewer than 32 arrays (or technically any number, if you were willing to extend it using BigInt, though that may lose some of the speed).
A fairly simple approach:
const inOnlyOne = (
  xss,
  keys = [... new Set (xss .flat ())],
  uniques = xss .map (xs => new Set (xs))
) => keys .filter (k => uniques .filter (f => f .has (k)) .length == 1)
console .log (inOnlyOne ([['a', 'b', 'c', 'a', 'b'], ['a', 'd', 'b', 'c'], ['f', 'c', 'a']]))
We find the list of unique keys by flattening our array of arrays, turning that into a Set, and then back into an array; we convert each of the arrays into a Set; then we filter the keys to keep only those that are included in exactly one of those Sets.
There is a little inefficiency here in that we check all the Sets when seeing if a key is in there. It would be easy enough to modify it to check only until we find a second Set, but the code would be more complex. I would only bother to do so if I found that this simple version was not performant enough for my needs.
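For what it's worth, a rough sketch of that early-exit variant might look something like this (the inOnlyOneEarlyExit name and the loop are my own illustration, not tuned or benchmarked code):
const inOnlyOneEarlyExit = (xss) => {
  const keys = [... new Set (xss .flat ())]
  const uniques = xss .map (xs => new Set (xs))
  return keys .filter (k => {
    let count = 0
    for (const s of uniques) {
      // bail out as soon as a second Set contains the key
      if (s .has (k) && ++count > 1) return false
    }
    return count === 1
  })
}
console .log (inOnlyOneEarlyExit ([['a', 'b', 'c', 'a', 'b'], ['a', 'd', 'b', 'c'], ['f', 'c', 'a']]))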
One advantage of this Set-based approach is that it works for data types other than strings and numbers:
const a = {a: 1}, b = {b: 3}, c = {c: 3}, d = {d: 4}, e = {e: 5}, f = {f: 6}
inOnlyOne ([[a, b, c, a, b], [a, d, b, c], [f, c, a]])
//=> [{d: 4}, {f: 6}]
Of course that only helps if your items are shared references. If you wanted to use value equality rather than reference equality, it would be significantly more complex.
If we wanted to pass the arrays individually, rather than wrap them in a common array, this variant should work:
const inOnlyOne = (...xss) => ((
  keys = [... new Set (xss .flat ())],
  uniques = xss .map (xs => new Set (xs))
) => keys .filter (k => uniques .filter (f => f .has (k)) .length == 1)
) ()
The Array.prototype.includes() method seems like the way to go here.
let arr1 = ['a', 'b', 'c', 'a', 'b']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a', 'f']
var arrays = [arr1,arr2,arr3];
const items = arr1.concat(arr2, arr3);
let results = [];
items.forEach(isInOneArray);
function isInOneArray(item){
  let found = 0;
  for (const arr of arrays){
    if (arr.includes(item)){
      found++;
    }
  }
  if (found === 1 && !results.includes(item)){
    results.push(item);
  }
}
console.log(results);
This is a brute-force iterator much like your own, but it reduces the amount of re-checking by removing items from the arrays as it goes:
function elementsInOnlyOneArr(...arrays) {
  // de-dup and sort so we process the longest array first
  let sortedArrays = arrays.map(ar => [...new Set(ar)]).sort((a, b) => b.length - a.length);
  for (let ai1 = 0; ai1 < sortedArrays.length; ai1++) {
    for (let i = sortedArrays[ai1].length - 1; i >= 0; i--) {
      let exists = false;
      let val = sortedArrays[ai1][i];
      for (let ai2 = ai1 + 1; ai2 < sortedArrays.length; ai2++) {
        let foundIndex = sortedArrays[ai2].indexOf(val);
        if (foundIndex >= 0) {
          exists = true;
          sortedArrays[ai2].splice(foundIndex, 1);
          // do not break, check for a match in the other arrays
        }
      }
      // if there was a match in any of the other arrays, remove it from the first one too!
      if (exists)
        sortedArrays[ai1].splice(i, 1);
    }
  }
  // concat the remaining elements, they are all unique
  let output = sortedArrays[0];
  for (let i = 1; i < sortedArrays.length; i++)
    output = output.concat(sortedArrays[i]);
  return output;
}
let arr1 = ['a', 'b', 'c', 'a', 'b']
let arr2 = ['a', 'd', 'b', 'c']
let arr3 = ['f', 'c', 'a']
console.log(elementsInOnlyOneArr(arr1,arr2,arr3));
See this fiddle: https://jsfiddle.net/4deq7xwm/
Updated - Use splice() instead of pop()
Create a collection of pairs (x, y) where x is an element (in your case, a string) and y identifies the array it comes from. Sort this collection by x, which takes O(n log n) time (where n is the total number of items over all arrays). It is then easy to iterate over the result and detect the desired items.
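A rough sketch of that idea (the pairing, sorting, and scanning code below is my own illustration of the description above, not a tested reference implementation):
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
function elementsInOnlyOneArrSorted(...arrays) {
  // build (value, arrayIndex) pairs, de-duplicating within each array first
  const pairs = arrays.flatMap((arr, i) => [...new Set(arr)].map(x => [x, i]));
  // sort by value so equal values end up adjacent
  pairs.sort((a, b) => (a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0));
  const result = [];
  let i = 0;
  while (i < pairs.length) {
    let j = i;
    while (j < pairs.length && pairs[j][0] === pairs[i][0]) j++;
    if (j - i === 1) result.push(pairs[i][0]); // the value appears in exactly one array
    i = j;
  }
  return result;
}
console.log(elementsInOnlyOneArrSorted(arr1, arr2, arr3)); // ['d', 'f']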
This is easily solved with the built-in .lastIndexOf() Array method:
const arr1 = ['a', 'b', 'c', 'a', 'b'];
const arr2 = ['a', 'd', 'b', 'c'];
const arr3 = ['f', 'c', 'a'];
function getValuesInOneArray(...arrays) {
  const combinedArr = arrays.flat();
  const result = [];
  for (const value of combinedArr) {
    if (combinedArr.indexOf(value) === combinedArr.lastIndexOf(value)) {
      result.push(value);
    }
  }
  return result;
}
getValuesInOneArray(arr1, arr2, arr3); // ['d', 'f']
I generally try to avoid "ninja code" for the benefit of maintainability and readability, but I couldn't resist rewriting the above getValuesInOneArray() function as a slicker arrow function.
const getValuesInOneArray = (...arrays) =>
  arrays
    .flat()
    .filter(
      (value, index, array) => array.indexOf(value) === array.lastIndexOf(value)
    );
You can read more about "ninja code" (and why you should avoid it) here, on JavaScript.info, but I recommend avoiding practices like this in production codebases.
Hope this helps.
function elementsInOnlyOneArr(arr1, arr2, arr3) {
  let arr = arr1.concat(arr2).concat(arr3);
  return removeDuplicate(arr);
}
function removeDuplicate(arr) {
  for (const each of arr) {
    let count = 0;
    for (const ch of arr) {
      if (each === ch) {
        count++;
        if (count > 1) {
          // removing element that exists more than once
          arr = arr.filter(item => item !== each);
          return removeDuplicate(arr);
        }
      }
    }
  }
  return arr;
}
let arr1 = ['a', 'b', 'c', 'a', 'b'];
let arr2 = ['a', 'd', 'b', 'c'];
let arr3 = ['f', 'c', 'a'];
console.log(elementsInOnlyOneArr(arr1, arr2, arr3));
Do a diff of each of the arrays against the other two and concat the results to get the values that appear in only one of the arrays.
const arr1 = ['a', 'b', 'c', 'a', 'b'];
const arr2 = ['a', 'd', 'b', 'c'];
const arr3 = ['f', 'c', 'a'];
function diff(a1, a2, a3) {
  let u1 = a1.filter(el => { return !a2.includes(el) })
             .filter(el => { return !a3.includes(el) });
  let u2 = a2.filter(el => { return !a1.includes(el) })
             .filter(el => { return !a3.includes(el) });
  let u3 = a3.filter(el => { return !a2.includes(el) })
             .filter(el => { return !a1.includes(el) });
  return u1.concat(u2).concat(u3);
}
/* diff them */
const adiff = diff(arr1, arr2, arr3);
console.log(adiff);
I'm trying to double each element in an array
let arr = ['onions', 'tomatoes', ...];
with a for loop and keep getting a NaN error... I'm still learning, so any advice would be appreciated.
I've tried a for loop, .map(), and other methods, but just can't see the obvious problem...
let newIngr = tortSoup.filter(function(value, index, arr) {
  if (value !== 'onion' && value !== 'red pepper') {
    return value;
  }
});
console.log(newIngr);
let myReci = [];
for (var i = 0; i < newIngr.length; i++) {
  myReci[i] = newIngr[i] * 2;
}
console.log(myReci);
Expected: each array element multiplied by two and returned:
['onions', 'tomatoes', 'garlic', 'fontina']
would become:
['onions', 'onions', 'tomatoes', 'tomatoes', 'garlic', 'garlic', 'fontina', 'fontina']
Here is a way to do it with Array.reduce() and the spread operator:
const array = ['onions', 'tomatoes', 'garlic', 'fontina'];
const result = array.reduce((acc, x) => ([...acc, x, x]), []);
console.log(result)
Array.reduce iterates over your input array and calls the callback for each element. The callback receives the accumulated output from the previous iteration as its first argument and the current array item as its second.
The callback here returns a new array composed of the previous result of the callback (spread into the new array with the spread operator ...) and the current item repeated twice.
To start the reducing process, we also need an initial value; here we pass an empty array (the last argument to reduce).
Here is a detailed description of the values of acc and x in the callback for the following reduction:
['a', 'b', 'c'].reduce((acc, x) => ([...acc, x, x]), []);
acc = [], x = 'a' => returns ['a', 'a']
acc = ['a', 'a'], x = 'b' => returns ['a', 'a', 'b', 'b']
acc = ['a', 'a', 'b', 'b'], x = 'c' => returns ['a', 'a', 'b', 'b', 'c', 'c']
Iterate over the input array using .map().
Initialize a new array using the Array() constructor and fill it using the .fill() method.
Finally, convert the array of arrays into a single array using .concat() and the spread operator.
const input = ['onions', 'tomatoes', 'garlic', 'fontina'];
const dupeValues = (arr, factor) => [].concat(...arr.map(s => new Array(factor).fill(s)));
console.log(dupeValues(input, 2));
console.log(dupeValues(input, 3));
Use Array.flatMap() (not supported by IE or legacy Edge):
const array = ['onions', 'tomatoes', 'garlic', 'fontina'];
const result = array.flatMap(item => [item, item]);
console.log(result)
Using vanilla JavaScript:
const ingredients = [ 'onions', 'tomatoes', 'garlic', 'fontina' ]
const ingredientsToRemove = [ 'onions', 'red pepper' ]
// Using Array.reduce method
const doubleIngredients = ingredients.reduce(
  ( array, ingredient ) =>
  {
    // If the ingredient has to be removed, return the array
    // Else return the array with two times the current ingredient
    return ingredientsToRemove.includes( ingredient )
      ? array
      : [ ...array, ingredient, ingredient ]
  },
  []
)
console.log({ ingredients, doubleIngredients })
Well, the problem here is that string * 2 will not return 2 strings; it will return NaN:
console.log('test' * 2) // NaN
Repeating a single string can be done with the repeat() method:
console.log('test '.repeat(2))
Your expected output can be achieved like this:
let arr = ['onions', 'tomatoes', 'garlic', 'fontina']
let output = arr.reduce((op,inp)=>(op.concat([inp,inp])),[])
console.log(output)
I want to split an array of strings into two arrays.
However, when I push the strings into the new arrays, it should be alternating. So, if the array is:
let alph = ['a', 'b', 'c', 'd', 'e', 'f']
Then the new arrays would look like:
firstArr = ['a', 'c', 'e']
secondArr = ['b', 'd', 'f']
How can I do it so I'm not repeating myself? I have the following code, and it works, but I do not want to write two of the same filter functions (keep things DRY):
let firstArr = alph.filter((letter, index) => {
return index % 2 === 0;
})
You could put both target arrays into an array and use the index parity as an indicator of which array to push to.
let alph = ['a', 'b', 'c', 'd', 'e', 'f'],
first = [],
second = [],
temp = [first, second];
alph.forEach((v, i) => temp[i % 2].push(v));
console.log(first);
console.log(second);
Since filter creates only one array, you would need two filter calls; alternatively, use e.g. forEach:
var arr = ["a","b","c","d","e","f"], firstArr = [], secondArr = [];
arr.forEach( (a,i) => {
(i % 2 === 0) ? firstArr.push(a) : secondArr.push(a);
})
console.log(firstArr)
console.log(secondArr)
For better readability, there's nothing wrong with having separate filter functions for these. To clean it up a little, you could use arrow functions to make them one-liners and then pass them to the filter function, like:
const alpha = ['a', 'b', 'c', 'd', 'e', 'f'];
const filterByEvens = (letter, index) => index % 2 === 0;
const filterByOdds = (letter, index) => index % 2 !== 0;
const evens = alpha.filter(filterByEvens);
const odds = alpha.filter(filterByOdds);
You can use reduce for this:
const alph = ['a', 'b', 'c', 'd', 'e', 'f'];
const result = alph.reduce((acc, letter, ndx) => {
  acc[ndx % 2] = acc[ndx % 2] || [];
  acc[ndx % 2].push(letter);
  return acc;
}, []);
const [firstArr, secondArr] = result;
console.log(firstArr, secondArr);
I have a little question about joining arrays. I have an array of letters, something like this:
let array = ['a', 'b', '', 'c']
I want to join the elements in the array to get output like this:
let array = ['ab', 'c']
Can you help me? I was searching, but everything I found was about removing whitespace from arrays or strings :(
Something along these lines:
let array = ['a', 'b', '', 'c'];
let res = array.reduce((res, s) => {
  if (s.length) {
    res[res.length - 1] += s;
  } else {
    res.push('');
  }
  return res;
}, ['']);
console.log(res);
It does make the assumptions that there will be at least one string in the array, that the last element won't be an empty string, and that there won't be two adjacent empty strings. Adjust as necessary if those are concerns.
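For illustration, one possible adjustment (my own variation on the snippet above, so treat it as a sketch) that tolerates leading, trailing, and repeated empty strings by skipping empties and only starting a new chunk when the previous element was empty:
let array = ['', 'a', 'b', '', '', 'c', ''];
let res = array.reduce((acc, s, i, arr) => {
  if (s.length) {
    // start a new chunk if this is the first element or the previous one was empty
    if (i === 0 || !arr[i - 1].length) acc.push('');
    acc[acc.length - 1] += s;
  }
  return acc;
}, []);
console.log(res); // ['ab', 'c']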
You can use a combination of Array#map, Array#join and String#split to achieve what you want.
Here, I used a space as the delimiter, but you can use anything that you don't use in your array.
let array = ['a','b','','c'];
let result = array.map(e => e.length ? e : ' ').join('').split(' ');
console.log(result);
You could use the reduce() method to create the new array, together with a variable that you increment on each empty string.
let array = ['a', 'b', '', 'c', 'd', 'e', '', '', '', 'f', '', 'g'];
let i = 0;
let result = array.reduce((r, e, j, arr) => {
  r[i] = (r[i] || '') + e;
  if (!e && arr[j - 1]) i++
  return r;
}, [])
console.log(result)
I have two arrays; the items in Remove should be removed from the List, and the number of items removed should be counted.
I'm using this to remove the items in 'Remove':
let output = this.List.filter((el) => {
  return this.Remove.indexOf(el) < 0;
})
This is working fine, but I'd like to count the number of items that have overlapped, and thus been removed in the filter.
For example, if List=['1','2','3','4','5'] and Remove=['1','4'], count would be 2.
Many thanks!
You can do this in one reduce run. For example, you can create an object and track both the cleaned array and the number of deleted elements:
const a = [1, 2, 3, 4, 5, 6, 7];
const b = [1, 4, 9]; // elements to remove
const c = a.reduce((acc, cur) => b.includes(cur)
  ? Object.assign(acc, { n: acc.n + 1 })
  : Object.assign(acc, { res: acc.res.concat(cur) }), { n: 0, res: [] });
console.log(c)
If you don't want to compare lengths as suggested in the comments (which would be fine...), I can propose the following solution using a counter:
let list = ['1', '2', '3', '4', '5'],
    remove = ['1', '4'],
    overlap = 0;
let output = list.filter(el => {
  let bool = remove.indexOf(el) < 0;
  if (!bool) overlap++;
  return bool;
});
console.log(overlap);
Assuming all the items within each of the arrays a and b are unique, you can make use of a Set to get the number of overlapping items as a one-liner:
const a = [1,2,3,4];
const b = [2,4,5];
const overlap = a.length + b.length - new Set(a.concat(b)).size; // => 2
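If you also need the cleaned list alongside that count (as in the original question), one straightforward combination, sketched here under the same uniqueness assumption, is to filter against a Set and take the length difference as the count:
const list = ['1', '2', '3', '4', '5'];
const remove = new Set(['1', '4']);
const output = list.filter(el => !remove.has(el)); // => ['2', '3', '5']
const count = list.length - output.length;         // => 2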