Cycle function in JavaScript

I'm new to JavaScript and I am looking for a cycle function that infinitely loops/recurses through the values of an array, like Clojure's cycle. I was hoping to find something like this in the underscore library, but I could not find anything suitable. Ideally I would like to use something like this:
_.head(_.cycle([1,2,3]), 100)
This function would return an array of 100 elements:
[1,2,3,1,2,3,1,2,3,1,2,3,...]
Is there a function like this I can use in JavaScript? Here's my feeble attempt, but I can't seem to get it to work:
arr = [1,2,3,4,5,6,7,8,9];
var cycle = function(arr) {
arr.forEach(function(d, i) {
if (d === arr.length)
return d
d == 0
else {return d}
});
};
cycle(arr);

You could do something like:
var cycle = function(array, count) {
    var output = [];
    for (var i = 0; i < count; i++) {
        output.push(array[i % array.length]);
    }
    return output;
}

An implementation of Clojure's cycle:
function cycle(input) {
    return function (times) {
        var i = 0, output = [];
        while (i < times) {
            output.push(input[i++ % input.length]);
        }
        return output;
    };
}
Usage examples:
var chars = cycle(['a', 'b']);
chars(0) // []
chars(1) // ["a"]
chars(3) // ["a", "b", "a"]
cycle([1, 2])(3) // [1, 2, 1]
An implementation of Clojure's take:
function take(length, input) {
return typeof input === 'function'
? input(length)
: input.slice(0, length);
}
Usage examples:
take(3, [1, 2, 3, 4]) // [1, 2, 3]
take(3, cycle([1, 2])) // [1, 2, 1]
Both implementations probably do not exactly match Clojure's versions.

The problem with trying to emulate purely functional languages in JavaScript is eagerness: JavaScript doesn't have lazy evaluation, so you can't produce infinite arrays. Instead you need to define a lazy list. This is how I usually do it:
function cons(head, tail) {
    return cont({
        head: head,
        tail: tail
    });
}

function cont(a) {
    return function (k) {
        return k(a);
    };
}
The cons function is similar to the cons function in LISP or the : constructor in Haskell. It takes an element and a list and returns a new list with the element inserted at the beginning of the list. The cont function creates a continuation (really useful for reifying thunks to emulate lazy evaluation).
Creating a list using cons is very simple:
var list = cons(1, cons(2, cons(3, cons(4, cons(5, null)))));
var array = [1, 2, 3, 4, 5];
The above list and array are equivalent. We can create two functions to convert arrays to lists and vice versa:
function toList(array) {
    var list = null, length = array.length;
    while (length) list = cons(array[--length], list);
    return list;
}

function toArray(list) {
    var array = [];
    while (list) {
        list = list(id);
        array = array.concat(list.head);
        list = list.tail;
    }
    return array;
}

function id(x) {
    return x;
}
Now that we have a method of implementing lazy lists in JavaScript let's create the cycle function:
function cycle(list) {
    list = list(id);
    var head = list.head;
    var tail = join(list.tail, cons(head, null));
    return function (k) {
        return k({
            head: head,
            tail: cycle(tail)
        });
    };
}

function join(xs, ys) {
    if (xs) {
        xs = xs(id);
        return cons(xs.head, join(xs.tail, ys));
    } else return ys;
}
Now you can create an infinite list as follows:
var list = cycle(toList([1,2,3]));
Let's create a take function to get the first 100 elements of the list:
function take(n, xs) {
    if (n > 0) {
        xs = xs(id);
        return cons(xs.head, take(n - 1, xs.tail));
    } else return null;
}
We can now easily get an array of 100 elements with [1,2,3] repeating:
var array = toArray(take(100, list));
Let's see if it works as expected: http://jsfiddle.net/TR9Ma/
To summarize, lazy functional programming in JavaScript is not as much fun as it is in purely functional languages like Haskell. However with a little bit of effort you can make it work.

Here is a slightly more compact version:
function cycle(arr, count) {
    for (var i = 0, out = []; i < count; i++) {
        out.push(arr[i % arr.length]);
    }
    return out;
}
And a JSFiddle (outputs the results to the console):
http://jsfiddle.net/2F9hY/1/
Basically just loops through count number of times, getting the i % arr.length item and adding it to the array.
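For example, to reproduce the output the question asks for:
cycle([1, 2, 3], 7);   // [1, 2, 3, 1, 2, 3, 1]
cycle([1, 2, 3], 100); // 100 elements: [1, 2, 3, 1, 2, 3, ...]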

This function should work. You can put the mod operation to good use here.
var cycle = function(input, n) {
    var output = [];
    for (var i = 0; i < n; i++) {
        var j = i % input.length;
        output.push(input[j]);
    }
    return output;
}
Here's a working fiddle: http://jsfiddle.net/K6UhS/1/
Also, I wouldn't introduce a whole library just for this function!

The wu library includes a cycle function which does this:
wu.cycle([ 1, 2, 3 ]).take(10).toArray() // [ 1, 2, 3, 1, 2, 3, 1, 2, 3, 1 ]
If you don't need support for iterators/streams/infinite lists, and just want a function that cycles through an array's values, lei-cycle provides a much more lightweight solution:
const Cycle = require('lei-cycle')
let c = Cycle([ 1, 2, 3 ])
console.log(c()) // 1
console.log(c()) // 2
console.log(c()) // 3
console.log(c()) // 1
// ...

function cycle(array) {
    let result = [...array]
    result[Symbol.iterator] = function* () {
        while (true)
            yield* this.values()
    }
    return result
}
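A quick usage sketch (note that the result has to be consumed with an early exit such as break; spreading it or passing it to Array.from would loop forever, since its iterator never finishes):
const repeating = cycle([1, 2, 3]);
const firstSeven = [];
for (const value of repeating) {
    if (firstSeven.length === 7) break;
    firstSeven.push(value);
}
// firstSeven: [1, 2, 3, 1, 2, 3, 1]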

class Cycle {
    constructor(array) {
        this.array = array;
    }
    next() {
        var x = this.array.shift();
        this.array.push(x);
        return x;
    }
}
const cycle = new Cycle(['a','b','c']);
console.log(cycle.next()); // returns "a"
console.log(cycle.next()); // returns "b"
console.log(cycle.next()); // returns "c"
console.log(cycle.next()); // returns "a"
console.log(cycle.next()); // returns "b"
...

Related

Checking whether the number of unique numbers within array exceeds n

Just as the title reads, I need to check whether the number of unique entries within an array exceeds n.
Array.prototype.some() seems to fit perfectly here, as it stops cycling through the array the moment a positive answer is found, so please do not suggest methods that filter out non-unique records and measure the length of the resulting dataset, as performance matters here.
So far, I use the following code to check whether there are more than n=2 unique numbers:
const res = [1,1,2,1,1,3,1,1,4,1].some((e,_,s,n=2) => s.indexOf(e) != s.lastIndexOf(e) ? false : n-- ? false : true);
console.log(res);
And it returns false, while there are, obviously, 3 unique numbers (2, 3, 4).
Your help in figuring out my (stupid) mistake here is much appreciated.
p.s. I'm looking for a pure JS solution
You can use a Map() with array values as map keys and count as values. Then iterate over map values to find the count of unique numbers. If count exceeds the limit return true, if not return false.
Time complexity is O(n). It can't get better than O(n) because every number in the array must be visited to find the count of unique numbers.
var data = [1, 1, 2, 1, 1, 3, 1, 1, 4, 1];
function exceedsUniqueLimit(limit) {
var map = new Map();
for (let value of data) {
const count = map.get(value);
if (count) {
map.set(value, count + 1);
} else {
map.set(value, 1);
}
}
var uniqueNumbers = 0;
for (let count of map.values()) {
if (count === 1) {
uniqueNumbers++;
}
if (uniqueNumbers > limit) {
return true;
}
}
return false;
}
console.log(exceedsUniqueLimit(2));
To know whether a value is unique or a duplicate, the whole array needs to be scanned at least once. (Well, on a very large array there could be a test to see how many elements are left to scan, but the overhead of such a test would make it slower.)
This version uses two Set
function uniqueLimit(data,limit) {
let
dup = new Set(),
unique = new Set(),
value = null;
for (let i = 0, len = data.length; i < len; ++i) {
value = data[i];
if ( dup.has(value) ) continue;
if ( unique.has(value) ) {
dup.add(value);
unique.delete(value);
continue;
}
unique.add(value);
}
return unique.size > limit;
}
I also tried this version, using arrays:
function uniqueLimit(data, limit) {
let unique=[], dup = [];
for (let idx = 0, len = data.length; idx < len; ++idx) {
const value = data[idx];
if ( dup.indexOf(value) >= 0 ) continue;
const pos = unique.indexOf(value); // get position of value
if ( pos >= 0 ) {
unique.splice(pos,1); // remove value
dup.push(value);
continue;
}
unique.push(value);
}
return unique.length > limit;
};
I tested several of the solutions in this thread, and you can find the result here. If there are only a few unique values, the method by using arrays is the fastest, but if there are many unique values it quickly becomes the slowest, and on large arrays slowest by several magnitudes.
More profiling
I did some more tests with node v12.10.0. The results are normalized after the fastest method for each test.
Worst case scenario: 1000000 entries, all unique:
Set 1.00 // See this answer
Map 1.26 // See answer by Nikhil
Reduce 1.44 // See answer by Bali Balo
Array Infinity // See this answer
Best case scenario: 1000000 entries, all the same:
Array 1.00
Set 1.16
Map 2.60
Reduce 3.43
Question test case: [1, 1, 2, 1, 1, 3, 1, 1, 4, 1]
Array 1.00
Map 1.29
Set 1.47
Reduce 4.25
Another test case: [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,
1,1,1,1,1,1,1,3,4,1,1,1,1,1,1,1,2,1,1,1,
1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
1,1,1,1,1,1,1,5 ]
Array 1.00
Set 1.13
Map 2.24
Reduce 2.39
Conclusion
The method that uses Set works for both small and large arrays, and performs well regardless of whether there are many unique values or not. The version that uses arrays can be faster if there are few unique values, but quickly becomes very slow if there are many unique values.
Using sets, we start from the hypothetical unique set size (the number of distinct values) and delete an element from the unique set for each duplicate found. If the unique set size drops to n or below, we stop iterating.
function uniqueGtN(res, n) {
let uniqSet = new Set(res);
let max = uniqSet.size;
if (max <= n) return false;
let dupSet = new Set();
return !res.some(e => {
if (dupSet.has(e)) {
if (uniqSet.has(e)) {
uniqSet.delete(e);
console.log(...uniqSet);
return (--max <= n);
}
} else {
dupSet.add(e);
}
});
}
console.log(uniqueGtN([1, 1, 2, 1, 1, 3, 3, 1], 2));
From your original solution, I have changed a few things and it seems to be working fine:
(function() {
const array = [1,1,2,1,1,3,1,1,4,1];
function hasExceedingUniqueNumber(array, number) {
return array.some((e,_,s,n=number) => {
let firstIndex = s.indexOf(e);
let lastIndex = s.lastIndexOf(e);
// NOT unique
if (firstIndex != lastIndex) {
return false;
}
// unique
return e > n;
});
}
console.log('1', hasExceedingUniqueNumber(array, 1));
console.log('2', hasExceedingUniqueNumber(array, 2));
console.log('3', hasExceedingUniqueNumber(array, 3));
console.log('4', hasExceedingUniqueNumber(array, 4));
})();
So the shorter version looks like this:
(function() {
const array = [1,1,2,1,1,3,1,1,4,1];
function hasExceedingUniqueNumber(array, number) {
return array.some((e,_,s,n=number) => s.indexOf(e) != s.lastIndexOf(e) ? false : e > n);
}
console.log('1', hasExceedingUniqueNumber(array, 1));
console.log('2', hasExceedingUniqueNumber(array, 2));
console.log('3', hasExceedingUniqueNumber(array, 3));
console.log('4', hasExceedingUniqueNumber(array, 4));
})();
The code listed in your question does not work because the counter (n in your code, m below) is not shared across the calls to the some callback function. It is a default parameter, so its value is reset to 2 on each iteration.
To fix this, either declare the counter outside, or use the thisArg of the some function (but that means you can't use an arrow function):
let m = 2;
const res = [1,1,1,2,1,1,3,1,1,1,4,1,1]
.sort((a,b) => a-b)
.some((n,i,s) => i > 0 && n == s[i-1] ? !(m--) : false);
// ----- or -----
const res = [1,1,1,2,1,1,3,1,1,1,4,1,1]
.sort((a,b) => a-b)
.some(function(n,i,s) { return i > 0 && n == s[i-1] ? !(this.m--) : false; }, { m: 2 });
Note: this code seems to count if the number of duplicates exceeds a certain value, not the number of unique values.
As another side note, I know you mentioned you did not want to use a duplicate removal algorithm, but performant ones (for example hash-based) would result in something close to O(n).
Here is a solution to count all the values appearing exactly once in the initial array. It is a bit obfuscated and hard to read, but you seem to want something concise. It is the most performant approach I can think of, using 2 objects to store the values seen at least once and the ones seen multiple times:
let res = [1,1,2,3,4].reduce((l, e) => (l[+!l[1][e]][e] = true, l), [{},{}]).map(o => Object.keys(o).length).reduce((more,once) => once-more) > 2;
Here is the less minified version for people who don't like the short version:
let array = [1,1,2,3,4];
let counts = array.reduce((counts, element) => {
if (!counts.atLeastOne[element]) {
counts.atLeastOne[element] = true;
} else {
counts.moreThanOne[element] = true;
}
return counts;
}, { atLeastOne: {}, moreThanOne: {} });
let exactlyOnceCount = Object.keys(counts.atLeastOne).length - Object.keys(counts.moreThanOne).length;
let isOverLimit = exactlyOnceCount > 2;
Whenever I have a type of problem like this, I always like to peek at how the underscore JS folks have done it.
[Ed again: removed _.countBy as it isn't relevant to the answer]
Use the _.uniq function to return a list of unique values in the array:
var u = _.uniq([1,1,2,2,2,3,4,5,5]); // [1,2,3,4,5]
if (u.length > n) { ...};
[ed:] Here's how we might use that implementation to write our own, opposite function that returns only non-unique collection items
function nonUnique(array) {
var result = [];
var seen = [];
for (var i = 0, length = array.length; i < length; i++) {
var value = array[i];
if (seen.indexOf(value) === -1) { // warning! naive assumption
seen.push(value);
} else {
result.push(value);
}
}
console.log("non-unique result", result);
return result;
};
function hasMoreThanNUnique(array, threshold) {
var uArr = nonUnique(array);
var accum = 0;
for (var i = 0; i < array.length; i++) {
var val = array[i];
if (uArr.indexOf(val) === -1) {
accum++;
}
if (accum > threshold) return true;
}
return false;
}
var testArrA = [1, 1, 2, 2, 2, 3, 4, 5]; // unique values: [3, 4, 5]
var testArrB = [1, 1, 1, 1, 4]; // [4]
var testResultsA = hasMoreThanNUnique(testArrA, 3)
console.log("testArrA and results", testResultsA);
var testResultsB = hasMoreThanNUnique(testArrB, 3);
console.log("testArrB and results", testResultsB);
So far, I came up with the following:
const countNum = [1,1,1,2,1,1,3,1,1,1,4,1,1].reduce((r,n) => (r[n]=(r[n]||0)+1, r), {});
const res = Object.entries(countNum).some(([n,q]) => q == 1 ? !(m--) : false, m=2);
console.log(res);
But I don't really like the array → object → array conversion involved. Is there a faster and (at the same time) compact solution?

Convert first N items of an iterable to an Array

Something similar to the question Convert ES6 Iterable to Array, but I only want the first N items. Is there any built-in for me to do so? Or how can I achieve this more elegantly?
let N = 100;
function *Z() { for (let i = 0; ; i++) yield i; }
// This won't work (Array.from would never finish on an infinite generator)
// Array.from(Z()).slice(0, N);
// [...Z()].slice(0, N)
// This works, but a built-in may be preferred
let a = [], t = Z(); for (let i = 0; i < N; i++) a.push(t.next().value);
To get the first n values of an iterator, you could use one of:
Array.from({length: n}, function(){ return this.next().value; }, iterator);
Array.from({length: n}, (i => () => i.next().value)(iterator));
To get the iterator of an arbitrary iterable, use:
const iterator = iterable[Symbol.iterator]();
In your case, given a generator function Z:
Array.from({length: 3}, function(){ return this.next().value; }, Z());
If you need this functionality more often, you could create a generator function:
function* take(iterable, length) {
const iterator = iterable[Symbol.iterator]();
while (length-- > 0) yield iterator.next().value;
}
// Example:
const set = new Set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
console.log(...take(set, 3));
There is no built-in method to take only a certain number of items from an iterable (à la something like take()). Although your snippet can be somewhat improved with a for...of loop, which is specifically meant to work with iterables, e.g.:
let a = []; let i = 0; for (let x of Z()) { a.push(x); if (++i === N) break; }
This might be better, since your original snippet would keep looping even if there are fewer than N items in the iterable.
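If you need this often, the same for...of idea can be wrapped in a small helper (firstN is just an illustrative name here, not a built-in):
function firstN(iterable, n) {
    const result = [];
    if (n <= 0) return result;
    for (const item of iterable) {
        result.push(item);
        if (result.length === n) break;
    }
    return result;
}
firstN(Z(), 5);             // [0, 1, 2, 3, 4]
firstN(new Set([1, 2]), 5); // [1, 2] -- stops early when the iterable runs out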
A bit shorter (and less efficient) with .map, and a bit safer with a custom function:
function *Z() { for (let i = 0; i < 5; ) yield i++; }

function buffer(t, n = -1, a = [], c) {
    while (n-- && (c = t.next(), !c.done)) a.push(c.value);
    return a;
}

const l = console.log, t = Z()
l( [...Array(3)].map(v => t.next().value) )
l( buffer(t) )
how can I achieve this more elegantly?
One possible elegant solution, using iter-ops library:
import {pipe, take} from 'iter-ops';
const i = pipe(
Z(), // your generator result
take(N) // take up to N values
); //=> Iterable<number>
const arr = [...i]; // your resulting array
P.S. I'm the author of the library.

Iterative solution for flattening n-th nested arrays in Javascript

Can anyone show me an iterative solution for the following problem? I solved it recursively but struggled with an iterative solution. (Facebook Technical Interview Question)
Input: [1, {a: 2}, [3], [[4, 5], 6], 7]
Output: [1, {a: 2}, 3, 4, 5, 6, 7]
Solution must work with n-th nested array elements (i.e. it must still work if someone modifies the array values/placement in the example above)
Recursive solution:
var flatten = function(input) {
var result = [];
input.forEach(function(element) {
result = result.concat(Array.isArray(element) ? flatten(element) : element);
});
return result;
}
Here is one way:
var input = [1, {a: 2}, [3], [[4, 5], 6], 7];
function flatten(input) {
var i, placeHolder = [input], lastIndex = [-1], out = [];
while (placeHolder.length) {
input = placeHolder.pop();
i = lastIndex.pop() + 1;
for (; i < input.length; ++i) {
if (Array.isArray(input[i])) {
placeHolder.push(input);
lastIndex.push(i);
input = input[i];
i = -1;
} else out.push(input[i]);
}
}
return out;
}
flatten(input);
Explanation: If iterating over a nested structure, you just have to remember where you were before by saving the current array and position before moving into the nested array (this is usually taken care of via the stack for recursive solutions).
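For the input from the question this gives the expected result:
flatten([1, {a: 2}, [3], [[4, 5], 6], 7]); // [1, {a: 2}, 3, 4, 5, 6, 7]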
Note: If you reuse the arrays placeHolder and lastIndex you won't need to keep recreating them every time. Perhaps something like this:
var flatten = function(){
var placeHolder = [], lastIndex = [];
placeHolder.count = 0;
lastIndex.count = 0;
return function flatten(input) {
var i, out = [];
placeHolder[0] = input; placeHolder.count = 1;
lastIndex[0] = -1; lastIndex.count = 1;
while (placeHolder.count) {
input = placeHolder[--placeHolder.count];
i = lastIndex[--lastIndex.count] + 1;
for (; i < input.length; ++i) {
if (Array.isArray(input[i])) {
placeHolder[placeHolder.count++] = input;
lastIndex[lastIndex.count++] = i;
input = input[i];
i = -1;
} else out.push(input[i]);
}
}
return out;
}
}();
This is even faster again (for flat iteration, that is), and causes fewer garbage-collector issues when called many times. The speed is very close to that of recursive function calls in Chrome, and many times faster than recursion in Firefox and IE.
I recreated Tomalak's tests here since the old jsPerf is broken for editing: https://jsperf.com/iterative-array-flatten-2
How about this?
inp = [1, {a: 2}, [3], [[4, 5], 6], 7]
out = inp;
while(out.some(Array.isArray))
out = [].concat.apply([], out);
document.write(JSON.stringify(out));
Works, but not recommended:
var flatten = function(input) {
return eval("[" + JSON.stringify(input).
replace(/\[/g,"").replace(/\]/g,"") + "]");
}
Here's a solution that flattens in place.
function flatten(arr) {
var i = 0;
if (!Array.isArray(arr)) {
/* return non-array inputs immediately to avoid errors */
return arr;
}
while (i < arr.length) {
if (Array.isArray(arr[i])) {
arr.splice(i, 1, ...arr[i]);
} else {
i++;
}
}
return arr;
}
This solution iterates through the array, flattening each element one level of nesting at a time until it cannot be flattened any more.
function flatten(array){
for(var i=0;i<array.length;i++)
if(Array.isArray(array[i]))
array.splice.apply(array,[i,1].concat(array[i--]));
return array;
}
This in-place solution is faster than Lupe's, now that I've removed all of the inner curly brackets (I inlined the i-- in the concat parameter to do that).
A different iterative algorithm:
function flatten2(input) {
var output = [];
var todo = [input];
var current;
var head;
while(todo.length) {
var current = todo.shift();
if(Array.isArray(current)) {
current = current.slice();
head = current.shift();
if(current.length) {
todo.unshift(current)
}
todo.unshift(head);
} else {
output.push(current);
}
}
return output;
}
Put all elements on a stack.
While the stack is not empty, remove the first element.
If that element is a scalar, add it to the output.
If that element is an array, split it into head (first element) and tail (remaining elements) and add both to the stack.
As Tomalak's JSPerf shows, this is pretty slow.
JSBin
A fairly concise, readable algorithm:
function flatten(input) {
var output = [];
var todo = [input];
var current;
while(todo.length) {
var current = todo.shift();
if(Array.isArray(current)) {
todo.unshift.apply(todo, current)
} else {
output.push(current);
}
}
return output;
}
This version performs better than my other answer, but is still significantly slower than James Wilkins' answer.
JSBin
Tomalak's JSPerf
Here are two approaches, recursive and iterative, and their comparison to Array.flat.
Maybe it'll help someone.
const arrayToFlatten = [[1], [2, [3]], null, [[{}]], undefined];
// takes an array and flattens it recursively, default depth is 1 (just like Array.flat())
function flattenRecursive(arr, depth = 1) {
let myArray = [];
if (depth === 0){ // if you've reached the depth don't continue
myArray = arr;
} else if(!Array.isArray(arr)) { // add item to array if not an array
myArray.push(arr);
} else { // flatten each item in the array then concatenate
arr.forEach(item => {
const someNewArray = flattenRecursive(item, depth - 1);
myArray = myArray.concat(someNewArray);
});
}
return myArray;
}
// takes an array and flattens it using a loop, default depth is 1 (just like Array.flat())
function flattenIterative(arr, depth = 1) {
let result = arr;
// if an element is an array
while(result.some(Array.isArray) && depth) {
// flatten the array by one level by concating an empty array and result using apply
result = [].concat.apply([], result);
depth--; // track depth
}
return result;
}
console.log(arrayToFlatten.flat(2)); // built-in Array.prototype.flat (ES2019)
console.log(flattenRecursive(arrayToFlatten, 2));
console.log(flattenIterative(arrayToFlatten, 2));
Here's my solution to this:
function flattenList(A) {
    let result = []
    for (let i = 0; i < A.length; i++) {
        if (Array.isArray(A[i])) { // only arrays need flattening; other values pass through
            let item = reduceArray(A[i])
            result.push(...item)
        } else {
            result.push(A[i])
        }
    }
    return result
}
function reduceArray(arr) {
    // splice nested arrays in place until none remain
    while (arr.some(Array.isArray)) {
        let item = arr.find(Array.isArray)
        let index = arr.indexOf(item)
        arr.splice(index, 1, ...item)
    }
    return arr
}
Not sure if the "stack" approach was used properly in previous answers. I think it could be simpler, like this:
function flatten(arr) {
const result = [];
const stack = [arr];
while (stack.length) {
const curr = stack.pop();
if (Array.isArray(curr)) {
for (let i = curr.length - 1; i >= 0; i--) {
stack.push(curr[i]);
}
} else {
result.push(curr);
}
}
return result;
}
Not sure why the other answers are so complicated, this can easily be achieved by looping through the array and flattening each entry until it's no longer an array.
const flatten = (arr) => {
for (let i = 0; i < arr.length; i++) {
while (Array.isArray(arr[i])) {
arr.splice(i, 1, ...arr[i]);
}
}
return arr;
}

Getting a union of two arrays in JavaScript [duplicate]

This question already has answers here:
How to merge two arrays in JavaScript and de-duplicate items
(89 answers)
Closed 4 years ago.
Say I have an array of [34, 35, 45, 48, 49] and another array of [48, 55]. How can I get a resulting array of [34, 35, 45, 48, 49, 55]?
With the arrival of ES6, with sets and the spread operator (at the time of writing it works only in Firefox, check the compatibility table), you can write the following cryptic one-liner:
var a = [34, 35, 45, 48, 49];
var b = [48, 55];
var union = [...new Set([...a, ...b])];
console.log(union);
A little explanation of this line: [...a, ...b] concatenates the two arrays (you can use a.concat(b) as well), new Set() creates a set out of the result, and thus your union, and the last [...x] converts it back to an array.
If you don't need to keep the order, and consider 45 and "45" to be the same:
function union_arrays (x, y) {
var obj = {};
for (var i = x.length-1; i >= 0; -- i)
obj[x[i]] = x[i];
for (var i = y.length-1; i >= 0; -- i)
obj[y[i]] = y[i];
var res = []
for (var k in obj) {
if (obj.hasOwnProperty(k)) // <-- optional
res.push(obj[k]);
}
return res;
}
console.log(union_arrays([34,35,45,48,49], [44,55]));
If you use the library underscore you can write like this
var unionArr = _.union([34,35,45,48,49], [48,55]);
console.log(unionArr);
<script src="https://cdnjs.cloudflare.com/ajax/libs/underscore.js/1.9.1/underscore-min.js"></script>
Ref: http://underscorejs.org/#union
I'm probably wasting time on a dead thread here. I just had to implement this and went looking to see if I was wasting my time.
I really like KennyTM's answer. That's just how I would attack the problem. Merge the keys into a hash to naturally eliminate duplicates and then extract the keys. If you actually have jQuery you can leverage its goodies to make this a 2 line problem and then roll it into an extension. The each() in jQuery will take care of not iterating over items where hasOwnProperty() is false.
jQuery.fn.extend({
union: function(array1, array2) {
var hash = {}, union = [];
$.each($.merge($.merge([], array1), array2), function (index, value) { hash[value] = value; });
$.each(hash, function (key, value) { union.push(key); } );
return union;
}
});
Note that both of the original arrays are left intact. Then you call it like this:
var union = $.union(array1, array2);
If you want to concatenate two arrays without any duplicate values, just try this:
var a=[34, 35, 45, 48, 49];
var b=[48, 55];
var c=a.concat(b).sort();
var res=c.filter((value,pos) => {return c.indexOf(value) == pos;} );
function unique(arrayName)
{
var newArray=new Array();
label: for(var i=0; i<arrayName.length;i++ )
{
for(var j=0; j<newArray.length;j++ )
{
if(newArray[j]==arrayName[i])
continue label;
}
newArray[newArray.length] = arrayName[i];
}
return newArray;
}
var arr1 = new Array(0,2,4,4,4,4,4,5,5,6,6,6,7,7,8,9,5,1,2,3,0);
var arr2= new Array(3,5,8,1,2,32,1,2,1,2,4,7,8,9,1,2,1,2,3,4,5);
var union = unique(arr1.concat(arr2));
console.log(union);
Adapted from: https://stackoverflow.com/a/4026828/1830259
Array.prototype.union = function(a)
{
var r = this.slice(0);
a.forEach(function(i) { if (r.indexOf(i) < 0) r.push(i); });
return r;
};
Array.prototype.diff = function(a)
{
return this.filter(function(i) {return a.indexOf(i) < 0;});
};
var s1 = [1, 2, 3, 4];
var s2 = [3, 4, 5, 6];
console.log("s1: " + s1);
console.log("s2: " + s2);
console.log("s1.union(s2): " + s1.union(s2));
console.log("s2.union(s1): " + s2.union(s1));
console.log("s1.diff(s2): " + s1.diff(s2));
console.log("s2.diff(s1): " + s2.diff(s1));
// Output:
// s1: 1,2,3,4
// s2: 3,4,5,6
// s1.union(s2): 1,2,3,4,5,6
// s2.union(s1): 3,4,5,6,1,2
// s1.diff(s2): 1,2
// s2.diff(s1): 5,6
I like Peter Ajtai's concat-then-unique solution, but the code's not very clear. Here's a nicer alternative:
function unique(x) {
return x.filter(function(elem, index) { return x.indexOf(elem) === index; });
};
function union(x, y) {
return unique(x.concat(y));
};
Since indexOf returns the index of the first occurrence, we check this against the current element's index (the second parameter to the filter predicate).
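For example, with the arrays from the question:
union([34, 35, 45, 48, 49], [48, 55]); // [34, 35, 45, 48, 49, 55]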
Shorter version of kennytm's answer:
function unionArrays(a, b) {
const cache = {};
a.forEach(item => cache[item] = item);
b.forEach(item => cache[item] = item);
return Object.keys(cache).map(key => cache[key]);
};
You can use a jQuery plugin: jQuery Array Utilities
For example the code below
$.union([1, 2, 2, 3], [2, 3, 4, 5, 5])
will return [1,2,3,4,5]
function unite(arr1, arr2, arr3) {
    var newArr = arr1.concat(arr2).concat(arr3);
    var a = newArr.filter(function(value) {
        return !arr1.some(function(value2) {
            return value == value2;
        });
    });
    console.log(arr1.concat(a));
} // This is for a sorted union following the order :)
function unionArrays() {
var args = arguments,
l = args.length,
obj = {},
res = [],
i, j, k;
while (l--) {
k = args[l];
i = k.length;
while (i--) {
j = k[i];
if (!obj[j]) {
obj[j] = 1;
res.push(j);
}
}
}
return res;
}
var unionArr = unionArrays([34, 35, 45, 48, 49], [44, 55]);
console.log(unionArr);
Somewhat similar in approach to alejandro's method, but a little shorter and should work with any number of arrays.
function unionArray(arrayA, arrayB) {
var obj = {},
i = arrayA.length,
j = arrayB.length,
newArray = [];
while (i--) {
if (!(arrayA[i] in obj)) {
obj[arrayA[i]] = true;
newArray.push(arrayA[i]);
}
}
while (j--) {
if (!(arrayB[j] in obj)) {
obj[arrayB[j]] = true;
newArray.push(arrayB[j]);
}
}
return newArray;
}
var unionArr = unionArray([34, 35, 45, 48, 49], [44, 55]);
console.log(unionArr);
Faster
http://jsperf.com/union-array-faster
I would first concatenate the arrays, then return only the unique values.
You have to create your own function to return unique values. Since it is a useful function, you might as well add it in as a functionality of the Array.
In your case with arrays array1 and array2 it would look like this:
array1.concat(array2) - concatenate the two arrays
array1.concat(array2).unique() - return only the unique values. Here unique() is a method you added to the prototype for Array.
The whole thing would look like this:
Array.prototype.unique = function () {
var r = new Array();
o: for(var i = 0, n = this.length; i < n; i++)
{
for(var x = 0, y = r.length; x < y; x++)
{
if(r[x]==this[i])
{
continue o;
}
}
r[r.length] = this[i];
}
return r;
}
var array1 = [34,35,45,48,49];
var array2 = [34,35,45,48,49,55];
// concatenate the arrays then return only the unique values
console.log(array1.concat(array2).unique());
I wrote this a while ago for the same reason (it works with any number of arrays):
/**
 * Returns the union of the given arrays.
 *
 * @param Any number of arrays to be united.
 * @returns {array} The union array.
 */
function uniteArrays()
{
    var union = [];
    for (var argumentIndex = 0; argumentIndex < arguments.length; argumentIndex++)
    {
        var eachArgument = arguments[argumentIndex];
        if (Array.isArray(eachArgument)) // typeof never returns 'array', so use Array.isArray
        {
            var eachArray = eachArgument;
            for (var index = 0; index < eachArray.length; index++)
            {
                var eachValue = eachArray[index];
                if (arrayHasValue(union, eachValue) == false)
                    union.push(eachValue);
            }
        }
    }
    return union;
}

function arrayHasValue(array, value)
{ return array.indexOf(value) != -1; }
A simple way to deal with merging single array values:
var values = [];
values[0] = {"id": 1235, "name": "value 1"};
values[1] = {"id": 4323, "name": "value 2"};
var object = null;
var first = values[0];
for (var i in values)
    if (i > 0)
        object = $.merge(values[i], first);
You can try these:
function union(a, b) {
return a.concat(b).reduce(function(prev, cur) {
if (prev.indexOf(cur) === -1) prev.push(cur);
return prev;
}, []);
}
or
function union(a, b) {
return a.concat(b.filter(function(el) {
return a.indexOf(el) === -1;
}));
}
ES2015 version
Array.prototype.diff = function(a) {return this.filter(i => a.indexOf(i) < 0)};
Array.prototype.union = function(a) {return [...this.diff(a), ...a]}
If you want a custom equals function to match your elements, you can use this function in ES2015:
function unionEquals(left, right, equals){
return left.concat(right).reduce( (acc,element) => {
return acc.some(elt => equals(elt, element))? acc : acc.concat(element)
}, []);
}
It traverses the left+right array. Then, for each element, it fills the accumulator if it does not find that element in the accumulator already. At the end, there are no duplicates, as specified by the equals function.
Pretty, but probably not very efficient with thousands of objects.
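A quick usage sketch with a custom equality on an id field (hypothetical data):
const merged = unionEquals(
    [{id: 1}, {id: 2}],
    [{id: 2}, {id: 3}],
    (a, b) => a.id === b.id
);
// merged: [{id: 1}, {id: 2}, {id: 3}]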
I think it would be simplest to create a new array, adding the unique values only as determined by indexOf.
This seems to me to be the most straightforward solution, though I don't know if it is the most efficient. Collation is not preserved.
var a = [34, 35, 45, 48, 49],
b = [48, 55];
var c = union(a, b);
function union(a, b) { // will work for n >= 2 inputs
var newArray = [];
//cycle through input arrays
for (var i = 0, l = arguments.length; i < l; i++) {
//cycle through each input arrays elements
var array = arguments[i];
for (var ii = 0, ll = array.length; ii < ll; ii++) {
var val = array[ii];
//only add elements to the new array if they are unique
if (newArray.indexOf(val) < 0) newArray.push(val);
}
}
return newArray;
}
[i for( i of new Set(array1.concat(array2)))]
Let me break this into parts for you
// This is a list by comprehension
// Store each result in an element of the array
[i
// will be placed in the variable "i", for each element of...
for( i of
// ... the Set which is made of...
new Set(
// ...the concatenation of both arrays
array1.concat(array2)
)
)
]
In other words, it first concatenates both and then removes the duplicates (a Set, by definition, cannot have duplicates).
Do note, though, that the order of the elements is not guaranteed in this case.
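Note that array comprehensions like this were a non-standard Firefox extension and were never standardized, so current engines will reject the syntax. The standard equivalent, as in the spread-based answer above, would be:
var union = [...new Set(array1.concat(array2))];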

Reordering arrays

Say, I have an array that looks like this:
var playlist = [
{artist:"Herbie Hancock", title:"Thrust"},
{artist:"Lalo Schifrin", title:"Shifting Gears"},
{artist:"Faze-O", title:"Riding High"}
];
How can I move an element to another position?
For example, I want to move {artist:"Lalo Schifrin", title:"Shifting Gears"} to the end.
I tried using splice, like this:
var tmp = playlist.splice(2,1);
playlist.splice(2,0,tmp);
But it doesn't work.
The syntax of Array.splice is:
yourArray.splice(index, howmany, element1, /*.....,*/ elementX);
Where:
index is the position in the array you want to start removing elements from
howmany is how many elements you want to remove from index
element1, ..., elementX are elements you want inserted from position index.
This means that splice() can be used to remove elements, add elements, or replace elements in an array, depending on the arguments you pass.
Note that it returns an array of the removed elements.
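For example, a quick illustration of the behaviour described above:
var arr = [1, 2, 3, 4, 5];
var removed = arr.splice(1, 2); // remove two elements starting at index 1
console.log(removed);           // [2, 3]
console.log(arr);               // [1, 4, 5]
arr.splice(1, 0, 9);            // insert 9 at index 1, removing nothing
console.log(arr);               // [1, 9, 4, 5]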
Something nice and generic would be:
Array.prototype.move = function (from, to) {
this.splice(to, 0, this.splice(from, 1)[0]);
};
Then just use:
var ar = [1,2,3,4,5];
ar.move(0,3);
alert(ar) // 2,3,4,1,5
If you know the indexes you could easily swap the elements, with a simple function like this:
function swapElement(array, indexA, indexB) {
var tmp = array[indexA];
array[indexA] = array[indexB];
array[indexB] = tmp;
}
swapElement(playlist, 1, 2);
// [{"artist":"Herbie Hancock","title":"Thrust"},
// {"artist":"Faze-O","title":"Riding High"},
// {"artist":"Lalo Schifrin","title":"Shifting Gears"}]
Array indexes are just properties of the array object, so you can swap its values.
With ES6 you can do something like this:
const swapPositions = (array, a ,b) => {
[array[a], array[b]] = [array[b], array[a]]
}
let array = [1,2,3,4,5];
swapPositions(array,0,1);
/// => [2, 1, 3, 4, 5]
Here is an immutable version for those who are interested:
function immutableMove(arr, from, to) {
return arr.reduce((prev, current, idx, self) => {
if (from === to) {
prev.push(current);
}
if (idx === from) {
return prev;
}
if (from < to) {
prev.push(current);
}
if (idx === to) {
prev.push(self[from]);
}
if (from > to) {
prev.push(current);
}
return prev;
}, []);
}
You could always use the sort method, if you don't know where the record is at present:
playlist.sort(function (a, b) {
return a.artist == "Lalo Schifrin"
? 1 // Move it down the list
: 0; // Keep it the same
});
Change 2 to 1 as the first parameter in the splice call when removing the element:
var tmp = playlist.splice(1, 1);
playlist.splice(2, 0, tmp[0]);
Immutable version, no side effects (doesn’t mutate original array):
const testArr = [1, 2, 3, 4, 5];
function move(from, to, arr) {
const newArr = [...arr];
const item = newArr.splice(from, 1)[0];
newArr.splice(to, 0, item);
return newArr;
}
console.log(move(3, 1, testArr));
// [1, 4, 2, 3, 5]
codepen: https://codepen.io/mliq/pen/KKNyJZr
EDIT: Please check out Andy's answer as his answer came first and this is solely an extension of his
I know this is an old question, but I think it's worth it to include Array.prototype.sort().
Here's an example from MDN along with the link
var numbers = [4, 2, 5, 1, 3];
numbers.sort(function(a, b) {
return a - b;
});
console.log(numbers);
// [1, 2, 3, 4, 5]
Luckily it doesn't only work with numbers:
arr.sort([compareFunction])
compareFunction
Specifies a function that defines the sort order. If omitted, the array is sorted according to each character's Unicode code point value, according to the string conversion of each element.
I noticed that you're ordering them by first name:
let playlist = [
{artist:"Herbie Hancock", title:"Thrust"},
{artist:"Lalo Schifrin", title:"Shifting Gears"},
{artist:"Faze-O", title:"Riding High"}
];
// sort by name
playlist.sort((a, b) => {
if(a.artist < b.artist) { return -1; }
if(a.artist > b.artist) { return 1; }
// else names must be equal
return 0;
});
note that if you wanted to order them by last name you would have to either have a key for both first_name & last_name or do some regex magic, which I can't do XD
Hope that helps :)
Try this:
playlist = playlist.concat(playlist.splice(1, 1));
If you only ever want to move one item from an arbitrary position to the end of the array, this should work:
function toEnd(list, position) {
    list.push(list.splice(position, 1)[0]);
    return list;
}
If you want to move multiple items from some arbitrary position to the end, you can do:
function toEnd(list, from, count) {
list.push.apply(list, list.splice(from, count));
return list;
}
If you want to move multiple items from some arbitrary position to some arbitrary position, try:
function move(list, from, count, to) {
var args = [from > to ? to : to - count, 0];
args.push.apply(args, list.splice(from, count));
list.splice.apply(list, args);
return list;
}
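Applied to the playlist from the question (a quick usage sketch, assuming the single-item toEnd above is the one in scope):
var playlist = [
    {artist: "Herbie Hancock", title: "Thrust"},
    {artist: "Lalo Schifrin", title: "Shifting Gears"},
    {artist: "Faze-O", title: "Riding High"}
];
toEnd(playlist, 1);
// [{artist: "Herbie Hancock", ...}, {artist: "Faze-O", ...}, {artist: "Lalo Schifrin", ...}]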
The other answers call splice twice, which shifts more elements than necessary. By shifting only the elements that lie between from and to, the work done is proportional to |to - from|.
A more performant solution:
For an array with n elements, where x is to and y is from (requiring n > x && n > y), the number of elements touched is |y - x|, i.e. the number of elements between from and to.
Best case: O(1) // e.g. from: 4, to: 5
Average: O(n/2)
Worst case: O(n) // e.g. from: 0, to: n
function reOrder(from, to, arr) {
    if (from == to || from < 0 || to < 0) { return arr; }
    var moveNumber = arr[from];
    if (from < to) {
        for (var i = from; i < to; i++) {
            arr[i] = arr[i + 1];
        }
    } else {
        for (var i = from; i > to; i--) {
            arr[i] = arr[i - 1];
        }
    }
    arr[to] = moveNumber;
    return arr;
}
var arr = [0,1,2,3,4,5,6,7,8,9,10,11,12,13];
console.log(reOrder(3,7,arr));
As a simple mutable solution you can call splice twice in a row:
playlist.splice(playlist.length - 1, 1, ...playlist.splice(INDEX_TO_MOVE, 1))
On the other hand, a simple immutable solution could use slice, since this method returns a copy of a section of the original array without changing it:
const copy = [...playlist.slice(0, INDEX_TO_MOVE - 1), ...playlist.slice(INDEX_TO_MOVE), ...playlist.slice(INDEX_TO_MOVE - 1, INDEX_TO_MOVE)]
I came here looking for a way to rearrange a complete array. I wanted something like what I did below, but most of the answers I found only move one element from position A to position B.
Hope my answer will help someone here.
function reArrangeArray(firstIndex=0,arr){
var a = [];
var b = []
for(let i = 0; i<= (arr.length-1); i++){
if(i<firstIndex){
a.push(arr[i])
}else{
b.push(arr[i])
}
}
return b.concat(a)
}
const arrayToRearrange = [{name: 'A'},{name: 'B'},{name: 'C'},{name: 'D'},{name: 'E'}];
reArrangeArray(2,arrayToRearrange)
// Output
// [
// { name: 'C' },
// { name: 'D' },
// { name: 'E' },
// { name: 'A' },
// { name: 'B' }
// ]
Reordering works this way:
var tmpOrder = playlist[oldIndex];
playlist.splice(oldIndex, 1);
playlist.splice(newIndex, 0, tmpOrder);
I hope this will work
