I have an array of e-mails (it can be just 1 email, or 100 emails), and I need to send the array with an ajax request (that I know how to do), but I can only send an array that has 10 or fewer e-mails in it. So if there is an original array of 20 e-mails, I will need to split them up into 2 arrays of 10 each. Or if there are 15 e-mails in the original array, then 1 array of 10 and another array of 5. I'm using jQuery; what would be the best way to do this?
Don't use jQuery... use plain JavaScript:
var a = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15];
var b = a.splice(0,10);
//a is now [11,12,13,14,15];
//b is now [1,2,3,4,5,6,7,8,9,10];
You could loop this to get the behavior you want.
var a = YOUR_ARRAY;
while(a.length) {
console.log(a.splice(0,10));
}
This gives you 10 elements at a time; if you have, say, 15 elements, you would get 1-10, then 11-15, as you wanted.
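Applied back to the original use case, here is a minimal sketch of sending the e-mails in batches of at most 10; sendEmails is a hypothetical stand-in for your own ajax call:
var emails = YOUR_ARRAY.slice(); // work on a copy so the original list stays intact
while (emails.length) {
  var batch = emails.splice(0, 10); // at most 10 e-mails per request
  sendEmails(batch); // hypothetical ajax wrapper
}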
var size = 10; var arrayOfArrays = [];
for (var i=0; i<bigarray.length; i+=size) {
arrayOfArrays.push(bigarray.slice(i,i+size));
}
console.log(arrayOfArrays);
Unlike splice(), slice() is non-destructive to the original array.
Just loop over the array, splicing it until it's all consumed.
var a = ['a','b','c','d','e','f','g']
, chunk
while (a.length > 0) {
chunk = a.splice(0,3)
console.log(chunk)
}
output
[ 'a', 'b', 'c' ]
[ 'd', 'e', 'f' ]
[ 'g' ]
You can use lodash:
https://lodash.com/docs
_.chunk(['a', 'b', 'c', 'd'], 2);
// → [['a', 'b'], ['c', 'd']]
Array.reduce could be inefficient for large arrays, especially with the mod operator. I think a cleaner (and possibly easier to read) functional solution would be this:
const chunkArray = (arr, size) =>
arr.length > size
? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
: [arr];
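A quick usage check (the addresses are made up):
chunkArray(['a@x.com', 'b@x.com', 'c@x.com', 'd@x.com', 'e@x.com'], 3);
// => [['a@x.com', 'b@x.com', 'c@x.com'], ['d@x.com', 'e@x.com']]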
Assuming you don't want to destroy the original array, you can use code like this to break up the long array into smaller arrays which you can then iterate over:
var longArray = []; // assume this has 100 or more email addresses in it
var shortArrays = [], i, len;
for (i = 0, len = longArray.length; i < len; i += 10) {
shortArrays.push(longArray.slice(i, i + 10));
}
// now you can iterate over shortArrays which is an
// array of arrays where each array has 10 or fewer
// of the original email addresses in it
for (i = 0, len = shortArrays.length; i < len; i++) {
// shortArrays[i] is an array of email addresss of 10 or less
}
Another implementation:
const arr = ["H", "o", "w", " ", "t", "o", " ", "s", "p", "l", "i", "t", " ", "a", " ", "l", "o", "n", "g", " ", "a", "r", "r", "a", "y", " ", "i", "n", "t", "o", " ", "s", "m", "a", "l", "l", "e", "r", " ", "a", "r", "r", "a", "y", "s", ",", " ", "w", "i", "t", "h", " ", "J", "a", "v", "a", "S", "c", "r", "i", "p", "t"];
const size = 3;
const res = arr.reduce((acc, curr, i) => {
if ( !(i % size) ) { // if index is 0 or can be divided by the `size`...
acc.push(arr.slice(i, i + size)); // ..push a chunk of the original array to the accumulator
}
return acc;
}, []);
// => [["H", "o", "w"], [" ", "t", "o"], [" ", "s", "p"], ["l", "i", "t"], [" ", "a", " "], ["l", "o", "n"], ["g", " ", "a"], ["r", "r", "a"], ["y", " ", "i"], ["n", "t", "o"], [" ", "s", "m"], ["a", "l", "l"], ["e", "r", " "], ["a", "r", "r"], ["a", "y", "s"], [",", " ", "w"], ["i", "t", "h"], [" ", "J", "a"], ["v", "a", "S"], ["c", "r", "i"], ["p", "t"]]
NB - This does not modify the original array.
Or, if you prefer a functional, 100% immutable (although there's really nothing wrong with mutating in place as done above) and self-contained method:
function splitBy(size, list) {
return list.reduce((acc, curr, i, self) => {
if ( !(i % size) ) {
return [
...acc,
self.slice(i, i + size),
];
}
return acc;
}, []);
}
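A quick example of calling splitBy:
splitBy(3, [1, 2, 3, 4, 5, 6, 7]);
// => [[1, 2, 3], [4, 5, 6], [7]]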
As a supplement to @jyore's answer, and in case you still want to keep the original array:
var originalArray = [1,2,3,4,5,6,7,8];
var splitArray = function (arr, size) {
var arr2 = arr.slice(0),
arrays = [];
while (arr2.length > 0) {
arrays.push(arr2.splice(0, size));
}
return arrays;
}
splitArray(originalArray, 2);
// originalArray is still = [1,2,3,4,5,6,7,8];
I would like to share my solution as well. It's a little more verbose, but it works.
var data = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15];
var chunksize = 4;
var chunks = [];
data.forEach((item)=>{
if(!chunks.length || chunks[chunks.length-1].length == chunksize)
chunks.push([]);
chunks[chunks.length-1].push(item);
});
console.log(chunks);
Output (formatted):
[ [ 1, 2, 3, 4],
[ 5, 6, 7, 8],
[ 9, 10, 11, 12],
[13, 14, 15 ] ]
You can start with an empty array and push slices of your desired range from the original array into it, removing them from the original at the same time until it is empty.
const originalArr = [1,2,3,4,5,6,7,8,9,10,11];
const splittedArray = [];
// `range` is the desired chunk size (e.g. 3 or 4 in the outputs below)
while (originalArr.length > 0) {
splittedArray.push(originalArr.splice(0,range));
}
output for range 3
splittedArray === [[1,2,3],[4,5,6],[7,8,9],[10,11]]
output for range 4
splittedArray === [[1,2,3,4],[5,6,7,8],[9,10,11]]
This is also good for front-end pagination, if you want.
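As a rough sketch of the pagination idea (getPage is a hypothetical helper; pages are 1-based):
const getPage = (pages, pageNumber) => pages[pageNumber - 1] || [];
console.log(getPage(splittedArray, 2)); // => [4,5,6] when range is 3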
Another method:
var longArray = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
var size = 2;
var newArray = new Array(Math.ceil(longArray.length / size)).fill("")
.map(function() { return this.splice(0, size) }, longArray.slice());
// newArray = [[1, 2], [3, 4], [5, 6], [7, 8], [9, 10]];
This doesn't affect the original array, as a copy made using slice is passed into the 'this' argument of map.
Another implementation, using Array.reduce (I think it’s the only one missing!):
const splitArray = (arr, size) =>
{
if (size === 0) {
return [];
}
return arr.reduce((split, element, index) => {
index % size === 0 ? split.push([element]) : split[Math.floor(index / size)].push(element);
return split;
}, []);
};
Like many solutions above, this one is non-destructive. Returning an empty array when the size is 0 is just a convention. If the if block is omitted you get an error, which might be what you want.
More compact:
const chunk = (xs, size) =>
xs.map((_, i) =>
(i % size === 0 ? xs.slice(i, i + size) : null)).filter(Boolean);
// Usage:
const sampleArray = new Array(33).fill(undefined).map((_, i) => i);
console.log(chunk(sampleArray, 5));
function chunkArrayInGroups(arr, size) {
var newArr=[];
for (var i=0; i < arr.length; i+= size){
newArr.push(arr.slice(i,i+size));
}
return newArr;
}
chunkArrayInGroups([0, 1, 2, 3, 4, 5, 6], 3);
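For reference, that call returns:
[[0, 1, 2], [3, 4, 5], [6]]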
Using ES6 Generators
Late to the party, but ES6 generators opened up another neat way to achieve what is asked for.
/**
* Returns chunks of size n.
* @param {Array<any>} array any array
* @param {number} n size of chunk
*/
function* chunks(array, n){
for(let i = 0; i < array.length; i += n) yield array.slice(i, i + n);
}
const result = [...chunks([1, 2, 3, 4, 5, 6, 7, 8 , 9, 10], 3)];
console.log(result);
Make it work for infinite Generators
Using the same idea, you can create a generator which can also generate an infinite amount of n-sized chunks from values retrieved from another (possibly infinite) generator function. This can be very handy to lazily generate values only when they are required, which significantly reduces the required memory, or it can even be used to generate a possibly infinite/unknown number of chunks.
Here is an example which uses two generators.
nextNaturalNumber() is an infinite generator which always returns the next natural number. I am using the ES2020 bigint datatype here, so there is no restriction (by JavaScript) on the size of the value.
chunksFromIterable() creates n-sized chunks from a possibly infinite iterable.
/**
* Returns chunks of size n for a possibly infinite iterator.
* n must be >= 1
* @param {Iterable<any>} iterable any iterable
* @param {number} n size of chunk for n >= 1
*/
function* chunksFromIterable(iterable, n){
let arr = [];
let i = n;
for (const value of iterable) {
if(i <= 0) {
// another chunk of size n is filled => return chunk
yield arr;
arr = []; // create new empty array
i = n;
};
arr.push(value);
i--;
}
// in case the iterable is not infinite check if there are still values in the array and return them if necessary
if(arr.length > 0) yield arr;
}
/**
* Infinite iterator which always gets the next natural number.
*/
function* nextNaturalNumber(){
let i = 0n;
while(true) {
i += 1n;
yield i;
}
}
console.log("Finite iterable:");
// this version can now be used using the for ... of loop
for(const threeNaturalNumbers of chunksFromIterable([1, 2, 3, 4, 5, 6, 7, 8 , 9, 10], 3)){
console.log(threeNaturalNumbers);
}
console.log("Infinite iterable:");
// and it can also be used for this infinite generator
for(const threeNaturalNumbers of chunksFromIterable(nextNaturalNumber(), 3)){
printBigIntArray(threeNaturalNumbers);
if(threeNaturalNumbers[0] > 30) break; // end here to avoid an infinite loop
}
// helper function to print array of bigints as this does not seem to be working for snippets
function printBigIntArray(arr){
console.log(`[${arr.join(", ")}]`);
}
You can take a look at this code. Simple and effective.
function chunkArrayInGroups(array, unit) {
var results = [],
length = Math.ceil(array.length / unit);
for (var i = 0; i < length; i++) {
results.push(array.slice(i * unit, (i + 1) * unit));
}
return results;
}
chunkArrayInGroups(["a", "b", "c", "d"], 2);
Here is a simple one-liner:
var segment = (arr, n) => arr.reduce((r,e,i) => i%n ? (r[r.length-1].push(e), r)
: (r.push([e]), r), []),
arr = Array.from({length: 31}).map((_,i) => i+1);
console.log(segment(arr,7));
as a function
var arrayChunk = function (array, chunkSize) {
var arrayOfArrays = [];
if (array.length <= chunkSize) {
arrayOfArrays.push(array);
} else {
for (var i=0; i<array.length; i+=chunkSize) {
arrayOfArrays.push(array.slice(i,i+chunkSize));
}
}
return arrayOfArrays;
}
to use
arrayChunk(originalArray, 10) //10 being the chunk size.
using recursion
let myArr = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16];
let size = 4; // Math.sqrt(myArr.length); --> for an n x n matrix
let tempArr = [];
function createMatrix(arr, i) {
if (arr.length !== 0) {
if(i % size == 0) {
tempArr.push(arr.splice(0,size))
}
createMatrix(arr, i - 1)
}
}
createMatrix(myArr, myArr.length);
console.log(tempArr);
Note: The existing array i.e. myArr will be modified.
Using the prototype, we can set the method directly on the Array class:
Array.prototype.chunk = function(n) {
if (!this.length) {
return [];
}
return [this.slice(0, n)].concat(this.slice(n).chunk(n));
};
console.log([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15].chunk(5));
let original = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14];
let size = 5;
let fragments = Array.from(Array(Math.ceil(original.length / size))).map((_, index) => original.slice(index * size, (index + 1) * size));
If you want a method that doesn't modify the existing array, try this:
let oldArray = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15];
let newArray = [];
let size = 3; // Size of chunks you are after
let j = -1; // This helps us keep track of the child arrays (start at -1 so the first chunk lands at index 0)
for (var i = 0; i < oldArray.length; i++) {
if (i % size === 0) {
j++
}
if(!newArray[j]) newArray[j] = [];
newArray[j].push(oldArray[i])
}
function chunkArrayInGroups(arr, size) {
var newArr=[];
for (var i=0; arr.length>size; i++){
newArr.push(arr.splice(0,size));
}
newArr.push(arr.slice(0));
return newArr;
}
chunkArrayInGroups([0, 1, 2, 3, 4, 5, 6], 3);
You can use the following code to achieve the required functionality
const splitter = (arr, splitBy, cache = []) => {
const tmp = [...arr]
while (tmp.length) cache.push(tmp.splice(0, splitBy))
return cache
}
const split = splitter([0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21], 10)
console.log(split);
Notice that the input was an array of length 22; the splitter function splits it into 2 smaller arrays of 10 items and 1 array of 2 items.
let a = [1, 2, 3, 4, 6, 7, 8, 9, 10, 11];
let _splitCount = 3;
let b = a.reduce((acc, curr, index) => {
if (index % _splitCount === 0) {
acc.push([curr]);
} else {
acc[acc.length - 1].push(curr);
}
return acc;
}, []);
This is the easy solution, I think ❤️
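With the sample a and _splitCount above, b ends up as:
[[1, 2, 3], [4, 6, 7], [8, 9, 10], [11]]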
You can use the function below if you know the number of groups (numGroups) to split into.
function createGroups(arr, numGroups) {
const perGroup = Math.ceil(arr.length / numGroups);
return new Array(numGroups)
.fill('')
.map((_, i) => arr.slice(i * perGroup, (i + 1) * perGroup));
}
Sample Use:
createGroups([0, 1, 2, 3, 4, 5, 6], 3); //arr = [0, 1, 2, 3, 4, 5, 6] and numGroups = 3
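For that sample input, the call yields:
[[0, 1, 2], [3, 4, 5], [6]]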
Related
Let's say that I have a JavaScript array looking as follows:
["Element 1","Element 2","Element 3",...]; // with close to a hundred elements.
What approach would be appropriate to chunk (split) the array into many smaller arrays with, let's say, 10 elements at most?
The array.slice() method can extract a slice from the beginning, middle, or end of an array for whatever purposes you require, without changing the original array.
const chunkSize = 10;
for (let i = 0; i < array.length; i += chunkSize) {
const chunk = array.slice(i, i + chunkSize);
// do whatever
}
The last chunk may be smaller than chunkSize. For example when given an array of 12 elements the first chunk will have 10 elements, the second chunk only has 2.
Note that a chunkSize of 0 will cause an infinite loop.
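If chunkSize might come from somewhere untrusted, a minimal guard before the loop could look like this (the exact error type is just a suggestion):
if (!Number.isInteger(chunkSize) || chunkSize < 1) {
  throw new RangeError('chunkSize must be a positive integer');
}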
Here's an ES6 version using reduce:
const perChunk = 2 // items per chunk
const inputArray = ['a','b','c','d','e']
const result = inputArray.reduce((resultArray, item, index) => {
const chunkIndex = Math.floor(index/perChunk)
if(!resultArray[chunkIndex]) {
resultArray[chunkIndex] = [] // start a new chunk
}
resultArray[chunkIndex].push(item)
return resultArray
}, [])
console.log(result); // result: [['a','b'], ['c','d'], ['e']]
And you're ready to chain further map/reduce transformations.
Your input array is left intact
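For instance, a small sketch of chaining a further map over the chunks, joining each chunk into a string:
const joined = result.map(chunk => chunk.join('-'));
console.log(joined); // ['a-b', 'c-d', 'e']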
If you prefer a shorter but less readable version, you can sprinkle some concat into the mix for the same end result:
inputArray.reduce((all,one,i) => {
const ch = Math.floor(i/perChunk);
all[ch] = [].concat((all[ch]||[]),one);
return all
}, [])
You can use the remainder operator to put consecutive items into different chunks:
const ch = (i % perChunk);
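Note that this distributes items round-robin into perChunk buckets (consecutive items land in different chunks) rather than producing consecutive slices; a quick sketch with the same inputArray and perChunk as above:
inputArray.reduce((all, one, i) => {
  const ch = (i % perChunk);
  all[ch] = [].concat((all[ch] || []), one);
  return all;
}, []);
// => [['a', 'c', 'e'], ['b', 'd']]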
Modified from an answer by dbaseman: https://stackoverflow.com/a/10456344/711085
Object.defineProperty(Array.prototype, 'chunk_inefficient', {
value: function(chunkSize) {
var array = this;
return [].concat.apply([],
array.map(function(elem, i) {
return i % chunkSize ? [] : [array.slice(i, i + chunkSize)];
})
);
}
});
console.log(
[1, 2, 3, 4, 5, 6, 7].chunk_inefficient(3)
)
// [[1, 2, 3], [4, 5, 6], [7]]
minor addendum:
I should point out that the above is a not-that-elegant (in my mind) workaround to use Array.map. It basically does the following, where ~ is concatenation:
[[1,2,3]]~[]~[] ~ [[4,5,6]]~[]~[] ~ [[7]]
It has the same asymptotic running time as the method below, but perhaps a worse constant factor due to building empty lists. One could rewrite this as follows (mostly the same as Blazemonger's method, which is why I did not originally submit this answer):
More efficient method:
// refresh page if experimenting and you already defined Array.prototype.chunk
Object.defineProperty(Array.prototype, 'chunk', {
value: function(chunkSize) {
var R = [];
for (var i = 0; i < this.length; i += chunkSize)
R.push(this.slice(i, i + chunkSize));
return R;
}
});
console.log(
[1, 2, 3, 4, 5, 6, 7].chunk(3)
)
My preferred way nowadays is the above, or one of the following:
Array.range = function(n) {
// Array.range(5) --> [0,1,2,3,4]
return Array.apply(null,Array(n)).map((x,i) => i)
};
Object.defineProperty(Array.prototype, 'chunk', {
value: function(n) {
// ACTUAL CODE FOR CHUNKING ARRAY:
return Array.range(Math.ceil(this.length/n)).map((x,i) => this.slice(i*n,i*n+n));
}
});
Demo:
> JSON.stringify( Array.range(10).chunk(3) );
[[0,1,2],[3,4,5],[6,7,8],[9]]
Or if you don't want an Array.range function, it's actually just a one-liner (excluding the fluff):
var ceil = Math.ceil;
Object.defineProperty(Array.prototype, 'chunk', {value: function(n) {
return Array(ceil(this.length/n)).fill().map((_,i) => this.slice(i*n,i*n+n));
}});
or
Object.defineProperty(Array.prototype, 'chunk', {value: function(n) {
return Array.from(Array(ceil(this.length/n)), (_,i)=>this.slice(i*n,i*n+n));
}});
Try to avoid mucking with native prototypes, including Array.prototype, if you don't know who will be consuming your code (3rd parties, coworkers, yourself at a later date, etc.).
There are ways to safely extend prototypes (but not in all browsers) and there are ways to safely consume objects created from extended prototypes, but a better rule of thumb is to follow the Principle of Least Surprise and avoid these practices altogether.
If you have some time, watch Andrew Dupont's JSConf 2011 talk, "Everything is Permitted: Extending Built-ins", for a good discussion about this topic.
But back to the question: while the solutions above will work, they are overly complex and require unnecessary computational overhead. Here is my solution:
function chunk (arr, len) {
var chunks = [],
i = 0,
n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len));
}
return chunks;
}
// Optionally, you can do the following to avoid cluttering the global namespace:
Array.chunk = chunk;
Using generators
function* chunks(arr, n) {
for (let i = 0; i < arr.length; i += n) {
yield arr.slice(i, i + n);
}
}
let someArray = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
console.log([...chunks(someArray, 2)]) // [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]]
Can be typed with TypeScript like so:
function* chunks<T>(arr: T[], n: number): Generator<T[], void> {
for (let i = 0; i < arr.length; i += n) {
yield arr.slice(i, i + n);
}
}
I tested the different answers on jsperf.com. The result is available here: https://web.archive.org/web/20150909134228/https://jsperf.com/chunk-mtds
And the fastest function (and that works from IE8) is this one:
function chunk(arr, chunkSize) {
if (chunkSize <= 0) throw "Invalid chunk size";
var R = [];
for (var i=0,len=arr.length; i<len; i+=chunkSize)
R.push(arr.slice(i,i+chunkSize));
return R;
}
Splice version using ES6
let [list,chunkSize] = [[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15], 6];
list = [...Array(Math.ceil(list.length / chunkSize))].map(_ => list.splice(0,chunkSize))
console.log(list);
I'd prefer to use splice method:
var chunks = function(array, size) {
var results = [];
while (array.length) {
results.push(array.splice(0, size));
}
return results;
};
Nowadays you can use lodash's chunk function to split the array into smaller arrays: https://lodash.com/docs#chunk. No need to fiddle with the loops anymore!
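For example (assuming lodash is loaded as _):
_.chunk(['a', 'b', 'c', 'd', 'e'], 2);
// => [['a', 'b'], ['c', 'd'], ['e']]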
Old question: New answer! I actually was working with an answer from this question and had a friend improve on it! So here it is:
Array.prototype.chunk = function ( n ) {
if ( !this.length ) {
return [];
}
return [ this.slice( 0, n ) ].concat( this.slice(n).chunk(n) );
};
[1,2,3,4,5,6,7,8,9,0].chunk(3);
> [[1,2,3],[4,5,6],[7,8,9],[0]]
One more solution using Array.prototype.reduce():
const chunk = (array, size) =>
array.reduce((acc, _, i) => {
if (i % size === 0) acc.push(array.slice(i, i + size))
return acc
}, [])
// Usage:
const numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
const chunked = chunk(numbers, 3)
console.log(chunked)
This solution is very similar to the solution by Steve Holgado. However, because this solution doesn't utilize array spreading and doesn't create new arrays in the reducer function, it's faster (see jsPerf test) and subjectively more readable (simpler syntax) than the other solution.
At every nth iteration (where n = size; starting at the first iteration), the accumulator array (acc) is appended with a chunk of the array (array.slice(i, i + size)) and then returned. At other iterations, the accumulator array is returned as-is.
If size is zero, the method returns an empty array. If size is negative, the method returns broken results. So, if needed in your case, you may want to do something about negative or non-positive size values.
If speed is important in your case, a simple for loop would be faster than using reduce() (see the jsPerf test), and some may find this style more readable as well:
function chunk(array, size) {
// This prevents infinite loops
if (size < 1) throw new Error('Size must be positive')
const result = []
for (let i = 0; i < array.length; i += size) {
result.push(array.slice(i, i + size))
}
return result
}
There have been many answers but this is what I use:
const chunk = (arr, size) =>
arr
.reduce((acc, _, i) =>
(i % size)
? acc
: [...acc, arr.slice(i, i + size)]
, [])
// USAGE
const numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
chunk(numbers, 3)
// [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]
First, check for a remainder when dividing the index by the chunk size.
If there is a remainder then just return the accumulator array.
If there is no remainder then the index is divisible by the chunk size, so take a slice from the original array (starting at the current index) and add it to the accumulator array.
So, the returned accumulator array for each iteration of reduce looks something like this:
// 0: [[1, 2, 3]]
// 1: [[1, 2, 3]]
// 2: [[1, 2, 3]]
// 3: [[1, 2, 3], [4, 5, 6]]
// 4: [[1, 2, 3], [4, 5, 6]]
// 5: [[1, 2, 3], [4, 5, 6]]
// 6: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 7: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 8: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 9: [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]
I think this is a nice recursive solution with ES6 syntax:
const chunk = function(array, size) {
if (!array.length) {
return [];
}
const head = array.slice(0, size);
const tail = array.slice(size);
return [head, ...chunk(tail, size)];
};
console.log(chunk([1,2,3], 2));
ONE-LINER
const chunk = (a,n)=>[...Array(Math.ceil(a.length/n))].map((_,i)=>a.slice(n*i,n+n*i));
For TypeScript
const chunk = <T>(arr: T[], size: number): T[][] =>
[...Array(Math.ceil(arr.length / size))].map((_, i) =>
arr.slice(size * i, size + size * i)
);
DEMO
const chunk = (a,n)=>[...Array(Math.ceil(a.length/n))].map((_,i)=>a.slice(n*i,n+n*i));
document.write(JSON.stringify(chunk([1, 2, 3, 4], 2)));
Chunk By Number Of Groups
const part=(a,n)=>[...Array(n)].map((_,i)=>a.slice(i*Math.ceil(a.length/n),(i+1)*Math.ceil(a.length/n)));
For TypeScript
const part = <T>(a: T[], n: number): T[][] => {
const b = Math.ceil(a.length / n);
return [...Array(n)].map((_, i) => a.slice(i * b, (i + 1) * b));
};
DEMO
const part = (a, n) => {
const b = Math.ceil(a.length / n);
return [...Array(n)].map((_, i) => a.slice(i * b, (i + 1) * b));
};
document.write(JSON.stringify(part([1, 2, 3, 4, 5, 6], 2))+'<br/>');
document.write(JSON.stringify(part([1, 2, 3, 4, 5, 6, 7], 2)));
Ok, let's start with a fairly tight one:
function chunk(arr, n) {
return arr.slice(0,(arr.length+n-1)/n|0).
map(function(c,i) { return arr.slice(n*i,n*i+n); });
}
Which is used like this:
chunk([1,2,3,4,5,6,7], 2);
Then we have this tight reducer function:
function chunker(p, c, i) {
(p[i/this|0] = p[i/this|0] || []).push(c);
return p;
}
Which is used like this:
[1,2,3,4,5,6,7].reduce(chunker.bind(3),[]);
Since a kitten dies when we bind this to a number, we can do manual currying like this instead:
// Fluent alternative API without prototype hacks.
function chunker(n) {
return function(p, c, i) {
(p[i/n|0] = p[i/n|0] || []).push(c);
return p;
};
}
Which is used like this:
[1,2,3,4,5,6,7].reduce(chunker(3),[]);
Then the still pretty tight function which does it all in one go:
function chunk(arr, n) {
return arr.reduce(function(p, cur, i) {
(p[i/n|0] = p[i/n|0] || []).push(cur);
return p;
},[]);
}
chunk([1,2,3,4,5,6,7], 3);
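For reference, that last call returns:
[[1, 2, 3], [4, 5, 6], [7]]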
I aimed at creating a simple non-mutating solution in pure ES6. Peculiarities in javascript make it necessary to fill the empty array before mapping :-(
function chunk(a, l) {
return new Array(Math.ceil(a.length / l)).fill(0)
.map((_, n) => a.slice(n*l, n*l + l));
}
This version with recursion seems simpler and more compelling:
function chunk(a, l) {
if (a.length == 0) return [];
else return [a.slice(0, l)].concat(chunk(a.slice(l), l));
}
The ridiculously weak array functions of ES6 make for good puzzles :-)
Created an npm package for this: https://www.npmjs.com/package/array.chunk
function chunk(arr, size) {
  var result = [];
  for (var i = 0; i < arr.length; i += size) {
    result.push(arr.slice(i, size + i));
  }
  return result;
}
When using a TypedArray, use subarray() instead of slice():
function chunkTypedArray(arr, size) {
  var result = [];
  for (var i = 0; i < arr.length; i += size) {
    result.push(arr.subarray(i, size + i));
  }
  return result;
}
Using Array.prototype.splice(), splice the array until it has no elements left.
Array.prototype.chunk = function(size) {
let result = [];
while(this.length) {
result.push(this.splice(0, size));
}
return result;
}
const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
console.log(arr.chunk(2));
Update
Array.prototype.splice() mutates the original array, so after performing chunk() the original array (arr) becomes [].
So if you want to keep the original array untouched, copy the arr data into another array and do the same thing.
Array.prototype.chunk = function(size) {
let data = [...this];
let result = [];
while(data.length) {
result.push(data.splice(0, size));
}
return result;
}
const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
console.log('chunked:', arr.chunk(2));
console.log('original', arr);
P.S.: Thanks to @mts-knn for mentioning the matter.
I recommend using lodash. Chunking is one of many useful functions there.
Instructions:
npm i --save lodash
Include in your project:
import * as _ from 'lodash';
Usage:
const arrayOfElements = ["Element 1","Element 2","Element 3", "Element 4", "Element 5","Element 6","Element 7","Element 8","Element 9","Element 10","Element 11","Element 12"]
const chunkedElements = _.chunk(arrayOfElements, 10)
You can find my sample here:
https://playcode.io/659171/
The following ES2015 approach works without having to define a function and directly on anonymous arrays (example with chunk size 2):
[11,22,33,44,55].map((_, i, all) => all.slice(2*i, 2*i+2)).filter(x=>x.length)
If you want to define a function for this, you could do it as follows (improving on K._'s comment on Blazemonger's answer):
const array_chunks = (array, chunk_size) => array
.map((_, i, all) => all.slice(i*chunk_size, (i+1)*chunk_size))
.filter(x => x.length)
If you use ECMAScript version >= 5.1, you can implement a functional version of chunk() using array.reduce() that has O(N) complexity:
function chunk(chunkSize, array) {
return array.reduce(function(previous, current) {
var chunk;
if (previous.length === 0 ||
previous[previous.length -1].length === chunkSize) {
chunk = []; // 1
previous.push(chunk); // 2
}
else {
chunk = previous[previous.length -1]; // 3
}
chunk.push(current); // 4
return previous; // 5
}, []); // 6
}
console.log(chunk(2, ['a', 'b', 'c', 'd', 'e']));
// prints [ [ 'a', 'b' ], [ 'c', 'd' ], [ 'e' ] ]
Explanation of each numbered comment (// 1 to // 6) above:
Create a new chunk if the previous value, i.e. the previously returned array of chunks, is empty or if the last previous chunk has chunkSize items
Add the new chunk to the array of existing chunks
Otherwise, the current chunk is the last chunk in the array of chunks
Add the current value to the chunk
Return the modified array of chunks
Initialize the reduction by passing an empty array
Currying based on chunkSize:
var chunk3 = function(array) {
return chunk(3, array);
};
console.log(chunk3(['a', 'b', 'c', 'd', 'e']));
// prints [ [ 'a', 'b', 'c' ], [ 'd', 'e' ] ]
You can add the chunk() function to the global Array object:
Object.defineProperty(Array.prototype, 'chunk', {
value: function(chunkSize) {
return this.reduce(function(previous, current) {
var chunk;
if (previous.length === 0 ||
previous[previous.length -1].length === chunkSize) {
chunk = [];
previous.push(chunk);
}
else {
chunk = previous[previous.length -1];
}
chunk.push(current);
return previous;
}, []);
}
});
console.log(['a', 'b', 'c', 'd', 'e'].chunk(4));
// prints [ [ 'a', 'b', 'c' 'd' ], [ 'e' ] ]
Use chunk from lodash
lodash.chunk(arr,<size>).forEach(chunk=>{
console.log(chunk);
})
JavaScript:
function splitToBulks(arr, bulkSize = 20) {
const bulks = [];
for (let i = 0; i < Math.ceil(arr.length / bulkSize); i++) {
bulks.push(arr.slice(i * bulkSize, (i + 1) * bulkSize));
}
return bulks;
}
console.log(splitToBulks([1, 2, 3, 4, 5, 6, 7], 3));
TypeScript:
function splitToBulks<T>(arr: T[], bulkSize: number = 20): T[][] {
const bulks: T[][] = [];
for (let i = 0; i < Math.ceil(arr.length / bulkSize); i++) {
bulks.push(arr.slice(i * bulkSize, (i + 1) * bulkSize));
}
return bulks;
}
const results = [];
const chunk_size = 10;
// `array` is the input array you want to split
while(array.length > 0){
results.push(array.splice(0, chunk_size))
}
A one-liner in pure JavaScript:
function chunks(array, size) {
return Array.apply(0,{length: Math.ceil(array.length / size)}).map((_, index) => array.slice(index*size, (index+1)*size))
}
// The following will group letters of the alphabet by 4
console.log(chunks([...Array(26)].map((x,i)=>String.fromCharCode(i + 97)), 4))
Here is an example where I split an array into chunks of 2 elements, simply by splicing chunks out of the array until the original array is empty.
const array = [86,133,87,133,88,133,89,133,90,133];
const new_array = [];
const chunksize = 2;
while (array.length) {
const chunk = array.splice(0,chunksize);
new_array.push(chunk);
}
console.log(new_array)
You can use the Array.prototype.reduce function to do this in one line.
let arr = [1,2,3,4];
function chunk(arr, size)
{
let result = arr.reduce((rows, key, index) => (index % size == 0 ? rows.push([key]) : rows[rows.length-1].push(key)) && rows, []);
return result;
}
console.log(chunk(arr,2));
const array = ['a', 'b', 'c', 'd', 'e'];
const size = 2;
const chunks = [];
while (array.length) {
chunks.push(array.splice(0, size));
}
console.log(chunks);
In CoffeeScript:
b = (a.splice(0, len) while a.length)
demo
a = [1, 2, 3, 4, 5, 6, 7]
b = (a.splice(0, 2) while a.length)
[ [ 1, 2 ],
[ 3, 4 ],
[ 5, 6 ],
[ 7 ] ]
And this would be my contribution to this topic. I guess .reduce() is the best way.
var segment = (arr, n) => arr.reduce((r,e,i) => i%n ? (r[r.length-1].push(e), r)
: (r.push([e]), r), []),
arr = Array.from({length: 31}).map((_,i) => i+1);
res = segment(arr,7);
console.log(JSON.stringify(res));
But the above implementation is not very efficient, since .reduce() runs through the whole arr array. A more efficient approach (very close to the fastest imperative solution) would be to iterate over the result (to-be-chunked) array, since we can calculate its size in advance with Math.ceil(arr.length/n). Once we have an empty result array like Array(Math.ceil(arr.length/n)).fill(), the rest is to map slices of the arr array into it.
function chunk(arr,n){
var r = Array(Math.ceil(arr.length/n)).fill();
return r.map((e,i) => arr.slice(i*n, i*n+n));
}
arr = Array.from({length: 31},(_,i) => i+1);
res = chunk(arr,7);
console.log(JSON.stringify(res));
So far so good, but we can still simplify the above snippet further.
var chunk = (a,n) => Array.from({length: Math.ceil(a.length/n)}, (_,i) => a.slice(i*n, i*n+n)),
arr = Array.from({length: 31},(_,i) => i+1),
res = chunk(arr,7);
console.log(JSON.stringify(res));
I had a whiteboard task that stumped me in an interview. I have since written a solution and wondered if anyone has improvements on it, as I'm iterating, which the interviewer said not to do. The two arrays must be merged in the order array1[0], array2[0], array1[1], array2[1], etc. (see expectedResult).
const options = [[1, 12, 5], ["a", "b", "c", "d", "e"]]
const expectedResult = [1, "a", 12, "b", 5, "c", "d", "e"]
function mergeArrays(first, second) {
let returnArray = []
first.forEach((value, key) => {
returnArray.push(value)
if (second[key]) returnArray.push(second[key])
if (!first[key + 1] && second[key + 1]) {
returnArray.push(
...second.slice(key + 1, second.length)
)
}
})
return returnArray
}
const result = mergeArrays(options[0], options[1])
console.log(result.toString() === expectedResult.toString(), result)
With reduce (as an alternative to the classical for/while loop control structures)
const options = [[1, 12, 5], ["a", "b", "c", "d", "e"]];
const expectedResult = [1, "a", 12, "b", 5, "c", "d", "e"]
// a is the accumulator
// cV, cI are resp. current value and current index
result = options[0].reduce(function (a, cV, cI) {
return a.concat([cV,options[1][cI]]);
},[]);
result = result.concat(options[1].splice(options[0].length));
console.log(result.toString() === expectedResult.toString(), result)
At each step two elements are added to the accumulator array a using concat.
I go the classic way, with a while loop, because it minimizes the checks inside the loop and appends the rest of the longer array without another check.
function mergeArrays(first, second) {
var min = Math.min(first.length, second.length),
i = 0,
result = [];
while (i < min) {
result.push(first[i], second[i]);
++i;
}
return result.concat(first.slice(min), second.slice(min));
}
const options = [[1, 12, 5], ["a", "b", "c", "d", "e"]];
console.log(mergeArrays(...options));
Instead of using the value in the if conditions, check the length of the array.
The problems I see in the code are these conditions:
if (second[key]) returnArray.push(second[key])
// will not run if second[key] is 0, null, or undefined.
if (!first[key + 1] && second[key + 1])
// will produce an unwanted result if the referenced value is 0, null, or undefined.
Checking the length instead would produce a better result.
So the condition
if (second[key]) returnArray.push(second[key])
can be changed into
if( second.length > key) returnArray.push(second[key])
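Putting the two length checks together, a rough sketch of the adjusted mergeArrays (same signature as the code in the question, not the interviewer's reference solution):
function mergeArrays(first, second) {
  let returnArray = [];
  first.forEach((value, key) => {
    returnArray.push(value);
    if (second.length > key) returnArray.push(second[key]);
    // once first is exhausted, append whatever is left of second
    if (first.length === key + 1 && second.length > key + 1) {
      returnArray.push(...second.slice(key + 1));
    }
  });
  return returnArray;
}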
You can use a recursive zipping function, using spread to feed an array of two into it as its parameters:
var z = (a, b) => a.length ? [a[0], ...z(b, a.slice(1))] : b;
var options =
[
[1, 12, 5],
["a", "b", "c", "d", "e"]
];
var expectedResult = z(...options);
console.log(JSON.stringify(expectedResult));
or for any number of array inputs:
var z = (a = [], ...b) =>
b.length ? a.length ? [a[0], ...z(...b, a.slice(1))] : z(...b) : a;
var options =
[
[1, 2],
'♦♡♣♤♥♢',
['A', 'B', 'C'],
['😊', '😔', '😠'],
[null, NaN, undefined]
];
var expectedResult = z(...options);
var stringify = (o) => JSON.stringify(o, (k, v) => v === undefined ? '__undefined__' : v !== v ? '__NaN__' : v).replace(/"__undefined__"/g, 'undefined').replace(/"__NaN__"/g, 'NaN');
console.log(stringify(expectedResult));
I have two arrays:
var array1 = ["A", "B", "C"];
var array2 = ["1", "2", "3"];
How can I set another array to contain every combination of the above, so that:
var combos = ["A1", "A2", "A3", "B1", "B2", "B3", "C1", "C2", "C3"];
Or if you'd like to create combinations with an arbitrary number of arrays of arbitrary sizes...(I'm sure you can do this recursively, but since this isn't a job interview, I'm instead using an iterative "odometer" for this...it increments a "number" with each digit a "base-n" digit based on the length of each array)...for example...
combineArrays([ ["A","B","C"],
["+", "-", "*", "/"],
["1","2"] ] )
...returns...
[
"A+1","A+2","A-1", "A-2",
"A*1", "A*2", "A/1", "A/2",
"B+1","B+2","B-1", "B-2",
"B*1", "B*2", "B/1", "B/2",
"C+1","C+2","C-1", "C-2",
"C*1", "C*2", "C/1", "C/2"
]
...each of these corresponding to an "odometer" value that
picks an index from each array...
[0,0,0], [0,0,1], [0,1,0], [0,1,1]
[0,2,0], [0,2,1], [0,3,0], [0,3,1]
[1,0,0], [1,0,1], [1,1,0], [1,1,1]
[1,2,0], [1,2,1], [1,3,0], [1,3,1]
[2,0,0], [2,0,1], [2,1,0], [2,1,1]
[2,2,0], [2,2,1], [2,3,0], [2,3,1]
The "odometer" method allows you to easily generate
the type of output you want, not just the concatenated strings
like we have here. Besides that, by avoiding recursion
we avoid the possibility of -- dare I say it? -- a stack overflow...
function combineArrays( array_of_arrays ){
// First, handle some degenerate cases...
if( ! array_of_arrays ){
// Or maybe we should toss an exception...?
return [];
}
if( ! Array.isArray( array_of_arrays ) ){
// Or maybe we should toss an exception...?
return [];
}
if( array_of_arrays.length == 0 ){
return [];
}
for( let i = 0 ; i < array_of_arrays.length; i++ ){
if( ! Array.isArray(array_of_arrays[i]) || array_of_arrays[i].length == 0 ){
// If any of the arrays in array_of_arrays are not arrays or zero-length, return an empty array...
return [];
}
}
// Done with degenerate cases...
// Start "odometer" with a 0 for each array in array_of_arrays.
let odometer = new Array( array_of_arrays.length );
odometer.fill( 0 );
let output = [];
let newCombination = formCombination( odometer, array_of_arrays );
output.push( newCombination );
while ( odometer_increment( odometer, array_of_arrays ) ){
newCombination = formCombination( odometer, array_of_arrays );
output.push( newCombination );
}
return output;
}/* combineArrays() */
// Translate "odometer" to combinations from array_of_arrays
function formCombination( odometer, array_of_arrays ){
// In Imperative Programmingese (i.e., English):
// let s_output = "";
// for( let i=0; i < odometer.length; i++ ){
// s_output += "" + array_of_arrays[i][odometer[i]];
// }
// return s_output;
// In Functional Programmingese (Henny Youngman one-liner):
return odometer.reduce(
function(accumulator, odometer_value, odometer_index){
return "" + accumulator + array_of_arrays[odometer_index][odometer_value];
},
""
);
}/* formCombination() */
function odometer_increment( odometer, array_of_arrays ){
// Basically, work you way from the rightmost digit of the "odometer"...
// if you're able to increment without cycling that digit back to zero,
// you're all done, otherwise, cycle that digit to zero and go one digit to the
// left, and begin again until you're able to increment a digit
// without cycling it...simple, huh...?
for( let i_odometer_digit = odometer.length-1; i_odometer_digit >=0; i_odometer_digit-- ){
let maxee = array_of_arrays[i_odometer_digit].length - 1;
if( odometer[i_odometer_digit] + 1 <= maxee ){
// increment, and you're done...
odometer[i_odometer_digit]++;
return true;
}
else{
if( i_odometer_digit - 1 < 0 ){
// No more digits left to increment, end of the line...
return false;
}
else{
// Can't increment this digit, cycle it to zero and continue
// the loop to go over to the next digit...
odometer[i_odometer_digit]=0;
continue;
}
}
}/* for( let odometer_digit = odometer.length-1; odometer_digit >=0; odometer_digit-- ) */
}/* odometer_increment() */
Just in case anyone is looking for an Array.map solution:
var array1=["A","B","C"];
var array2=["1","2","3","4"];
console.log(array1.flatMap(d => array2.map(v => d + v)))
Seeing a lot of for loops in all of the answers...
Here's a recursive solution I came up with that will find all combinations of N arrays by taking one element from each array:
const array1=["A","B","C"]
const array2=["1","2","3"]
const array3=["red","blue","green"]
const combine = ([head, ...[headTail, ...tailTail]]) => {
if (!headTail) return head
const combined = headTail.reduce((acc, x) => {
return acc.concat(head.map(h => `${h}${x}`))
}, [])
return combine([combined, ...tailTail])
}
console.log('With your example arrays:', combine([array1, array2]))
console.log('With N arrays:', combine([array1, array2, array3]))
//-----------UPDATE BELOW FOR COMMENT---------
// With objects
const array4=[{letter: "A"}, {letter: "B"}, {letter: "C"}]
const array5=[{number: 1}, {number: 2}, {number: 3}]
const array6=[{color: "RED"}, {color: "BLUE"}, {color: "GREEN"}]
const combineObjects = ([head, ...[headTail, ...tailTail]]) => {
if (!headTail) return head
const combined = headTail.reduce((acc, x) => {
return acc.concat(head.map(h => ({...h, ...x})))
}, [])
return combineObjects([combined, ...tailTail])
}
console.log('With arrays of objects:', combineObjects([array4, array5, array6]))
A loop of this form will do it:
combos = [] //or combos = new Array(2);
for(var i = 0; i < array1.length; i++)
{
for(var j = 0; j < array2.length; j++)
{
//you would access the element of the array as array1[i] and array2[j]
//create and array with as many elements as the number of arrays you are to combine
//add them in
//you could have as many dimensions as you need
combos.push(array1[i] + array2[j])
}
}
Assuming you're using a recent web browser with support for Array.forEach:
var combos = [];
array1.forEach(function(a1){
array2.forEach(function(a2){
combos.push(a1 + a2);
});
});
If you don't have forEach, it is an easy enough exercise to rewrite this without it. As others have proven before, there are also some performance advantages to doing without it... (Though I contend that before long, the common JavaScript runtimes will optimize away any current advantages of doing it otherwise.)
An enhancement of Nitish Narang's answer:
use reduce in combination with flatMap to support combining N arrays.
const combo = [
["A", "B", "C"],
["1", "2", "3", "4"]
];
console.log(combo.reduce((a, b) => a.flatMap(x => b.map(y => x + y)), ['']))
Here is a functional-programming ES6 solution:
var array1=["A","B","C"];
var array2=["1","2","3"];
var result = array1.reduce( (a, v) =>
[...a, ...array2.map(x=>v+x)],
[]);
/*---------OR--------------*/
var result1 = array1.reduce( (a, v, i) =>
a.concat(array2.map( w => v + w )),
[]);
/*-------------OR(without arrow function)---------------*/
var result2 = array1.reduce(function(a, v, i) {
a = a.concat(array2.map(function(w){
return v + w
}));
return a;
},[]
);
console.log(result);
console.log(result1);
console.log(result2)
Part II: After my complicated iterative "odometer" solution of July 2018, here's a simpler recursive version of combineArraysRecursively()...
function combineArraysRecursively( array_of_arrays ){
// First, handle some degenerate cases...
if( ! array_of_arrays ){
// Or maybe we should toss an exception...?
return [];
}
if( ! Array.isArray( array_of_arrays ) ){
// Or maybe we should toss an exception...?
return [];
}
if( array_of_arrays.length == 0 ){
return [];
}
for( let i = 0 ; i < array_of_arrays.length; i++ ){
if( ! Array.isArray(array_of_arrays[i]) || array_of_arrays[i].length == 0 ){
// If any of the arrays in array_of_arrays are not arrays or are zero-length, return an empty array...
return [];
}
}
// Done with degenerate cases...
let outputs = [];
function permute(arrayOfArrays, whichArray=0, output=""){
arrayOfArrays[whichArray].forEach((array_element)=>{
if( whichArray == array_of_arrays.length - 1 ){
// Base case...
outputs.push( output + array_element );
}
else{
// Recursive case...
permute(arrayOfArrays, whichArray+1, output + array_element );
}
});/* forEach() */
}
permute(array_of_arrays);
return outputs;
}/* function combineArraysRecursively() */
const array1 = ["A","B","C"];
const array2 = ["+", "-", "*", "/"];
const array3 = ["1","2"];
console.log("combineArraysRecursively(array1, array2, array3) = ", combineArraysRecursively([array1, array2, array3]) );
Here is another take. Just one function and no recursion.
function allCombinations(arrays) {
const numberOfCombinations = arrays.reduce(
(res, array) => res * array.length,
1
)
const result = Array(numberOfCombinations)
.fill(0)
.map(() => [])
let repeatEachElement
for (let i = 0; i < arrays.length; i++) {
const array = arrays[i]
repeatEachElement = repeatEachElement ?
repeatEachElement / array.length :
numberOfCombinations / array.length
const everyElementRepeatedLength = repeatEachElement * array.length
for (let j = 0; j < numberOfCombinations; j++) {
const index = Math.floor(
(j % everyElementRepeatedLength) / repeatEachElement
)
result[j][i] = array[index]
}
}
return result
}
const result = allCombinations([
['a', 'b', 'c', 'd'],
[1, 2, 3],
[true, false],
])
console.log(result.join('\n'))
Arbitrary number of arrays, arbitrary number of elements.
Sort of using number-base theory, I guess: the j-th array advances to its next element every time the combinations of the preceding j-1 arrays have been exhausted. Calling these arrays 'vectors' here.
let vectorsInstance = [
[1, 2],
[6, 7, 9],
[10, 11],
[1, 5, 8, 17]]
function getCombos(vectors) {
function countComb(vectors) {
let numComb = 1
for (const vector of vectors) {
numComb *= vector.length
}
return numComb
}
let allComb = countComb(vectors)
let combos = []
for (let i = 0; i < allComb; i++) {
let thisCombo = []
for (let j = 0; j < vectors.length; j++) {
let vector = vectors[j]
let prevComb = countComb(vectors.slice(0, j))
thisCombo.push(vector[Math.floor(i / prevComb) % vector.length])
}
combos.push(thisCombo)
}
return combos
}
console.log(getCombos(vectorsInstance))
While there are already plenty of good answers for getting every combination, which is of course the original question, I'd just like to add a solution for pagination. Whenever permutations are involved, there's the risk of extremely large numbers. Let's say, for whatever reason, we wanted to build an interface where a user could still browse through pages of practically unlimited permutations, e.g. show permutations 750-760 out of one gazillion.
We could do so using an odometer similar to the one in John's solution. Instead of only incrementing our way through the odometer, we also calculate its initial value, similar to how you'd convert, for example, seconds into an hh:mm:ss clock.
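For intuition, here is a minimal sketch (my own illustration, not part of the function below) of that conversion: the starting index is broken into mixed-radix digits, rightmost array first.
// Illustrative only: turn a flat permutation index into odometer digits.
// Assume three arrays of lengths 3, 4 and 2; index 17 becomes [2, 0, 1].
const lengths = [3, 4, 2];
let index = 17;
const digits = new Array(lengths.length).fill(0);
for (let d = lengths.length - 1; d >= 0; d--) {
  digits[d] = index % lengths[d];          // rightmost digit first
  index = Math.floor(index / lengths[d]);  // drop that digit
}
console.log(digits); // [2, 0, 1], since 2*(4*2) + 0*2 + 1 = 17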
function getPermutations(arrays, startIndex = 0, endIndex) {
if (
!Array.isArray(arrays) ||
arrays.length === 0 ||
arrays.some(array => !Array.isArray(array))
) {
return { start: 0, end: 0, total: 0, permutations: [] };
}
const permutations = [];
const arrayCount = arrays.length;
const arrayLengths = arrays.map(a => a.length);
const maxIndex = arrayLengths.reduce(
(product, arrayLength) => product * arrayLength,
1,
);
if (typeof endIndex !== 'number' || endIndex > maxIndex) {
endIndex = maxIndex;
}
const odometer = Array.from({ length: arrayCount }).fill(0);
for (let i = startIndex; i < endIndex; i++) {
let _i = i; // _i is modified and assigned to odometer indexes
for (let odometerIndex = arrayCount - 1; odometerIndex >= 0; odometerIndex--) {
odometer[odometerIndex] = _i % arrayLengths[odometerIndex];
if (odometer[odometerIndex] > 0 && i > startIndex) {
// Higher order values in existing odometer are still valid
// if we're not hitting 0, since there's been no overflow.
// However, startIndex always needs to follow through the loop
// to assign initial odometer.
break;
}
// Prepare _i for next odometer index by truncating rightmost digit
_i = Math.floor(_i / arrayLengths[odometerIndex]);
}
permutations.push(
odometer.map(
(odometerValue, odometerIndex) => arrays[odometerIndex][odometerValue],
),
);
}
return {
start: startIndex,
end: endIndex,
total: maxIndex,
permutations,
};
}
So for the original question, we'd do
getPermutations([['A', 'B', 'C'], ['1', '2', '3']]);
-->
{
"start": 0,
"end": 9,
"total": 9,
"permutations": [
["A", "1"],
["A", "2"],
["A", "3"],
["B", "1"],
["B", "2"],
["B", "3"],
["C", "1"],
["C", "2"],
["C", "3"]
]
}
but we could also do
getPermutations([['A', 'B', 'C'], ['1', '2', '3']], 2, 5);
-->
{
"start": 2,
"end": 5,
"total": 9,
"permutations": [
["A", "3"],
["B", "1"],
["B", "2"]
]
}
And more importantly, we could do
getPermutations(
[
new Array(1000).fill(0),
new Array(1000).fill(1),
new Array(1000).fill(2),
new Array(1000).fill(3),
['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'],
['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J'],
['X', 'Y', 'Z'],
['1', '2', '3', '4', '5', '6']
],
750,
760
);
-->
{
"start": 750,
"end": 760,
"total": 1800000000000000,
"permutations": [
[0, 1, 2, 3, "e", "B", "Z", "1"],
[0, 1, 2, 3, "e", "B", "Z", "2"],
[0, 1, 2, 3, "e", "B", "Z", "3"],
[0, 1, 2, 3, "e", "B", "Z", "4"],
[0, 1, 2, 3, "e", "B", "Z", "5"],
[0, 1, 2, 3, "e", "B", "Z", "6"],
[0, 1, 2, 3, "e", "C", "X", "1"],
[0, 1, 2, 3, "e", "C", "X", "2"],
[0, 1, 2, 3, "e", "C", "X", "3"],
[0, 1, 2, 3, "e", "C", "X", "4"]
]
}
without the computer hanging.
Here's a short recursive one that takes N arrays.
function permuteArrays(first, next, ...rest) {
if (rest.length) next = permuteArrays(next, ...rest);
return first.flatMap(a => next.map(b => [a, b].flat()));
}
Or with reduce (slight enhancement of Penny Liu's):
function multiply(a, b) {
return a.flatMap(c => b.map(d => [c, d].flat()));
}
[['a', 'b', 'c'], ['+', '-'], [1, 2, 3]].reduce(multiply);
Runnable example:
function permuteArrays(first, next, ...rest) {
if (rest.length) next = permuteArrays(next, ...rest);
return first.flatMap(a => next.map(b => [a, b].flat()));
}
const squish = arr => arr.join('');
console.log(
permuteArrays(['A', 'B', 'C'], ['+', '-', '×', '÷'], [1, 2]).map(squish),
permuteArrays(['a', 'b', 'c'], [1, 2, 3]).map(squish),
permuteArrays([['a', 'foo'], 'b'], [1, 2]).map(squish),
permuteArrays(['a', 'b', 'c'], [1, 2, 3], ['foo', 'bar', 'baz']).map(squish),
)
I had a similar requirement, but I needed to get all combinations of the keys of an object so that I could split it into multiple objects. For example, I needed to convert the following:
{ key1: [value1, value2], key2: [value3, value4] }
into the following 4 objects
{ key1: value1, key2: value3 }
{ key1: value1, key2: value4 }
{ key1: value2, key2: value3 }
{ key1: value2, key2: value4 }
I solved this with an entry function splitToMultipleKeys and a recursive function spreadKeys:
function spreadKeys(master, objects) {
const masterKeys = Object.keys(master);
const nextKey = masterKeys.pop();
const nextValue = master[nextKey];
const newObjects = [];
for (const value of nextValue) {
for (const ob of objects) {
const newObject = Object.assign({ [nextKey]: value }, ob);
newObjects.push(newObject);
}
}
if (masterKeys.length === 0) {
return newObjects;
}
const masterClone = Object.assign({}, master);
delete masterClone[nextKey];
return spreadKeys(masterClone, newObjects);
}
export function splitToMultipleKeys(key) {
const objects = [{}];
return spreadKeys(key, objects);
}
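For clarity, here's a hypothetical usage sketch with the example shape from above (assuming splitToMultipleKeys is imported; the value names are placeholders):
splitToMultipleKeys({ key1: ['value1', 'value2'], key2: ['value3', 'value4'] });
// -> [
//      { key1: 'value1', key2: 'value3' },
//      { key1: 'value1', key2: 'value4' },
//      { key1: 'value2', key2: 'value3' },
//      { key1: 'value2', key2: 'value4' }
//    ]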
one more:
const buildCombinations = (allGroups: string[][]) => {
const indexInArray = new Array(allGroups.length);
indexInArray.fill(0);
let arrayIndex = 0;
const resultArray: string[] = [];
while (allGroups[arrayIndex]) {
let str = "";
allGroups.forEach((g, index) => {
str += g[indexInArray[index]];
});
resultArray.push(str);
// if not last item in array already, switch index to next item in array
if (indexInArray[arrayIndex] < allGroups[arrayIndex].length - 1) {
indexInArray[arrayIndex] += 1;
} else {
// set item index for the next array
indexInArray[arrayIndex] = 0;
arrayIndex += 1;
// exclude arrays with 1 element
while (allGroups[arrayIndex] && allGroups[arrayIndex].length === 1) {
arrayIndex += 1;
}
indexInArray[arrayIndex] = 1;
}
}
return resultArray;
};
One example:
const testArrays = [["a","b"],["c"],["d","e","f"]]
const result = buildCombinations(testArrays)
// -> ["acd","bcd","ace","acf"]
My version of the solution by John D. Aynedjian, which I rewrote for my own understanding.
console.log(getPermutations([["A","B","C"],["1","2","3"]]));
function getPermutations(arrayOfArrays)
{
let permutations=[];
let remainder,permutation;
let permutationCount=1;
let placeValue=1;
let placeValues=new Array(arrayOfArrays.length);
for(let i=arrayOfArrays.length-1;i>=0;i--)
{
placeValues[i]=placeValue;
placeValue*=arrayOfArrays[i].length;
}
permutationCount=placeValue;
for(let i=0;i<permutationCount;i++)
{
remainder=i;
permutation=[];
for(let j=0;j<arrayOfArrays.length;j++)
{
permutation[j]=arrayOfArrays[j][Math.floor(remainder/placeValues[j])];
remainder=remainder%placeValues[j];
}
permutations.push(permutation.reduce((prev,curr)=>prev+curr,""));
}
return permutations;
}
First, express the arrays as an array of arrays:
arrayOfArrays=[["A","B","C"],["a","b","c","d"],["1","2"]];
Next, work out the number of permutations in the solution by multiplying the numbers of elements in the arrays together:
//["A","B","C"].length*["a","b","c","d"].length*["1","2"].length //24 permutations
Then give each array a place value, starting with the last:
//["1","2"] place value 1
//["a","b","c","d"] place value 2 (each one of these letters has 2 possibilities to the right i.e. 1 and 2)
//["A","B","C"] place value 8 (each one of these letters has 8 possibilities to the right i.e. a1,a2,b1,b2,c1,c2,d1,d2
placeValues=[8,2,1]
This allows each permutation to be represented by a single number:
arrayOfArrays[0][2]+arrayOfArrays[1][3]+arrayOfArrays[2][0] //"Cd1"
...would be:
2*placeValues[0]+3*placeValues[1]+0*placeValues[2] //2*8+3*2+0*1=22
We actually need to do the reverse of this: convert each number from 0 up to the number of permutations into an index for each array, using quotients and remainders of the permutation number.
Like so:
//0 = [0,0,0], 1 = [0,0,1], 2 = [0,1,0], 3 = [0,1,1]
for(let i=0;i<permutationCount;i++)
{
remainder=i;
permutation=[];
for(let j=0;j<arrayOfArrays.length;j++)
{
permutation[j]=arrayOfArrays[j][Math.floor(remainder/placeValues[j])];
remainder=remainder%placeValues[j];
}
permutations.push(permutation.join(""));
}
The last bit turns the permutation into a string, as requested.
Make a loop like this:
let numbers = [1,2,3,4,5];
let letters = ["A","B","C","D","E"];
let combos = [];
for(let i = 0; i < numbers.length; i++) {
combos.push(letters[i] + numbers[i]);
};
But you should make the "numbers" and "letters" arrays the same length, that's it!
Let's say that I have a JavaScript array that looks like the following:
["Element 1","Element 2","Element 3",...]; // with close to a hundred elements.
What approach would be appropriate to chunk (split) the array into many smaller arrays with, let's say, 10 elements at most?
The array.slice() method can extract a slice from the beginning, middle, or end of an array for whatever purposes you require, without changing the original array.
const chunkSize = 10;
for (let i = 0; i < array.length; i += chunkSize) {
const chunk = array.slice(i, i + chunkSize);
// do whatever
}
The last chunk may be smaller than chunkSize. For example, given an array of 12 elements, the first chunk will have 10 elements and the second only 2.
Note that a chunkSize of 0 will cause an infinite loop.
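If you want a reusable helper, a minimal sketch that wraps the loop above and guards against that case could look like this (the name chunkArray is just illustrative):
function chunkArray(array, chunkSize) {
  // Guard against the infinite-loop case mentioned above
  if (chunkSize < 1) throw new Error('chunkSize must be a positive integer');
  const chunks = [];
  for (let i = 0; i < array.length; i += chunkSize) {
    chunks.push(array.slice(i, i + chunkSize));
  }
  return chunks;
}
console.log(chunkArray(['a', 'b', 'c', 'd', 'e'], 2)); // [['a','b'], ['c','d'], ['e']]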
Here's an ES6 version using reduce:
const perChunk = 2 // items per chunk
const inputArray = ['a','b','c','d','e']
const result = inputArray.reduce((resultArray, item, index) => {
const chunkIndex = Math.floor(index/perChunk)
if(!resultArray[chunkIndex]) {
resultArray[chunkIndex] = [] // start a new chunk
}
resultArray[chunkIndex].push(item)
return resultArray
}, [])
console.log(result); // result: [['a','b'], ['c','d'], ['e']]
And you're ready to chain further map/reduce transformations.
Your input array is left intact.
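For instance, a quick sketch of chaining a map onto the chunked result (illustrative only, reusing inputArray and perChunk from above):
const joined = inputArray
  .reduce((resultArray, item, index) => {
    const chunkIndex = Math.floor(index / perChunk)
    if (!resultArray[chunkIndex]) {
      resultArray[chunkIndex] = []
    }
    resultArray[chunkIndex].push(item)
    return resultArray
  }, [])
  .map(chunk => chunk.join('+')) // chain any further transformation here
console.log(joined) // ["a+b", "c+d", "e"]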
If you prefer a shorter but less readable version, you can sprinkle some concat into the mix for the same end result:
inputArray.reduce((all,one,i) => {
const ch = Math.floor(i/perChunk);
all[ch] = [].concat((all[ch]||[]),one);
return all
}, [])
You can use the remainder operator to put consecutive items into different chunks:
const ch = (i % perChunk);
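To illustrate the difference (my own example, again with inputArray and perChunk from above): Math.floor(i / perChunk) keeps consecutive items together, while i % perChunk distributes them round-robin.
inputArray.reduce((all, one, i) => {
  const ch = i % perChunk; // round-robin instead of consecutive
  all[ch] = [].concat((all[ch] || []), one);
  return all;
}, [])
// -> [['a', 'c', 'e'], ['b', 'd']]  (compare with [['a','b'], ['c','d'], ['e']] above)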
Modified from an answer by dbaseman: https://stackoverflow.com/a/10456344/711085
Object.defineProperty(Array.prototype, 'chunk_inefficient', {
value: function(chunkSize) {
var array = this;
return [].concat.apply([],
array.map(function(elem, i) {
return i % chunkSize ? [] : [array.slice(i, i + chunkSize)];
})
);
}
});
console.log(
[1, 2, 3, 4, 5, 6, 7].chunk_inefficient(3)
)
// [[1, 2, 3], [4, 5, 6], [7]]
minor addendum:
I should point out that the above is a not-that-elegant (in my mind) workaround to use Array.map. It basically does the following, where ~ is concatenation:
[[1,2,3]]~[]~[]~[] ~ [[4,5,6]]~[]~[]~[] ~ [[7]]
It has the same asymptotic running time as the method below, but perhaps a worse constant factor due to building empty lists. One could rewrite this as follows (mostly the same as Blazemonger's method, which is why I did not originally submit this answer):
More efficient method:
// refresh page if experimenting and you already defined Array.prototype.chunk
Object.defineProperty(Array.prototype, 'chunk', {
value: function(chunkSize) {
var R = [];
for (var i = 0; i < this.length; i += chunkSize)
R.push(this.slice(i, i + chunkSize));
return R;
}
});
console.log(
[1, 2, 3, 4, 5, 6, 7].chunk(3)
)
My preferred way nowadays is the above, or one of the following:
Array.range = function(n) {
// Array.range(5) --> [0,1,2,3,4]
return Array.apply(null,Array(n)).map((x,i) => i)
};
Object.defineProperty(Array.prototype, 'chunk', {
value: function(n) {
// ACTUAL CODE FOR CHUNKING ARRAY:
return Array.range(Math.ceil(this.length/n)).map((x,i) => this.slice(i*n,i*n+n));
}
});
Demo:
> JSON.stringify( Array.range(10).chunk(3) );
[[0,1,2],[3,4,5],[6,7,8],[9]]
Or if you don't want an Array.range function, it's actually just a one-liner (excluding the fluff):
var ceil = Math.ceil;
Object.defineProperty(Array.prototype, 'chunk', {value: function(n) {
return Array(ceil(this.length/n)).fill().map((_,i) => this.slice(i*n,i*n+n));
}});
or
Object.defineProperty(Array.prototype, 'chunk', {value: function(n) {
return Array.from(Array(ceil(this.length/n)), (_,i)=>this.slice(i*n,i*n+n));
}});
Try to avoid mucking with native prototypes, including Array.prototype, if you don't know who will be consuming your code (3rd parties, coworkers, yourself at a later date, etc.).
There are ways to safely extend prototypes (but not in all browsers) and there are ways to safely consume objects created from extended prototypes, but a better rule of thumb is to follow the Principle of Least Surprise and avoid these practices altogether.
If you have some time, watch Andrew Dupont's JSConf 2011 talk, "Everything is Permitted: Extending Built-ins", for a good discussion about this topic.
But back to the question: while the solutions above will work, they are overly complex and require unnecessary computational overhead. Here is my solution:
function chunk (arr, len) {
var chunks = [],
i = 0,
n = arr.length;
while (i < n) {
chunks.push(arr.slice(i, i += len));
}
return chunks;
}
// Optionally, you can do the following to avoid cluttering the global namespace:
Array.chunk = chunk;
Using generators
function* chunks(arr, n) {
for (let i = 0; i < arr.length; i += n) {
yield arr.slice(i, i + n);
}
}
let someArray = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
console.log([...chunks(someArray, 2)]) // [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9]]
Can be typed with Typescript like so:
function* chunks<T>(arr: T[], n: number): Generator<T[], void> {
for (let i = 0; i < arr.length; i += n) {
yield arr.slice(i, i + n);
}
}
I tested the different answers on jsperf.com. The result is available here: https://web.archive.org/web/20150909134228/https://jsperf.com/chunk-mtds
And the fastest function (which also works from IE8 onward) is this one:
function chunk(arr, chunkSize) {
if (chunkSize <= 0) throw "Invalid chunk size";
var R = [];
for (var i=0,len=arr.length; i<len; i+=chunkSize)
R.push(arr.slice(i,i+chunkSize));
return R;
}
Splice version using ES6
let [list,chunkSize] = [[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15], 6];
list = [...Array(Math.ceil(list.length / chunkSize))].map(_ => list.splice(0,chunkSize))
console.log(list);
I'd prefer to use splice method:
var chunks = function(array, size) {
var results = [];
while (array.length) {
results.push(array.splice(0, size));
}
return results;
};
Nowadays you can use lodash's chunk function to split the array into smaller arrays: https://lodash.com/docs#chunk. No need to fiddle with loops anymore!
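For example, a quick usage sketch (assuming lodash is loaded as _):
_.chunk(['Element 1', 'Element 2', 'Element 3', 'Element 4', 'Element 5'], 2);
// -> [['Element 1', 'Element 2'], ['Element 3', 'Element 4'], ['Element 5']]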
Old question: New answer! I actually was working with an answer from this question and had a friend improve on it! So here it is:
Array.prototype.chunk = function ( n ) {
if ( !this.length ) {
return [];
}
return [ this.slice( 0, n ) ].concat( this.slice(n).chunk(n) );
};
[1,2,3,4,5,6,7,8,9,0].chunk(3);
> [[1,2,3],[4,5,6],[7,8,9],[0]]
One more solution using Array.prototype.reduce():
const chunk = (array, size) =>
array.reduce((acc, _, i) => {
if (i % size === 0) acc.push(array.slice(i, i + size))
return acc
}, [])
// Usage:
const numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
const chunked = chunk(numbers, 3)
console.log(chunked)
This solution is very similar to the solution by Steve Holgado. However, because this solution doesn't utilize array spreading and doesn't create new arrays in the reducer function, it's faster (see jsPerf test) and subjectively more readable (simpler syntax) than the other solution.
At every nth iteration (where n = size; starting at the first iteration), the accumulator array (acc) is appended with a chunk of the array (array.slice(i, i + size)) and then returned. At other iterations, the accumulator array is returned as-is.
If size is zero, the method returns an empty array. If size is negative, the method returns broken results. So, if needed in your case, you may want to do something about negative or non-positive size values.
If speed is important in your case, a simple for loop would be faster than using reduce() (see the jsPerf test), and some may find this style more readable as well:
function chunk(array, size) {
// This prevents infinite loops
if (size < 1) throw new Error('Size must be positive')
const result = []
for (let i = 0; i < array.length; i += size) {
result.push(array.slice(i, i + size))
}
return result
}
There have been many answers but this is what I use:
const chunk = (arr, size) =>
arr
.reduce((acc, _, i) =>
(i % size)
? acc
: [...acc, arr.slice(i, i + size)]
, [])
// USAGE
const numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
chunk(numbers, 3)
// [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]
First, check for a remainder when dividing the index by the chunk size.
If there is a remainder then just return the accumulator array.
If there is no remainder then the index is divisible by the chunk size, so take a slice from the original array (starting at the current index) and add it to the accumulator array.
So, the returned accumulator array for each iteration of reduce looks something like this:
// 0: [[1, 2, 3]]
// 1: [[1, 2, 3]]
// 2: [[1, 2, 3]]
// 3: [[1, 2, 3], [4, 5, 6]]
// 4: [[1, 2, 3], [4, 5, 6]]
// 5: [[1, 2, 3], [4, 5, 6]]
// 6: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 7: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 8: [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
// 9: [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10]]
I think this is a nice recursive solution with ES6 syntax:
const chunk = function(array, size) {
if (!array.length) {
return [];
}
const head = array.slice(0, size);
const tail = array.slice(size);
return [head, ...chunk(tail, size)];
};
console.log(chunk([1,2,3], 2));
ONE-LINER
const chunk = (a,n)=>[...Array(Math.ceil(a.length/n))].map((_,i)=>a.slice(n*i,n+n*i));
For TypeScript
const chunk = <T>(arr: T[], size: number): T[][] =>
[...Array(Math.ceil(arr.length / size))].map((_, i) =>
arr.slice(size * i, size + size * i)
);
DEMO
const chunk = (a,n)=>[...Array(Math.ceil(a.length/n))].map((_,i)=>a.slice(n*i,n+n*i));
document.write(JSON.stringify(chunk([1, 2, 3, 4], 2)));
Chunk By Number Of Groups
const part=(a,n)=>[...Array(n)].map((_,i)=>a.slice(i*Math.ceil(a.length/n),(i+1)*Math.ceil(a.length/n)));
For TypeScript
const part = <T>(a: T[], n: number): T[][] => {
const b = Math.ceil(a.length / n);
return [...Array(n)].map((_, i) => a.slice(i * b, (i + 1) * b));
};
DEMO
const part = (a, n) => {
const b = Math.ceil(a.length / n);
return [...Array(n)].map((_, i) => a.slice(i * b, (i + 1) * b));
};
document.write(JSON.stringify(part([1, 2, 3, 4, 5, 6], 2))+'<br/>');
document.write(JSON.stringify(part([1, 2, 3, 4, 5, 6, 7], 2)));
Ok, let's start with a fairly tight one:
function chunk(arr, n) {
return arr.slice(0,(arr.length+n-1)/n|0).
map(function(c,i) { return arr.slice(n*i,n*i+n); });
}
Which is used like this:
chunk([1,2,3,4,5,6,7], 2);
Then we have this tight reducer function:
function chunker(p, c, i) {
(p[i/this|0] = p[i/this|0] || []).push(c);
return p;
}
Which is used like this:
[1,2,3,4,5,6,7].reduce(chunker.bind(3),[]);
Since a kitten dies when we bind this to a number, we can do manual currying like this instead:
// Fluent alternative API without prototype hacks.
function chunker(n) {
return function(p, c, i) {
(p[i/n|0] = p[i/n|0] || []).push(c);
return p;
};
}
Which is used like this:
[1,2,3,4,5,6,7].reduce(chunker(3),[]);
Then the still pretty tight function which does it all in one go:
function chunk(arr, n) {
return arr.reduce(function(p, cur, i) {
(p[i/n|0] = p[i/n|0] || []).push(cur);
return p;
},[]);
}
chunk([1,2,3,4,5,6,7], 3);
I aimed at creating a simple non-mutating solution in pure ES6. Peculiarities in javascript make it necessary to fill the empty array before mapping :-(
function chunk(a, l) {
return new Array(Math.ceil(a.length / l)).fill(0)
.map((_, n) => a.slice(n*l, n*l + l));
}
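A small illustration of that peculiarity (my own note): Array.prototype.map skips the holes of a sparse array, so without fill() the callback never runs.
new Array(3).map(() => 'x')          // -> [ <3 empty items> ] (callback never called)
new Array(3).fill(0).map(() => 'x')  // -> ['x', 'x', 'x']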
This version with recursion seems simpler and more compelling:
function chunk(a, l) {
if (a.length == 0) return [];
else return [a.slice(0, l)].concat(chunk(a.slice(l), l));
}
The ridiculously weak array functions of ES6 make for good puzzles :-)
Created an npm package for this: https://www.npmjs.com/package/array.chunk
// (wrapped in a function for readability; the name is illustrative)
function chunk(arr, size) {
  var result = [];
  for (var i = 0; i < arr.length; i += size) {
    result.push(arr.slice(i, size + i));
  }
  return result;
}
When using a TypedArray
// (wrapped in a function for readability; the name is illustrative)
function chunk(arr, size) {
  var result = [];
  for (var i = 0; i < arr.length; i += size) {
    result.push(arr.subarray(i, size + i));
  }
  return result;
}
Using Array.prototype.splice(), splice the array until it has no elements left.
Array.prototype.chunk = function(size) {
let result = [];
while(this.length) {
result.push(this.splice(0, size));
}
return result;
}
const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
console.log(arr.chunk(2));
Update
Array.prototype.splice() mutates the original array, so after performing chunk() the original array (arr) becomes [].
So if you want to keep the original array untouched, copy the arr data into another array and do the same thing.
Array.prototype.chunk = function(size) {
let data = [...this];
let result = [];
while(data.length) {
result.push(data.splice(0, size));
}
return result;
}
const arr = [1, 2, 3, 4, 5, 6, 7, 8, 9];
console.log('chunked:', arr.chunk(2));
console.log('original', arr);
P.S: Thanks to #mts-knn for mentioning the matter.
I recommend using lodash. Chunking is one of many useful functions there.
Instructions:
npm i --save lodash
Include in your project:
import * as _ from 'lodash';
Usage:
const arrayOfElements = ["Element 1","Element 2","Element 3", "Element 4", "Element 5","Element 6","Element 7","Element 8","Element 9","Element 10","Element 11","Element 12"]
const chunkedElements = _.chunk(arrayOfElements, 10)
You can find my sample here:
https://playcode.io/659171/
The following ES2015 approach works without having to define a function and directly on anonymous arrays (example with chunk size 2):
[11,22,33,44,55].map((_, i, all) => all.slice(2*i, 2*i+2)).filter(x=>x.length)
If you want to define a function for this, you could do it as follows (improving on K._'s comment on Blazemonger's answer):
const array_chunks = (array, chunk_size) => array
.map((_, i, all) => all.slice(i*chunk_size, (i+1)*chunk_size))
.filter(x => x.length)
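For instance, a quick usage check with the function defined above:
array_chunks([1, 2, 3, 4, 5], 2) // -> [[1, 2], [3, 4], [5]]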
If you use ECMAScript version >= 5.1, you can implement a functional version of chunk() using array.reduce() that has O(N) complexity:
function chunk(chunkSize, array) {
return array.reduce(function(previous, current) {
var chunk;
if (previous.length === 0 ||
previous[previous.length -1].length === chunkSize) {
chunk = []; // 1
previous.push(chunk); // 2
}
else {
chunk = previous[previous.length -1]; // 3
}
chunk.push(current); // 4
return previous; // 5
}, []); // 6
}
console.log(chunk(2, ['a', 'b', 'c', 'd', 'e']));
// prints [ [ 'a', 'b' ], [ 'c', 'd' ], [ 'e' ] ]
Explanation of each numbered comment above:
1. Create a new chunk if the previous value, i.e. the previously returned array of chunks, is empty or if the last previous chunk has chunkSize items
2. Add the new chunk to the array of existing chunks
3. Otherwise, the current chunk is the last chunk in the array of chunks
4. Add the current value to the chunk
5. Return the modified array of chunks
6. Initialize the reduction by passing an empty array
Currying based on chunkSize:
var chunk3 = function(array) {
return chunk(3, array);
};
console.log(chunk3(['a', 'b', 'c', 'd', 'e']));
// prints [ [ 'a', 'b', 'c' ], [ 'd', 'e' ] ]
You can add the chunk() function to the global Array object:
Object.defineProperty(Array.prototype, 'chunk', {
value: function(chunkSize) {
return this.reduce(function(previous, current) {
var chunk;
if (previous.length === 0 ||
previous[previous.length -1].length === chunkSize) {
chunk = [];
previous.push(chunk);
}
else {
chunk = previous[previous.length -1];
}
chunk.push(current);
return previous;
}, []);
}
});
console.log(['a', 'b', 'c', 'd', 'e'].chunk(4));
// prints [ [ 'a', 'b', 'c' 'd' ], [ 'e' ] ]
Use chunk from lodash
lodash.chunk(arr,<size>).forEach(chunk=>{
console.log(chunk);
})
JavaScript:
function splitToBulks(arr, bulkSize = 20) {
const bulks = [];
for (let i = 0; i < Math.ceil(arr.length / bulkSize); i++) {
bulks.push(arr.slice(i * bulkSize, (i + 1) * bulkSize));
}
return bulks;
}
console.log(splitToBulks([1, 2, 3, 4, 5, 6, 7], 3));
TypeScript:
function splitToBulks<T>(arr: T[], bulkSize: number = 20): T[][] {
const bulks: T[][] = [];
for (let i = 0; i < Math.ceil(arr.length / bulkSize); i++) {
bulks.push(arr.slice(i * bulkSize, (i + 1) * bulkSize));
}
return bulks;
}
results = []
chunk_size = 10
while(array.length > 0){
results.push(array.splice(0, chunk_size))
}
The one-liner in pure JavaScript:
function chunks(array, size) {
return Array.apply(0,{length: Math.ceil(array.length / size)}).map((_, index) => array.slice(index*size, (index+1)*size))
}
// The following will group letters of the alphabet by 4
console.log(chunks([...Array(26)].map((x,i)=>String.fromCharCode(i + 97)), 4))
Here is an example where I split an array into chunks of 2 elements, simply by splicing chunks out of the array until the original array is empty.
const array = [86,133,87,133,88,133,89,133,90,133];
const new_array = [];
const chunksize = 2;
while (array.length) {
const chunk = array.splice(0,chunksize);
new_array.push(chunk);
}
console.log(new_array)
You can use the Array.prototype.reduce function to do this in one line.
let arr = [1,2,3,4];
function chunk(arr, size)
{
let result = arr.reduce((rows, key, index) => (index % size == 0 ? rows.push([key]) : rows[rows.length-1].push(key)) && rows, []);
return result;
}
console.log(chunk(arr,2));
const array = ['a', 'b', 'c', 'd', 'e'];
const size = 2;
const chunks = [];
while (array.length) {
chunks.push(array.splice(0, size));
}
console.log(chunks);
In CoffeeScript:
b = (a.splice(0, len) while a.length)
demo
a = [1, 2, 3, 4, 5, 6, 7]
b = (a.splice(0, 2) while a.length)
[ [ 1, 2 ],
[ 3, 4 ],
[ 5, 6 ],
[ 7 ] ]
And this would be my contribution to this topic. I guess .reduce() is the best way.
var segment = (arr, n) => arr.reduce((r,e,i) => i%n ? (r[r.length-1].push(e), r)
: (r.push([e]), r), []),
arr = Array.from({length: 31}).map((_,i) => i+1);
res = segment(arr,7);
console.log(JSON.stringify(res));
But the above implementation is not very efficient, since .reduce() runs through the whole arr array. A more efficient approach (very close to the fastest imperative solution) is to iterate over the result (the to-be-chunked) array, since we can calculate its size in advance with Math.ceil(arr.length/n). Once we have the empty result array, such as Array(Math.ceil(arr.length/n)).fill(), the rest is to map slices of the arr array into it.
function chunk(arr,n){
var r = Array(Math.ceil(arr.length/n)).fill();
return r.map((e,i) => arr.slice(i*n, i*n+n));
}
arr = Array.from({length: 31},(_,i) => i+1);
res = chunk(arr,7);
console.log(JSON.stringify(res));
So far so good, but we can still simplify the above snippet further.
var chunk = (a,n) => Array.from({length: Math.ceil(a.length/n)}, (_,i) => a.slice(i*n, i*n+n)),
arr = Array.from({length: 31},(_,i) => i+1),
res = chunk(arr,7);
console.log(JSON.stringify(res));