I have a question regarding the native Array.forEach implementation of JavaScript: Does it behave asynchronously?
For example, if I call:
[many many elements].forEach(function () {lots of work to do})
Will this be non-blocking?
No, it is blocking. Have a look at the specification of the algorithm.
However, a perhaps easier-to-understand implementation is given on MDN:
if (!Array.prototype.forEach)
{
    Array.prototype.forEach = function(fun /*, thisp */)
    {
        "use strict";

        if (this === void 0 || this === null)
            throw new TypeError();

        var t = Object(this);
        var len = t.length >>> 0;
        if (typeof fun !== "function")
            throw new TypeError();

        var thisp = arguments[1];
        for (var i = 0; i < len; i++)
        {
            if (i in t)
                fun.call(thisp, t[i], i, t);
        }
    };
}
If you have to execute a lot of code for each element, you should consider using a different approach:
function processArray(items, process) {
    var todo = items.concat();

    setTimeout(function step() {
        process(todo.shift());
        if (todo.length > 0) {
            // named function expression instead of the deprecated arguments.callee
            setTimeout(step, 25);
        }
    }, 25);
}
and then call it with:
processArray([many many elements], function () {lots of work to do});
This version is non-blocking. The example is taken from High Performance JavaScript.
Another option might be web workers.
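For example, a minimal sketch of offloading the loop to a worker; the file name worker.js and the message shape here are illustrative, not from the original:

// main.js
var worker = new Worker('worker.js');
worker.onmessage = function (e) {
    console.log('worker finished:', e.data);
};
worker.postMessage([/* many many elements */]);

// worker.js
onmessage = function (e) {
    var results = e.data.map(function (element) {
        return element; // lots of work to do, off the main thread
    });
    postMessage(results);
};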
If you need an asynchronous-friendly version of Array.forEach and similar, they're available in the Node.js 'async' module: http://github.com/caolan/async ...as a bonus this module also works in the browser.
async.each(openFiles, saveFile, function(err) {
    // if any of the saves produced an error, err would equal that error
});
There is a common pattern for doing a really heavy computation in Node that may be applicable to you...
Node is single-threaded (as a deliberate design choice, see What is Node.js?); this means that it can only utilize a single core. Modern boxes have 8, 16, or even more cores, so this could leave 90+% of the machine idle. The common pattern for a REST service is to fire up one node process per core, and put these behind a local load balancer like http://nginx.org/.
Forking a child -
For what you are trying to do, there is another common pattern, forking off a child process to do the heavy lifting. The upside is that the child process can do heavy computation in the background while your parent process is responsive to other events. The catch is that you can't / shouldn't share memory with this child process (not without a LOT of contortions and some native code); you have to pass messages. This will work beautifully if the size of your input and output data is small compared to the computation that must be performed. You can even fire up a child node.js process and use the same code you were using previously.
For example:
var child_process = require('child_process');

function run_in_child(array, cb) {
    // 'child' avoids shadowing the global 'process' object
    var child = child_process.exec('node libfn.js', function(err, stdout, stderr) {
        var output = JSON.parse(stdout);
        cb(err, output);
    });
    child.stdin.write(JSON.stringify(array), 'utf8');
    child.stdin.end();
}
Array.forEach is meant for computing, not waiting, and there is nothing to be gained by making computations asynchronous within an event loop (web workers add multiprocessing, if you need multi-core computation). If you want to wait for multiple tasks to end, use a counter, which you can wrap in a semaphore class.
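A minimal sketch of such a counter-based semaphore (the class and method names are illustrative):

// call done() once per task; onComplete fires when the counter reaches zero
function Semaphore(count, onComplete) {
    this.count = count;
    this.onComplete = onComplete;
}
Semaphore.prototype.done = function () {
    if (--this.count === 0) this.onComplete();
};

// usage: wait for three asynchronous tasks to end
var sem = new Semaphore(3, function () { console.log('all tasks finished'); });
setTimeout(function () { sem.done(); }, 100);
setTimeout(function () { sem.done(); }, 200);
setTimeout(function () { sem.done(); }, 300);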
Edit 2018-10-11:
It looks like there is a good chance the standard described below may not go through; consider pipelining as an alternative (it does not behave exactly the same, but the methods could be implemented in a similar manner).
This is exactly why I am excited about ES7. In the future you will be able to do something like the code below (some of the specs are not complete, so use with caution; I will try to keep this up to date). Basically, using the new :: bind operator, you will be able to run a method on an object as if the object's prototype contained the method, e.g. [Object]::[Method] where normally you would call [Object].[ObjectsMethod].
Note that to do this today (24-July-16) and have it work in all browsers, you will need to transpile your code for the following functionality: import/export, arrow functions, promises, async/await, and most importantly the function bind operator. The code below could be modified to use only function bind if necessary; all of this functionality is neatly available today by using Babel.
YourCode.js (where 'lots of work to do' must simply return a promise, resolving when the asynchronous work is done):
import { asyncForEach } from './ArrayExtensions.js';
await [many many elements]::asyncForEach(() => lots of work to do);
ArrayExtensions.js
export function asyncForEach(callback)
{
    return Promise.resolve(this).then(async (ar) =>
    {
        for (let i = 0; i < ar.length; i++)
        {
            await callback.call(ar, ar[i], i, ar);
        }
    });
}

export function asyncMap(callback)
{
    return Promise.resolve(this).then(async (ar) =>
    {
        const out = [];
        for (let i = 0; i < ar.length; i++)
        {
            out[i] = await callback.call(ar, ar[i], i, ar);
        }
        return out;
    });
}
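For symmetry, usage of asyncMap might look like this (a sketch under the same bind-operator and transpilation assumptions as above):

import { asyncMap } from './ArrayExtensions.js';

// each callback returns a promise; resolved values are collected in order
const doubled = await [1, 2, 3]::asyncMap(async (x) => x * 2);
// doubled is [2, 4, 6]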
This is a short asynchronous function to use without requiring third-party libraries.
Array.prototype.each = function (iterator, callback) {
    var pointer = -1,
        iterate = function () {
            pointer++;
            if (pointer >= this.length) {
                callback();
                return;
            }
            // the iterator receives the item, a 'next' function, and the index
            iterator.call(iterator, this[pointer], iterate, pointer);
        }.bind(this);
    iterate();
};
There is a package on npm for easy asynchronous for each loops.
var forEachAsync = require('futures').forEachAsync;

// waits for one request to finish before beginning the next
forEachAsync(['dogs', 'cats', 'octocats'], function (next, element, index, array) {
    getPics(element, next);
}).then(function () {
    // after all of the elements have been handled,
    // the final callback fires to let you know it's all done
    console.log('All requests have finished');
});
Another variation is forAllAsync.
It is even possible to code a solution like this, for example:

var loop = function(i, data, callback) {
    if (i < data.length) {
        //TODO("SELECT * FROM stackoverflowUsers;", function(res) {
        //    data[i].meta = res;
        console.log(i, data[i].title);
        return loop(i + 1, data, callback); // fixed: dropped the stray 'errors' argument
        //});
    } else {
        return callback(data);
    }
};

loop(0, [{"title": "hello"}, {"title": "world"}], function(data) {
    console.log("DONE\n" + data);
});
On the other hand, it is much slower than a plain for loop.
Otherwise, the excellent Async library can do this: https://caolan.github.io/async/docs.html#each
These code snippets will give you a better understanding of the difference between forEach and for...of:
/* eslint-disable no-console */
async function forEachTest() {
    console.log('########### Testing forEach ################')
    console.log('start of forEachTest func')
    let a = [1, 2, 3]
    await a.forEach(async (v) => {
        console.log('start of forEach: ', v)
        await new Promise(resolve => setTimeout(resolve, v * 1000))
        console.log('end of forEach: ', v)
    })
    console.log('end of forEachTest func')
}
forEachTest()

async function forOfTest() {
    await new Promise(resolve => setTimeout(resolve, 10000)) // just to see the console output in proper order
    console.log('\n\n########### Testing forOf ################')
    console.log('start of forOfTest func')
    let a = [1, 2, 3]
    for (const v of a) {
        console.log('start of forOf: ', v)
        await new Promise(resolve => setTimeout(resolve, v * 1000))
        console.log('end of forOf: ', v)
    }
    console.log('end of forOfTest func')
}
forOfTest()
Here is a small example you can run to test it:
[1,2,3,4,5,6,7,8,9].forEach(function(n) {
    var sum = 0;
    console.log('Start for:' + n);
    for (var i = 0; i < (10 - n) * 100000000; i++) {
        sum++;
    }
    console.log('Ended for:' + n, sum);
});
It will produce something like this (if it takes too little or too much time, increase or decrease the number of iterations accordingly):
(index):48 Start for:1
(index):52 Ended for:1 900000000
(index):48 Start for:2
(index):52 Ended for:2 800000000
(index):48 Start for:3
(index):52 Ended for:3 700000000
(index):48 Start for:4
(index):52 Ended for:4 600000000
(index):48 Start for:5
(index):52 Ended for:5 500000000
(index):48 Start for:6
(index):52 Ended for:6 400000000
(index):48 Start for:7
(index):52 Ended for:7 300000000
(index):48 Start for:8
(index):52 Ended for:8 200000000
(index):48 Start for:9
(index):52 Ended for:9 100000000
(index):45 [Violation] 'load' handler took 7285ms
Although Array.forEach is not asynchronous, you can get asynchronous "end result". Example below:
function delayFunction(x) {
    return new Promise(
        (resolve) => setTimeout(() => resolve(x), 1000)
    );
}

[1, 2, 3].forEach(async (x) => {
    console.log(x);
    console.log(await delayFunction(x));
});
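If you also need to know when all the per-element work has finished, a common pattern is map plus Promise.all; a sketch reusing the same delayFunction:

// collect one promise per element, then wait for all of them
Promise.all([1, 2, 3].map(async (x) => {
    console.log(x);
    return await delayFunction(x);
})).then((results) => {
    console.log('all done:', results);
});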
Use Promise.each from the bluebird library.
Promise.each(
Iterable<any>|Promise<Iterable<any>> input,
function(any item, int index, int length) iterator
) -> Promise
This method iterates over an array, or a promise of an array, which contains promises (or a mix of promises and values) with the given iterator function with the signature (value, index, length) where the value is the resolved value of a respective promise in the input array. Iteration happens serially. If the iterator function returns a promise or a thenable, then the result of the promise is awaited before continuing with next iteration. If any promise in the input array is rejected, then the returned promise is rejected as well.
If all of the iterations resolve successfully, Promise.each resolves to the original array unmodified. However, if one iteration rejects or errors, Promise.each ceases execution immediately and does not process any further iterations. The error or rejected value is returned in this case instead of the original array.
This method is meant to be used for side effects.
var fileNames = ["1.txt", "2.txt", "3.txt"];

Promise.each(fileNames, function(fileName) {
    return fs.readFileAsync(fileName).then(function(val) {
        // do stuff with 'val' here.
    });
}).then(function() {
    console.log("done");
});
Related
I'm making a bot for a game running on Node.js, and what this function is supposed to do is loop through an array of vectors and then make the bot go to each vector.
However, what it actually does is tell the bot to run to all the vectors at the same time, so it spazzes out and then runs to the last vector in the array:
function digSchedule() {
    var arrayLength = blocksToMine.length;
    for (var i = 0; i < blocksToMine.length; i++) {
        console.log(i);
        scaffoldTo(blocksToMine[i]);
    }
    ...
}
The function scaffoldTo() needs to be run, and the bot must finish that step before it runs for the next element in the array, but I can't figure out how to do that.
There are several ways to achieve this. The first is probably to pass a callback with the "next function to be called" (probably scaffoldTo()). You can use .bind() to create a reference with the iterator index i.
Alternatively, you could set up a loop of Promises, which by definition have a .then() method which executes once the promise is resolved.
Finally, the async/await pattern is similar to Promises, but some find it clearer and it seems to be winning the Hype Wars: https://hackernoon.com/6-reasons-why-javascripts-async-await-blows-promises-away-tutorial-c7ec10518dd9.
Callbacks (solution 1) are available in any version of JS. Promises are generally available via a library and have native support in ES6. Async/await was finalized in ES2017 and is generally well supported.
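A sketch of the promise-chain variant (the second option), assuming scaffoldTo can be changed to return a promise; that assumption is not in the original code:

// chain one scaffoldTo call per block, each starting after the previous resolves
function digSchedule() {
    return blocksToMine.reduce(function (chain, block) {
        return chain.then(function () {
            return scaffoldTo(block); // assumed to return a promise
        });
    }, Promise.resolve());
}

digSchedule().then(function () {
    console.log('finished visiting vectors');
});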
Here's another way you could approach this. It focuses more on the callback style, although it does assume that you can modify the scaffoldTo function, as well as the parameters required for digSchedule.
function digSchedule(i, callback) {
    if (!i) {
        i = 0;
    }
    if (i < blocksToMine.length) {
        // pass a callback that schedules the next step; calling digSchedule(i++) directly
        // would recurse immediately (and with the same i), before the bot has moved
        scaffoldTo(blocksToMine[i], function() {
            digSchedule(i + 1, callback);
        });
    } else {
        callback();
    }
}
Then inside of scaffoldTo you would need something like this:

function scaffoldTo(block, callback) {
    // do whatever you need to have the bot go to the vector;
    // after the bot reaches the vector, call the callback
    callback();
}
To start it all off, you would just need to call digSchedule like this:

digSchedule(0, function() {
    console.log("finished visiting vectors");
});
This changes the pattern from using a for loop like you have, but I think it's a fun way to accomplish the goal as well.
That is a good use case for the async functions of ES2017.
Please try the following:
async function digSchedule() {
    var arrayLength = blocksToMine.length;
    for (var i = 0; i < blocksToMine.length; i++) {
        console.log(i);
        // note: await only helps if scaffoldTo returns a promise
        await scaffoldTo(blocksToMine[i]);
    }
    ...
}
If ES2017 is out of the question, your best choice would be to make a recursive function that only calls itself again when the promise from scaffoldTo is resolved.
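Such a recursive version might look like this (a sketch, again assuming scaffoldTo returns a promise):

// recurse only after the previous scaffoldTo promise resolves
function digSchedule(i) {
    i = i || 0;
    if (i >= blocksToMine.length) return Promise.resolve();
    return scaffoldTo(blocksToMine[i]).then(function () {
        return digSchedule(i + 1);
    });
}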
You may use the async module to achieve this. Alternatively, you may try something like this:
function forEachAsync(array, fun, cb) {
    var index = 0;
    if (index == array.length) {
        cb(null);
        return;
    }
    var next = function () {
        fun(array[index], function(err) {
            if (err) {
                cb(err);
                return;
            }
            index++;
            if (index < array.length) {
                setImmediate(next);
                return;
            }
            // We are done
            cb(null);
        });
    };
    next();
}

forEachAsync([1, 2, 3, 4, 5], function(e, cb) {
    console.log(e);
    cb();
}, function(err) {
    console.log('done');
});
Here is how we do it with the help of promises.
let numbers = new Array(1, 3, 2, 1);

function asyncFunction(number) {
    return new Promise((resolve, reject) => {
        setTimeout(() => {
            console.log("Number : ", number);
            resolve();
        }, number * 1000);
    });
}

let promise = Promise.resolve();

// Here we are using forEach to chain one promise after another.
numbers.forEach(number => {
    promise = promise.then(() => {
        return asyncFunction(number);
    });
});

promise.then(() => {
    console.log("Everything is done!");
});
I have a project where I need to write a function that calculates several things sequentially and then writes the results to a SQL DB. Unfortunately, I need to repeat this over 40,000 times.
I use Node.js and promises to accomplish this, but memory usage goes to almost 2 GB and a lot of the time the program just dies after 10,000 or so calculations. I developed my own native promiseEach function which takes an array's items sequentially and chains them with promises.
What am I doing wrong here?
function promiseEach(array, promiseFn) {
    return new Promise(function(resolve, reject) {
        try {
            var promiseArray = [];
            var json = { array: array, counter: 0 };
            for (var i = 0; i < array.length; i++) {
                promiseArray.push(promiseFn);
            }
            promiseArray.reduce(function(preFn, curFn, index, pArray) {
                return preFn
                    .then(function(z) { return json.array[json.counter++]; })
                    .then(curFn);
            },
            Promise.resolve(json.array[json.counter]))
            .then(resolve, reject);
        } catch (err) {
            console.log("promiseEach ERROR:");
            reject(err);
        }
    });
}
Well, you're overcomplicating the function for a start. From what I can tell, you're calling promiseFn for each element of array, in sequence.
The following code is functionally identical to yours:
function promiseEach(array, promiseFn) {
    return array.reduce(function(prev, item) {
        return prev.then(function() {
            return promiseFn(item);
        });
    }, Promise.resolve());
}
No guarantee that this will fix the actual issue, though.
For completeness, as you are coding in Node.js, here is the same code written in ES2015+:
let promiseEach = (array, promiseFn) =>
    array.reduce((prev, item) =>
        prev.then(() => promiseFn(item)),
        Promise.resolve());
Two lines in the posted code
.then( function(z){ return json.array[json.counter++] })
.then(curFn)
seem to indicate that promiseFn is to be called with a new parameter after the operation performed by a previous call to it completes, and that the fulfilled value of the previous call is not used in the promise chain.
A suggestion, to avoid creating (39,999 or so) intermediate chained promises before returning from the each function: use the intermediate promises returned by promiseFn to call a promise factory function when they become fulfilled, and return a final promise which is only fulfilled by the last intermediate promise.
The concept code which follows does not contain a call to reduce because no reduce operation is performed:
function promiseEach(array, promiseFn) {
    var resolve, reject;
    var counter = 0;
    var final = new Promise(function(r, j) { resolve = r; reject = j; });

    function nextPromise() {
        promiseFn(array[counter++])
            .then((counter < array.length ? nextPromise : resolve), reject);
    }

    if (array.length) {
        nextPromise();
    } else {
        reject(new Error("promiseEach called on empty array"));
    }
    return final;
}
Added Notes:
The promiseEach function above was tested in Node.js using both a synchronous and an asynchronous promiseFn, with no concurrent updates to the data array.
Synchronous Test
function promiseSynch(z) {
    return new Promise(function(resolve, reject) { resolve(z); });
}
was able to process an array with 1 million (1,000,000) entries in about 10 seconds on a Win7 x86 notebook with 1 GB memory and a browser, editor, and task manager open at the same time. Memory usage was flat at around 80% of available memory.
Asynchronous Test
function promiseAsynch(z) {
    var resolve;
    function delayResolve() {
        resolve(z);
    }
    return new Promise((r, j) => {
        resolve = r;
        setTimeout(delayResolve, 1);
    });
}
managed an array of 100,000 entries in a little over 20 minutes on the same notebook, again with flat memory usage of around 80%.
Conclusion
The testing suggests that promises are not causing memory leaks simply because they are being used, and that they should be capable of sequentially passing lengthy array data into a promise-returning function.
This implies there is some other cause of the memory allocation failure, or of the mysterious processing outages, which needs to be found before the problem can be fully solved. As a suggestion, you might start with a search for memory-leak issues related to the DB library being used.
I'm kind of new to Node, but I understand that writing synchronous functions is a bad thing. It locks up the event loop, or something... So it's good to write everything asynchronously.
But in some cases, writing everything async could be bad. For example, I have a function that makes an API call (to a 3rd-party API service) and then has to write the result to a database. I need to do this a lot over a short period of time, for example 500 times.
Calling this API 500 times async and then writing to the database 500 times async would probably get me banned from the API service (throttling) and overload my database server.
What is the best way to control or limit something like this? I want to keep things async so it's efficient, but I simply cannot take the above approach.
I've researched some promise-throttling approaches. Is that the correct way to approach this type of problem? Is there a better, more appropriate way to do it?
The async npm package is wonderful and has several solutions that can be used in this particular situation. One approach is using a queue with a set concurrency limit (example taken directly from the async README):
// create a queue object with concurrency 2
var q = async.queue(function (task, callback) {
    console.log('hello ' + task.name);
    callback();
}, 2);

// assign a callback
q.drain = function() {
    console.log('all items have been processed');
};

// add some items to the queue
q.push({name: 'foo'}, function (err) {
    console.log('finished processing foo');
});
github.com/caolan/async#queue
In your particular situation, just wait to call the callback() until whatever timing or transaction detail you are waiting for has completed.
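Applied to the API-plus-database case, the worker might look like this sketch; callApi and writeToDb are hypothetical placeholders for your own functions:

// at most 2 tasks in flight at once; tune the concurrency to your rate limits
var q = async.queue(function (task, callback) {
    callApi(task, function (err, result) {
        if (err) return callback(err);
        writeToDb(result, callback); // callback fires only after the DB write completes
    });
}, 2);

for (var i = 0; i < 500; i++) {
    q.push({ id: i });
}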
I am not sure how promise throttling works under the hood. I believe promises are a better approach than setTimeout, since with promises it is more event-based. My issue with that npm package is that it offers no callback option once your call is done, so my implementation would be something like:
class PromiseThrottler {
    constructor(maxParallelCalls) {
        this.maxParallelCalls = maxParallelCalls;
        this.currentCalls = 0; // number of parallel calls in flight at any point
        this.queue = []; // queue maintaining the waiting calls
    }

    // promiseFn - the fn wrapping the promise call we need to make,
    // thenChain - callback once your async call is done,
    // args - arguments that need to be passed to the function
    add(promiseFn, thenChain, ...args) {
        this.queue.push({
            promiseFn, thenChain, args
        });
        this.call();
    }

    call() {
        if (!this.queue.length || this.currentCalls >= this.maxParallelCalls) return;
        this.currentCalls++;
        let obj = this.queue.shift();
        let chain = obj.args.length ? obj.promiseFn(...obj.args) : obj.promiseFn();
        if (obj.thenChain) chain.then(obj.thenChain);
        chain
            .catch(() => {})
            .then(() => {
                this.currentCalls--;
                this.call();
            });
        this.call(); // try to fill another free slot immediately
    }
}
// usage
let PT = new PromiseThrottler(50),
    rn = max => Math.floor(Math.random() * max), // generate a random number
    randomPromise = id => new Promise(resolve => setTimeout(() => resolve(id), rn(5000))), // random promise-generating function
    i = 1,
    thenCall = id => {
        console.log('resolved for id:', id);
        let div = document.createElement('div');
        div.textContent = `resolved for id: ${id}`;
        document.body.appendChild(div);
    };

while (++i < 501) PT.add(randomPromise, thenCall, i);
One simple way of limiting this is to use setTimeout and do a "recursive" loop like this.
function asyncLoop() {
    makeAPICall(function(result) {
        writeToDataBase(result, function() {
            setTimeout(asyncLoop, 1000);
        });
    });
}
And of course you can also use that same strategy with promises.
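For example, a promise-based sketch of the same loop, assuming makeAPICall and writeToDataBase return promises here (the originals above take callbacks):

function asyncLoop() {
    return makeAPICall()
        .then(writeToDataBase)
        .then(function () {
            // wait a second before the next iteration, mirroring the setTimeout version
            return new Promise(function (resolve) { setTimeout(resolve, 1000); });
        })
        .then(asyncLoop); // loops forever, just like the original
}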
That's how I do it:
function processArray(array, index, callback) {
    processItem(array[index], function() {
        if (++index === array.length) {
            callback();
            return;
        }
        processArray(array, index, callback);
    });
}

function processItem(item, callback) {
    // do some ajax (browser) or request (node) stuff here
    // when done
    callback();
}

var arr = ["url1", "url2", "url3"];
processArray(arr, 0, function() {
    console.log("done");
});
Is it any good? How do I avoid this spaghetti-ish code?
Check out the async library. It's made for control flow (async stuff) and it has a lot of methods for array stuff: each, filter, map. Check the documentation on GitHub. Here's what you probably need:
each(arr, iterator, callback)
Applies an iterator function to each item in an array, in parallel. The iterator is called with an item from the list and a callback for when it has finished. If the iterator passes an error to this callback, the main callback for the each function is immediately called with the error.
eachSeries(arr, iterator, callback)
The same as each only the iterator is applied to each item in the array in series. The next iterator is only called once the current one has completed processing. This means the iterator functions will complete in order.
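Applied to your example, eachSeries removes the manual recursion; a sketch reusing your processItem:

var async = require('async');

async.eachSeries(["url1", "url2", "url3"], function (item, done) {
    processItem(item, function () {
        done(); // tell async this item is finished so the next one starts
    });
}, function (err) {
    console.log("done");
});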
As pointed out in another answer, one can use the "async" library. But sometimes you just don't want to introduce a new dependency in your code. So below is another way to loop and wait for the completion of some asynchronous functions.
var items = ["one", "two", "three"];

// This is your async function, which may perform a call to your database or
// whatever...
function someAsyncFunc(arg, cb) {
    setTimeout(function () {
        cb(arg.toUpperCase());
    }, 3000);
}

// cb will be called when each item from arr has been processed and all
// results are available.
function eachAsync(arr, func, cb) {
    var doneCounter = 0,
        results = [];
    arr.forEach(function (item) {
        func(item, function (res) {
            doneCounter += 1;
            results.push(res);
            if (doneCounter === arr.length) {
                cb(results);
            }
        });
    });
}

eachAsync(items, someAsyncFunc, console.log);
Now, running node iterasync.js will wait for about three seconds and then print [ 'ONE', 'TWO', 'THREE' ]. This is a simple example, but it can be extended to handle many situations.
As correctly pointed out, you have to use setTimeout, for example:

var each_async = function(ary, fn) {
    var i = 0;
    (function step() {
        fn(ary[i]);
        if (++i < ary.length)
            setTimeout(step, 0); // named function expression instead of the deprecated arguments.callee
    })();
};

each_async([1, 2, 3, 4], function(p) { console.log(p); });
The easiest way to handle async iteration of arrays (or any other iterable) is with the await operator (only in async functions) and the for...of loop.
(async function() {
    for (let value of [0, 1]) {
        value += await Promise.resolve(1);
        console.log(value);
    }
})();
You can use a library to convert any callback-accepting functions you may need into promise-returning ones.
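In Node.js, the built-in util.promisify does exactly this conversion; a minimal sketch:

const util = require('util');
const fs = require('fs');

// fs.readFile takes a callback; promisify turns it into a promise-returning function
const readFile = util.promisify(fs.readFile);

(async function () {
    for (const name of ['1.txt', '2.txt']) {
        const data = await readFile(name, 'utf8');
        console.log(name, data.length);
    }
})();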
In modern JavaScript there are interesting ways to extend an Array into an async iterable object.
Here I would like to demonstrate a skeleton of a totally new type, AsyncArray, which extends the Array type, inheriting its goodness, to become an async iterable array.
This is only available in modern engines. The code below uses the latest gimmicks like private instance fields and for await...of.
If you are not familiar with them, I would advise you to have a look at those topics in advance.
class AsyncArray extends Array {
    #INDEX;
    constructor(...ps) {
        super(...ps);
        if (this.some(p => p.constructor !== Promise)) {
            throw "All AsyncArray items must be a Promise";
        }
    }
    [Symbol.asyncIterator]() {
        this.#INDEX = 0;
        return this;
    }
    next() {
        return this.#INDEX < this.length
            ? this[this.#INDEX++].then(v => ({value: v, done: false}))
            : Promise.resolve({done: true});
    }
}
So an async iterable array must contain promises. Only then can it return an iterator object which, with every next() call, returns a promise that eventually resolves into an object like {value: "whatever", done: false} or {done: true}. So basically everything returned is a promise here; the await abstraction unpacks the value within and gives it to us.
Now, as I mentioned before, this AsyncArray type, being extended from Array, allows us to use the Array methods we are familiar with. That should simplify our job.
Let's see what happens:
class AsyncArray extends Array {
    #INDEX;
    constructor(...ps) {
        super(...ps);
        if (this.some(p => p.constructor !== Promise)) {
            throw "All AsyncArray items must be a Promise";
        }
    }
    [Symbol.asyncIterator]() {
        this.#INDEX = 0;
        return this;
    }
    next() {
        return this.#INDEX < this.length
            ? this[this.#INDEX++].then(v => ({value: v, done: false}))
            : Promise.resolve({done: true});
    }
}

var aa = AsyncArray.from({length: 10}, (_, i) => new Promise(resolve => setTimeout(resolve, i * 1000, [i, ~~(Math.random() * 100)])));

async function getAsyncRandoms() {
    for await (let random of aa) {
        console.log(`The Promise at index # ${random[0]} gets resolved with a random value of ${random[1]}`);
    }
}

getAsyncRandoms();
For modern Node.js:
To iterate through a collection truly asynchronously, you can try my tiny package with zero dependencies, compatible with ESM and CJS modules, with .d.ts typings. Check the code; it's really tiny.
https://www.npmjs.com/package/array-to-async-iterable
You can use it just like this:
for await(const el of new AsyncTimeIterator(arrayOfObjects)){
...
}
You can't just use a plain for await...of loop because of the microtask/macrotask nature of JavaScript engines.
In brief, you won't receive new HTTP requests or let other timers' callbacks execute while this code runs:
for await(const el of array){
    ...
}
You force V8, or whatever engine, to execute all the microtasks (your loop iterations), and only when the loop completes does the event loop unblock and become ready to receive HTTP connections. So this code is completely useless for keeping the process responsive.
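If you want to build something similar yourself, the key idea is to yield to the macrotask queue between iterations, for example with setTimeout. A minimal sketch of that idea (this is not the package's actual implementation):

// an async generator that yields to the macrotask queue between
// iterations so timers and I/O callbacks can run
async function* toAsyncIterable(array) {
    for (const item of array) {
        await new Promise(resolve => setTimeout(resolve, 0)); // let the event loop breathe
        yield item;
    }
}

(async () => {
    for await (const el of toAsyncIterable([1, 2, 3])) {
        console.log(el);
    }
})();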