Recursion & Async function does not respect/wait for promise - javascript

Situation:
I have a function, load_content, which runs at the start of my code:
async function load_content() {
    console.log("I GOT HERE 1");
    await load_js_files("./cmds/", "commands")
    console.log("I GOT HERE 2");
    await load_js_files("./events/", "events");
}
This function calls load_js_files twice. load_js_files is a recursive function that calls itself for each subdirectory of the specified directory, 'requiring' each file it finds and doing different things depending on whether type is commands or events.
The function load_js_files looks like:
function load_js_files(dir, type) {
    fs.readdir(dir, (e, files) => {
        if (e) console.error(e);
        let jsfiles = files.filter(f => f.split(".").pop() === "js");
        if (jsfiles.length <= 0) {
            console.log(`No commands to load from ${dir}!`);
            return;
        }
        for (const file of files) {
            if (fs.lstatSync(dir + file).isDirectory()) {
                load_js_files(dir + file + "/", type)
            }
        }
        if (type === "commands") {
            console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} commands from ${dir} ...`);
            jsfiles.forEach((f, i) => {
                let command = require(`${dir}${f}`);
                console.log(`${i + 1}: ${f} loaded!`);
                bot.commands.set(command.info.name, command);
            });
        } else if (type === "events") {
            console.log("\x1b[4m%s\x1b[0m", `Loading ${jsfiles.length} events from ${dir} ...`);
            jsfiles.forEach((f, i) => {
                let event = require(`${dir}${f}`);
                console.log(`${i + 1}: ${f} loaded!`);
                let commands = [];
                for (const cmd of bot.commands) {
                    if (cmd[1].data) commands.push(cmd[1].data.toJSON());
                }
                if (event.once) {
                    bot.once(event.name, (...args) => event.execute(...args, commands));
                } else {
                    bot.on(event.name, (...args) => event.execute(...args, commands));
                }
            });
        } else {
            console.log(log_Red, "FUNCTION 'load_js_files' CALLED WITH INCORRECT 'type'.")
        }
    });
    return new Promise((resolve, reject) => resolve("DONE"));
}
I would expect the steps in load_content to occur in this order:
console logs I GOT HERE 1
load_js_files runs with the commands parameter (granted, I haven't solved the recursion promises yet, but it should run at least once)
console logs I GOT HERE 2
load_js_files occurs again but with events parameter.
Issue:
When load_js_files runs with type = events, the variable bot.commands is undefined. bot.commands is assigned its values in step 2 above, during the first load_js_files call.
From what I can tell by debugging, the initial load_content function does not respect (my understanding of) async/await, so I assume I am doing something incorrect with promises.
In my console, however, the two console.log statements execute immediately, before the function has finished:
I GOT HERE 1
I GOT HERE 2
Loading 3 commands from ./cmds/ ...
1: createtables.js loaded!
2: ping.js loaded!
3: sqltest.js loaded!
Loading 1 events from ./events/ ...
1: ready.js loaded!
Loading 1 commands from ./cmds/settings/ ...
1: set.js loaded!
What I've tried:
I've tried the code noted above. Additionally, I have tried wrapping the second run of load_js_files in a .then(), I've tried a callback function, and I've also tried nesting Promises, but I run into issues because load_js_files calls itself recursively.
I'm having a hard time understanding whether these Promises are going to work with this type of recursion (all recursions of load_js_files must finish before the second load_js_files is called within load_content).
Bonus points:
Bonus points if you can help me understand promises within a recursive function. I've read
https://blog.scottlogic.com/2017/09/14/asynchronous-recursion.html
https://www.bennadel.com/blog/3201-exploring-recursive-promises-in-javascript.htm
and
https://medium.com/@wrj111/recursive-promises-in-nodejs-769d0e4c0cf9
But it's not quite getting through.
Attempt at implementing David's answer:
This results in an error, which I believe is related to fs.readdir(dir) requiring a callback.
Error: TypeError [ERR_INVALID_CALLBACK]: Callback must be a function. Received undefined
async function load_js_files_async_test(dir, type) {
    const files = fs.readdir(dir);
    for (const file of files) {
        const file_info = await lstat(dir + file);
        if (file_info.isDirectory()) {
            await load_js_files_async_test(dir + file + "/", type);
        } else {
            console.log("Thanks David!");
        }
    }
}

does not respect/wait for promise
Sure it does. This is the Promise it's awaiting:
new Promise((resolve,reject) => resolve("DONE"))
That Promise of course finishes very quickly (and synchronously) and then the code moves on to the next task. But what isn't being awaited anywhere in the code is this asynchronous operation:
fs.readdir(dir, (e, files) => {
    //...
});
This call to readdir is an asynchronous operation, and as such the callback function won't be invoked until after the current thread finishes everything it's doing. That includes the "promises" being awaited (which don't do anything asynchronous), as well as the console.log statements and the next invocation of load_js_files.
Fortunately, Node provides Promise-based versions of these operations as well.
Simplifying the original code a bit, imagine instead this structure:
async function load_js_files(dir, type) {
    const files = await readdir(dir);
    for (const file of files) {
        const fileInfo = await lstat(dir + file);
        if (fileInfo.isDirectory()) {
            await load_js_files(dir + file + "/", type)
        }
    }
    // etc.
}
As you can see, this "reads" a lot more like synchronous operations. The idea here is to remove the usage of callback functions, which are essentially causing you confusion and making "awaiting" much more difficult. Now all of the logic in the load_js_files function is directly in the load_js_files function, not in other anonymous callback functions. And that logic proceeds, step by step, awaiting each asynchronous operation.
Then you can await the calls to load_js_files as expected.
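For concreteness, here is a minimal sketch of the whole function rebuilt on the promise-based fs API (readdir and lstat come from require('fs').promises; bot and the per-file handling are assumed to work as in the question):
const { readdir, lstat } = require('fs').promises;

async function load_js_files(dir, type) {
    const files = await readdir(dir);
    // Recurse into subdirectories first, awaiting each one.
    for (const file of files) {
        const fileInfo = await lstat(dir + file);
        if (fileInfo.isDirectory()) {
            await load_js_files(dir + file + "/", type);
        }
    }
    // Then handle the .js files in this directory, as in the original code.
    const jsfiles = files.filter(f => f.split(".").pop() === "js");
    for (const f of jsfiles) {
        const mod = require(`${dir}${f}`);
        if (type === "commands") bot.commands.set(mod.info.name, mod);
        // ... event wiring for type === "events" as in the question
    }
}
Because every recursive call is awaited, all nested directories finish loading before load_js_files resolves, so the second call in load_content really does wait for the first.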

The function fs.readdir is a non-blocking function, which means the code you put in its callback is not being awaited. You could try fs.readdirSync instead and remove your return new Promise().
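A minimal sketch of what that synchronous variant might look like (blocking, so best confined to startup code; error handling omitted):
const fs = require('fs');

function load_js_files_sync(dir, type) {
    const files = fs.readdirSync(dir); // blocks until the directory listing is ready
    for (const file of files) {
        if (fs.lstatSync(dir + file).isDirectory()) {
            load_js_files_sync(dir + file + "/", type);
        }
    }
    // ...then require the .js files exactly as before; no Promise needed.
}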

Related

Javascript/Node/JSON question, why is this not working?

I thought I understood what I was doing until this wasn't going in order. I am running this through Node, not through a browser.
It goes to the prompt at the end of the while loop first. I don't know why.
const fs = require('fs');
const prompt = require('prompt-sync')();

function jsonReader(filepath, cb) {
    fs.readFile(filepath, 'utf-8', (err, fileData) => {
        if (err) { return cb && cb(err); }
        try {
            const object = JSON.parse(fileData);
            console.log(object);
            return cb && cb(null, object);
        } catch (err) {
            return cb && cb(err);
        }
    });
}
var exit = 0;
do {
    jsonReader('./customer.json', (err, customer) => {
        if (err) {
            console.log('Error reading file:', err)
            return
        }
        //customer.order_count += 1
        const note = prompt("Enter a note for the JSON file: ");
        customer.note = note;
        fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
            if (err) {
                console.log('Error writing file:', err)
            } else {
                console.log('File updated');
            }
        })
    })
    exit = prompt("Do you want to exit?");
} while (exit != 'y');
There are several things wrong with your code. The prompt problem you notice is just the beginning.
The way your code executes is like this:
// happens now
do {
    // happens now
    // ...
    jsonReader('./customer.json', (err, customer) => {
        // happens after readFile
        fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
            // happens after writeFile
            // ...
        })
        // happens after readFile
        // ...
    })
    // happens now
    exit = prompt("Do you want to exit?");
} while (exit != 'y');
// happens now
The time sequence is as follows:
1. Things that happen now
2. Things that happen after readFile
3. Things that happen after writeFile
It is obvious that you are outputting the prompt before either readFile or writeFile completes.
However, there is another problem: you have an infinite while loop. In Node.js, and in fact in the browser, I/O only happens when there is no JavaScript left to execute - in other words, when the interpreter is idle. Your while loop prevents the script from ever reaching that idle point, so the interpreter is never idle.
For your script, what will happen after you fix the prompt issue is:
1. Things that happen now
2. Loop into things that happen now
3. Loop into things that happen now
4. Loop into things that happen now
5. Loop into things that happen now
...
∞. Loop into things that happen now
Thus readFile and writeFile never execute. You need to replace the while loop with a recursive asynchronous call, with setTimeout() or setInterval(), or with a while loop inside an async function using await.
Here's an implementation with minimal changes to your code:
function doIt() { // <----------- replace the do..while loop
    // ...
    jsonReader('./customer.json', (err, customer) => {
        // ...
        fs.writeFile('./customer.json', JSON.stringify(customer, null, 2), (err) => {
            // ...
            var exit = prompt("Do you want to exit?");
            if (exit !== 'y') {
                doIt(); // <-------------- repeat the process again
            }
        })
    })
}
doIt(); // <--------- don't forget to begin the whole thing
This is admittedly not as easy to read as using async/await but I leave that implementation as homework. Besides, it requires much more changes to your existing code.
It is because fs.readFile is asynchronous, so your second prompt is executing before fs.readFile has finished. You may want to use async functions and await or put the prompt at the end of the callback for jsonReader.
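For reference, here is a sketch of the async/await version left as homework above, using fs.promises (the customer.json path and prompt usage are taken from the question):
const fs = require('fs').promises;
const prompt = require('prompt-sync')();

async function main() {
    let exit = '';
    do {
        const customer = JSON.parse(await fs.readFile('./customer.json', 'utf-8'));
        customer.note = prompt("Enter a note for the JSON file: ");
        await fs.writeFile('./customer.json', JSON.stringify(customer, null, 2));
        console.log('File updated');
        exit = prompt("Do you want to exit?");
    } while (exit !== 'y');
}

main().catch(err => console.error(err));
Because each file operation is awaited, the exit prompt can only run after the read and write have both finished.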

node async/await not working for me (when using Postgres / Node - working with DB updates before going to next call) [duplicate]

This question already has answers here:
Using async/await with a forEach loop
(33 answers)
Closed 3 years ago.
await is not blocking as expected when a block of code updates the db (using postgres / node)
https://node-postgres.com
I have a list of async function calls, each call updates a database, and each subsequent call works on data updated by the previous call.
There are about eight calls in a row, and each call must update the complete set of data it is working with, 100% to completion, before going to the next.
I tried to make everything not async, but it appears I am forced to make everything async/await because of the library I am using (postgres / node).
Each function call must complete 100% before going on to the next function call, because the next step does a select on rows where a field is not null (where the previous step fills in a value).
I have an await in front of each call that does something (see code below):
loads the db from a csv,
next step selects all rows just inserted, calls an API and updates the database,
and so on,
but at one point, when the next function executes, NONE of the rows have been updated (as I trace through and verify: a SQL statement returns nothing back).
The code seems to pass right through to the second function call without blocking or honoring the await, and completes its code block.
If I comment out some of the latter rows (dependent on the previous), and let the program run to completion, the database gets updated.
There is nothing functionally wrong with the code, everything works, just not from beginning to completion.
After running two function calls at the beginning, letting that run, I can then comment out those rows, uncomment the later rows in the flow, and run again, and everything works as expected, but I cannot run to completion with both uncommented.
What can I do to make sure each function call completes 100%, has all updates completed in the database, before going to the next step?
async/await is not working for me.
This is not pseudo-code; it's the actual executing code I am working with, with only the function names changed. It is real working code, cut-n-pasted directly from my IDE.
// these are functions I call below (each in their own .js)
const insert_rows_to_db_from_csv = require('./insert_rows_to_db_from_csv')
const call_api_using_rows_from_function_above = require('./call_api_using_rows_from_function_above')
const and_so_on = require('./and_so_on')
const and_so_on_and_on = require('./and_so_on_and_on')
const and_so_on_and_on_and_on = require('./and_so_on_and_on_and_on')
// Each of the above exports a main() function that I can call as func.main(),
// just like the one defined below (this is my main() entry point).
module.exports = {
    main: async function (csvFilePath) {
        console.log('service: upload.main()')
        try {
            const csvList = []
            let rstream = fs.createReadStream(csvFilePath)
                .pipe(csv())
                .on('data', (data) => csvList.push(data))
                .on('end', async () => {
                    let num_rows = csvList.length
                    // step one (if I run these two, with the step two calls below commented out, this works)
                    await insert_rows_to_db_from_csv.main(csvList);
                    await call_api_using_rows_from_function_above.main();
                    // step two
                    // blows up here, on the next function call:
                    // no rows selected in sql statements; must comment out, let the above run to
                    // completion, then comment out the rows above, and let these run separately
                    await work_with_rows_updated_in_previous_call_above.main(); // sets
                    await and_so_on.main();
                    await and_so_on_and_on.main();
                    await and_so_on_and_on_and_on.main();
                })
        } catch (err) {
            console.log(err.stack)
        } finally {
        }
    }
};
Here is the one-liner I am using to call the insert/update to the DB:
return await pool.query(sql, values);
that's it, nothing more. This is from using:
https://node-postgres.com/
npm install pg
PART 2 - continuing on,
I think the problem might be here. This is where I do each API call, then the insert (which the next function call depends on); there is some code smell here that I can't sort out.
processBatch(batch) is called, which calls the API and gets a response back, and within that it calls handleResponseDetail(response), where the insert happens. I think the problem is here, if anyone has ideas.
This is a code block inside:
await call_api_using_rows_from_function_above.main();
It completes with no errors, inserts rows, and commits; then the next function is called, and that function finds no rows (inserted here). But the await on the entire main() blocks and waits, so I don't understand.
/**
 * API call; within it, handleResponse does the DB insert.
 * @param batch
 * @returns {Promise<*>}
 */
async function processBatch(batch) {
    console.log('Processing batch');
    return await client.send(batch).then(res => {
        return handleResponseDetail(res);
    }).catch(err => handleError(err));
}
// should this be async?
function handleResponseDetail(response) {
    response.lookups.forEach(async function (lookup) {
        if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
            ++lookup_fail;
            console.log('No response from API for this address.')
        } else {
            ++lookup_success;
            const id = await insert(lookup);
        }
    });
}
Given the code block from your Part 2 edit, the problem is now clear: all of your insert()s are being scheduled outside of the blocking context of the rest of your async/await code! This is because of that .forEach, see this question for more details.
I've annotated your existing code to show the issue:
function handleResponseDetail(response) { // synchronous function
    response.lookups.forEach(async function (lookup) { // asynchronous function
        // These async functions all get scheduled simultaneously,
        // without waiting for the previous one to complete - that's why you can't use forEach like this.
        if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
            ++lookup_fail;
            console.log('No response from API for this address.')
        } else {
            ++lookup_success;
            const id = await insert(lookup); // this ONLY blocks the inner async function, not the outer handleResponseDetail
        }
    });
}
Here is a fixed version of that function which should work as you expect:
async function handleResponseDetail(response) {
    for (const lookup of response.lookups) {
        if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
            ++lookup_fail;
            console.log('No response from API for this address.')
        } else {
            ++lookup_success;
            const id = await insert(lookup); // blocks handleResponseDetail until done
        }
    }
}
Alternatively, if the order of insertion doesn't matter, you can use Promise.all for efficiency:
async function handleResponseDetail(response) {
    await Promise.all(response.lookups.map(async lookup => {
        if (typeof lookup.result[0] == "undefined") { // result[0] is Candidate #0
            ++lookup_fail;
            console.log('No response from API for this address.')
        } else {
            ++lookup_success;
            const id = await insert(lookup);
        }
    })); // waits until all insertions have completed before returning
}
To reiterate, you cannot easily use .forEach() with async/await because .forEach() simply calls the given function for each element of the array synchronously, with no regard for awaiting each promise before calling the next. If you need the loop to block between each element, or to wait for all elements to complete processing before returning from the function (this is your use case), you need to use a different for loop or alternatively a Promise.all() as above.
What your main function currently does is merely create the stream, assign the listeners, and instantly return. It does not wait for all the listeners to resolve the way you are trying to have it do.
You need to extract your file-reading logic into another function that returns a Promise which resolves only when the entire file has been read, then await that Promise inside main:
function getCsvList(csvFilePath) {
    return new Promise((resolve, reject) => {
        const csvList = []
        fs.createReadStream(csvFilePath)
            .pipe(csv())
            .on('data', (data) => csvList.push(data))
            .on('end', () => {
                resolve(csvList)
            })
            .on('error', (e) => reject(e))
    })
}
module.exports = {
    main: async function (csvFilePath) {
        try {
            const csvList = await getCsvList(csvFilePath)
            await insert_rows_to_db_from_csv.main(csvList);
            await call_api_using_rows_from_function_above.main();
            await work_with_rows_updated_in_previous_call_above.main();
            await and_so_on.main();
            await and_so_on_and_on.main();
            await and_so_on_and_on_and_on.main();
        } catch (err) {
            console.log(err.stack)
        } finally {
        }
    }
};

How to avoid an infinite loop in JavaScript

I have a Selenium webdriverIO V5 framework. The issue I am facing here is that the code below works fine on macOS, but it does not work correctly on Windows, where it gets stuck in an infinite loop.
The code's functionality: merge YAML files (which contain locators) and return the value of a locator by passing its key:
const glob = require('glob');
const yamlMerge = require('yaml-merge');
const sleep = require('system-sleep');
let xpath;

class Page {
    getElements(elementId) {
        function objectCollector() {
            glob('tests/wdio/locators/*.yml', function (er, files) {
                if (er) throw er;
                xpath = yamlMerge.mergeFiles(files);
            });
            do {
                sleep(10);
            } while (xpath === undefined);
            return xpath;
        }
        objectCollector();
        return xpath[elementId];
    }
}
module.exports = new Page();
Since you are waiting on the results of a callback, I would recommend returning a new Promise from your getElements function and resolve() the value you receive inside the callback. Then when you call getElements, you will need to resolve that Promise or use the await notation. The function will stop at that point and wait until the Promise resolves, but the event loop will still continue. See some documentation for more information.
I'll write an example below of what your code might look like using a Promise; when you call getElements, you will need to put the keyword await before it. If you want to avoid that, you could resolve the Promise from objectCollector while you're in getElements and remove the async keyword from its definition, but you really should not get in the way of asynchronous JavaScript. Also, you can probably shorten the code a bit, because objectCollector looks like an unnecessary function in this example:
const glob = require('glob')
const yamlMerge = require('yaml-merge')

class Page {
    async getElements(elementId) {
        function objectCollector() {
            return new Promise((resolve, reject) => {
                glob('tests/wdio/locators/*.yml', function (er, files) {
                    if (er) return reject(er)
                    resolve(yamlMerge.mergeFiles(files))
                })
            })
        }
        let xpath = await objectCollector()
        return xpath[elementId]
    }
}
module.exports = new Page();
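Usage would then look something like this sketch ('loginButton' is just a hypothetical locator key, and './page' a hypothetical path to the module above):
const page = require('./page'); // hypothetical path

(async () => {
    const locator = await page.getElements('loginButton');
    console.log(locator);
})();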

How to produce a callback function inside functions [duplicate]

Can anyone give me a simple example of Node.js callbacks? I have already searched for the same on many websites but was not able to understand it properly. Please give me a simple example.
getDbFiles(store, function(files) {
    getCdnFiles(store, function(files) {
    })
})
I want to do something like that...
var myCallback = function(data) {
    console.log('got data: ' + data);
};

var usingItNow = function(callback) {
    callback('get it?');
};
Now open node or browser console and paste the above definitions.
Finally use it with this next line:
usingItNow(myCallback);
With Respect to the Node-Style Error Conventions
Costa asked what this would look like if we were to honor the node error callback conventions.
In this convention, the callback should expect to receive at least one argument, the first argument, as an error. Optionally we will have one or more additional arguments, depending on the context. In this case, the context is our above example.
Here I rewrite our example in this convention.
var myCallback = function(err, data) {
    if (err) throw err; // Check for the error and throw if it exists.
    console.log('got data: ' + data); // Otherwise proceed as usual.
};

var usingItNow = function(callback) {
    callback(null, 'get it?'); // I don't want to throw an error, so I pass null for the error argument.
};
If we want to simulate an error case, we can define usingItNow like this
var usingItNow = function(callback) {
    var myError = new Error('My custom error!');
    callback(myError, 'get it?'); // I send my error as the first argument.
};
The final usage is exactly the same as in above:
usingItNow(myCallback);
The only difference in behavior would be contingent on which version of usingItNow you've defined: the one that feeds a "truthy value" (an Error object) to the callback for the first argument, or the one that feeds it null for the error argument.
A callback function is simply a function you pass into another function so that function can call it at a later time. This is commonly seen in asynchronous APIs; the API call returns immediately because it is asynchronous, so you pass a function into it that the API can call when it's done performing its asynchronous task.
The simplest example I can think of in JavaScript is the setTimeout() function. It's a global function that accepts two arguments. The first argument is the callback function and the second argument is a delay in milliseconds. The function is designed to wait the appropriate amount of time, then invoke your callback function.
setTimeout(function () {
    console.log("10 seconds later...");
}, 10000);
You may have seen the above code before but just didn't realize the function you were passing in was called a callback function. We could rewrite the code above to make it more obvious.
var callback = function () {
    console.log("10 seconds later...");
};

setTimeout(callback, 10000);
Callbacks are used all over the place in Node because Node is built from the ground up to be asynchronous in everything that it does. Even when talking to the file system. That's why a ton of the internal Node APIs accept callback functions as arguments rather than returning data you can assign to a variable. Instead it will invoke your callback function, passing the data you wanted as an argument. For example, you could use Node's fs library to read a file. The fs module exposes two unique API functions: readFile and readFileSync.
The readFile function is asynchronous while readFileSync is obviously not. You can see that they intend you to use the async calls whenever possible since they called them readFile and readFileSync instead of readFile and readFileAsync. Here is an example of using both functions.
Synchronous:
var data = fs.readFileSync('test.txt');
console.log(data);
The code above blocks thread execution until all the contents of test.txt are read into memory and stored in the variable data. In node this is typically considered bad practice. There are times though when it's useful, such as when writing a quick little script to do something simple but tedious and you don't care much about saving every nanosecond of time that you can.
Asynchronous (with callback):
var callback = function (err, data) {
    if (err) return console.error(err);
    console.log(data);
};

fs.readFile('test.txt', callback);
First we create a callback function that accepts two arguments err and data. One problem with asynchronous functions is that it becomes more difficult to trap errors so a lot of callback-style APIs pass errors as the first argument to the callback function. It is best practice to check if err has a value before you do anything else. If so, stop execution of the callback and log the error.
Synchronous calls have an advantage when there are thrown exceptions because you can simply catch them with a try/catch block.
try {
    var data = fs.readFileSync('test.txt');
    console.log(data);
} catch (err) {
    console.error(err);
}
In asynchronous functions it doesn't work that way. The API call returns immediately so there is nothing to catch with the try/catch. Proper asynchronous APIs that use callbacks will always catch their own errors and then pass those errors into the callback where you can handle it as you see fit.
In addition to callbacks though, there is another popular style of API that is commonly used called the promise. If you'd like to read about them then you can read the entire blog post I wrote based on this answer here.
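As a quick taste of that style, here is the same kind of file read done with the promise-based fs API (a sketch; note there is no callback argument at all):
const fs = require('fs').promises;

fs.readFile('test.txt', 'utf8')
    .then(data => console.log(data))   // runs once the read completes
    .catch(err => console.error(err)); // errors land here instead of in a callback parameter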
Here is an example of copying text file with fs.readFile and fs.writeFile:
var fs = require('fs');

var copyFile = function(source, destination, next) {
    // we should read the source file first
    fs.readFile(source, function(err, data) {
        if (err) return next(err); // error occurred
        // now we can write data to the destination file
        fs.writeFile(destination, data, next);
    });
};
And here's an example of using the copyFile function:
copyFile('foo.txt', 'bar.txt', function(err) {
    if (err) {
        // either fs.readFile or fs.writeFile returned an error
        console.log(err.stack || err);
    } else {
        console.log('Success!');
    }
});
A common Node.js pattern is that the first argument of the callback function is an error. You should use this pattern because all control-flow modules rely on it:
next(new Error('I cannot do it!')); // error
next(null, results); // no error occurred, return result
Try this example, as simple as it gets: just copy the code, save it as newfile.js, and run node newfile to run the application.
function myNew(next) {
    console.log("I'm the one who initiates the callback");
    next("nope", "success");
}

myNew(function(err, res) {
    console.log("I got back from the callback", err, res);
});
We are creating a simple function like this:
callBackFunction(data, function (err, response) {
    console.log(response)
})

// callback function
function callBackFunction(data, callback) {
    // write your logic, then return your result as:
    callback("", result) // if no error
    callback(error, "") // if error
}
// delay callback function
function delay(seconds, callback) {
    setTimeout(() => {
        console.log('The long delay ended');
        callback('Task Complete');
    }, seconds * 1000);
}

// Execute the delay function
delay(1, res => {
    console.log(res);
})
const fs = require('fs');

fs.stat('input.txt', function (err, stats) {
    if (err) {
        console.log(err);
    } else {
        console.log(stats);
        console.log('Completed Reading File');
    }
});
'fs' is a Node module which helps you work with files.
The callback function ensures it only runs after the information about 'input.txt' has been fully retrieved.
The fs.stat() function gets file information such as the file size, date created, and date modified.
This blog-post has a good write-up:
https://codeburst.io/javascript-what-the-heck-is-a-callback-aba4da2deced
function doHomework(subject, callback) {
    alert(`Starting my ${subject} homework.`);
    callback();
}

function alertFinished() {
    alert('Finished my homework');
}

doHomework('math', alertFinished);
A callback is a function passed as a parameter to a Higher Order Function (wikipedia). A simple implementation of a callback would be:
const func = callback => callback('Hello World!');
To call the function, simply pass another function as an argument to the function defined.
func(string => console.log(string));
// Traditional JS way
function display(result) {
    console.log("Sum of given numbers is", result);
}

function calculateSum(num1, num2, callback) {
    console.log("First number is", num1, "and second number is", num2);
    const result = num1 + num2;
    callback(result);
}

calculateSum(10, 20, display);

// Node JS way
const display = function(result) {
    console.log("Sum of given numbers is", result);
}

const calculateSum = function(num1, num2, callback) {
    console.log("First number is", num1, "and second number is", num2);
    const result = num1 + num2;
    callback(result);
}

calculateSum(10, 20, display);

// By using an anonymous function
const calculateSum = function(num1, num2, callback) {
    console.log("First number is", num1, "and second number is", num2);
    const result = num1 + num2;
    callback(result);
}

calculateSum(10, 20, function(result) {
    console.log("Sum of given numbers is", result)
});

// By using an arrow function
const calculateSum = function(num1, num2, callback) {
    console.log("First number is", num1, "and second number is", num2);
    const result = num1 + num2;
    callback(result);
}

calculateSum(10, 20, x => console.log("Sum of given numbers is", x));

Is Node.js native Promise.all processing in parallel or sequentially?

I would like to clarify this point, as the documentation is not too clear about it:
Q1: Is Promise.all(iterable) processing all promises sequentially or in parallel? Or, more specifically, is it the equivalent of running chained promises like
p1.then(p2).then(p3).then(p4).then(p5)....
or is it some other kind of algorithm where all p1, p2, p3, p4, p5, etc. are being called at the same time (in parallel) and results are returned as soon as all resolve (or one rejects)?
Q2: If Promise.all runs in parallel, is there a convenient way to run an iterable sequentially?
Note: I don't want to use Q, or Bluebird, but all native ES6 specs.
Is Promise.all(iterable) executing all promises?
No, promises cannot "be executed". They start their task when they are being created - they represent the results only - and you are executing everything in parallel even before passing them to Promise.all.
Promise.all does only await multiple promises. It doesn't care in what order they resolve, or whether the computations are running in parallel.
is there a convenient way to run an iterable sequentially?
If you already have your promises, you can't do much but Promise.all([p1, p2, p3, …]) (which does not have a notion of sequence). But if you do have an iterable of asynchronous functions, you can indeed run them sequentially. Basically you need to get from
[fn1, fn2, fn3, …]
to
fn1().then(fn2).then(fn3).then(…)
and the solution to do that is using Array::reduce:
iterable.reduce((p, fn) => p.then(fn), Promise.resolve())
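As a small self-contained sketch of that reduce pattern in action (the delay helper is invented just for illustration):
const delay = ms => x => new Promise(res => setTimeout(() => res(x), ms));

const fns = [
    () => delay(100)('one').then(console.log),
    () => delay(100)('two').then(console.log),
    () => delay(100)('three').then(console.log),
];

// Runs the functions strictly one after another: logs one, two, three.
fns.reduce((p, fn) => p.then(fn), Promise.resolve());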
In parallel
await Promise.all(items.map(async (item) => {
    await fetchItem(item)
}))
Advantages: faster. All iterations will be started even if one fails later on. However, it will "fail fast": use Promise.allSettled to complete all iterations even if some throw, as sketched below. Technically, these are concurrent invocations, not parallel ones.
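A sketch of that allSettled variant, assuming the same items and fetchItem as in the snippet above:
const results = await Promise.allSettled(items.map(item => fetchItem(item)));
for (const r of results) {
    if (r.status === 'fulfilled') console.log('ok:', r.value);
    else console.log('failed:', r.reason);
}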
In sequence
for (const item of items) {
    await fetchItem(item)
}
Advantages: Variables in the loop can be shared by each iteration. Behaves like normal imperative synchronous code.
Node.js does not run promises in parallel; it runs them concurrently, since it has a single-threaded event-loop architecture. It is possible to run things in parallel by creating new child processes to take advantage of multiple CPU cores.
Parallel vs. Concurrent
In fact, what Promise.all does is stack the promise functions in the appropriate queue (see the event-loop architecture), run them concurrently (call P1, P2, ...), then wait for each result and resolve the Promise.all with all the promises' results.
Promise.all will fail at the first promise that fails, unless you manage the rejection yourself.
There is a major difference between parallel and concurrent: the first runs different computations in separate processes at exactly the same time, each progressing at its own rhythm, while the other executes the different computations one after another, without waiting for the previous one to finish, so they all make progress together without depending on each other.
Finally, to answer your question, Promise.all will execute neither in parallel nor sequentially, but concurrently.
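A small sketch that makes the concurrent start visible (both timers begin immediately, so the whole thing takes about one second rather than two):
const wait = (ms, label) => new Promise(resolve => {
    console.log('started', label);
    setTimeout(() => resolve(label), ms);
});

Promise.all([wait(1000, 'p1'), wait(1000, 'p2')])
    .then(labels => console.log('both done:', labels)); // ~1s total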
Bergi's answer got me on the right track using Array.reduce.
However, to actually get the functions returning my promises to execute one after another I had to add some more nesting.
My real use case is an array of files that I need to transfer in order one after another due to limits downstream...
Here is what I ended up with:
getAllFiles().then((files) => {
    return files.reduce((p, theFile) => {
        return p.then(() => {
            return transferFile(theFile); // function returns a promise
        });
    }, Promise.resolve()).then(() => {
        console.log("All files transferred");
    });
}).catch((error) => {
    console.log(error);
});
As previous answers suggest, using:
getAllFiles().then((files) => {
    return files.reduce((p, theFile) => {
        return p.then(transferFile(theFile));
    }, Promise.resolve()).then(() => {
        console.log("All files transferred");
    });
}).catch((error) => {
    console.log(error);
});
didn't wait for one transfer to complete before starting another, and the "All files transferred" text came before even the first file transfer had started.
Not sure what I did wrong, but wanted to share what worked for me.
Edit: Since I wrote this post, I have come to understand why the first version didn't work: then() expects a function returning a promise, so you should pass in the function name without parentheses! Since my function wants an argument, I need to wrap it in an anonymous function taking no arguments, as the sketch below illustrates.
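Here is a tiny runnable sketch of that difference (transfer is a stand-in for a promise-returning function like transferFile):
const transfer = name => new Promise(res => setTimeout(() => {
    console.log('transferred', name);
    res();
}, 500));

// Wrong: transfer('a') runs immediately; .then() ignores its non-function result,
// so 'wrong chain done' logs before the transfer finishes.
Promise.resolve().then(transfer('a')).then(() => console.log('wrong chain done'));

// Right: .then() receives a function, invoked only after the previous step settles,
// so 'right chain done' logs after the transfer finishes.
Promise.resolve().then(() => transfer('b')).then(() => console.log('right chain done'));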
You can also process an iterable sequentially using a recursive function. For example, given an array a to process with the asynchronous function someAsyncFunction():
var a = [1, 2, 3, 4, 5, 6]

function someAsyncFunction(n) {
    return new Promise((resolve, reject) => {
        setTimeout(() => {
            console.log("someAsyncFunction: ", n)
            resolve(n)
        }, Math.random() * 1500)
    })
}

// You can run each array element sequentially with:
function sequential(arr, index = 0) {
    if (index >= arr.length) return Promise.resolve()
    return someAsyncFunction(arr[index])
        .then(r => {
            console.log("got value: ", r)
            return sequential(arr, index + 1)
        })
}

sequential(a).then(() => console.log("done"))
sequential(a).then(() => console.log("done"))
Just to elaborate on @Bergi's answer (which is very succinct, but tricky to understand ;)
This code will run each item in the array and add the next 'then chain' to the end:
function eachorder(prev, order) {
    return prev.then(function() {
        return get_order(order)
            .then(check_order)
            .then(update_order);
    });
}

orderArray.reduce(eachorder, Promise.resolve());
Using async/await, an array of promise-returning functions can easily be executed sequentially:
let a = [fn1, fn2, fn3]; // each element is a function that returns a promise

async function func() {
    for (let i = 0; i < a.length; i++) {
        await a[i]();
    }
}

func();
Note: in the above implementation, if a promise is rejected, the rest won't be executed. If you want all of them to run regardless, wrap your await a[i](); inside a try/catch, as sketched below.
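A sketch of that try/catch variant, so one rejection does not stop the rest:
async function runAll(fns) {
    for (const fn of fns) {
        try {
            await fn(); // each element is a function returning a promise
        } catch (err) {
            console.error('step failed, continuing:', err);
        }
    }
}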
In parallel
See this example:
const resolveAfterTimeout = async i => {
    return new Promise(resolve => {
        console.log("CALLED");
        setTimeout(() => {
            resolve(`RESOLVED ${i}`);
        }, 5000);
    });
};

const call = async () => {
    const res = await Promise.all([
        resolveAfterTimeout(1),
        resolveAfterTimeout(2),
        resolveAfterTimeout(3),
        resolveAfterTimeout(4),
        resolveAfterTimeout(5),
        resolveAfterTimeout(6)
    ]);
    console.log({ res });
};

call();
Running the code logs "CALLED" for all six promises immediately, and when they resolve, all six responses are logged together after the timeout.
I stumbled across this page while trying to solve a problem in NodeJS: reassembly of file chunks. Basically:
I have an array of filenames.
I need to append all those files, in the correct order, to create one large file.
I must do this asynchronously.
Node's 'fs' module does provide appendFileSync, but I didn't want to block the server during this operation. I wanted to use the fs.promises module and find a way to chain this stuff together. The examples on this page didn't quite work for me because I actually needed two operations: fsPromises.readFile() to read in the file chunk, and fsPromises.appendFile() to concat to the destination file. Maybe if I were better with JavaScript I could have made the previous answers work for me. ;-)
I stumbled across this and I was able to hack together a working solution:
/**
 * Sequentially append a list of files into a specified destination file.
 */
exports.append_files = function (destinationFile, arrayOfFilenames) {
    return arrayOfFilenames.reduce((previousPromise, currentFile) => {
        return previousPromise.then(() => {
            return fsPromises.readFile(currentFile).then(fileContents => {
                return fsPromises.appendFile(destinationFile, fileContents);
            });
        });
    }, Promise.resolve());
};
And here's a jasmine unit test for it:
const fsPromises = require('fs').promises;
const path = require('path');
const fsUtils = require( ... );
const TEMPDIR = 'temp';

describe("test append_files", function() {
    it('append_files should work', async function(done) {
        try {
            // setup: create some files
            await fsPromises.mkdir(TEMPDIR);
            await fsPromises.writeFile(path.join(TEMPDIR, '1'), 'one');
            await fsPromises.writeFile(path.join(TEMPDIR, '2'), 'two');
            await fsPromises.writeFile(path.join(TEMPDIR, '3'), 'three');
            await fsPromises.writeFile(path.join(TEMPDIR, '4'), 'four');
            await fsPromises.writeFile(path.join(TEMPDIR, '5'), 'five');
            const filenameArray = [];
            for (var i = 1; i < 6; i++) {
                filenameArray.push(path.join(TEMPDIR, i.toString()));
            }
            const DESTFILE = path.join(TEMPDIR, 'final');
            await fsUtils.append_files(DESTFILE, filenameArray);
            // confirm the "final" file exists
            const fsStat = await fsPromises.stat(DESTFILE);
            expect(fsStat.isFile()).toBeTruthy();
            // confirm the content of the "final" file
            const expectedContent = Buffer.from('onetwothreefourfive', 'utf8');
            var fileContents = await fsPromises.readFile(DESTFILE);
            expect(fileContents).toEqual(expectedContent);
            done();
        }
        catch (err) {
            fail(err);
        }
        finally {
        }
    });
});
You can do it with a for loop.
An async function returning a promise:
async function createClient(client) {
    return await Client.create(client);
}

let clients = [client1, client2, client3];
If you write the following code, the clients are created in parallel:
const createdClientsArray = yield Promise.all(clients.map((client) =>
    createClient(client)
));
But if you want to create the clients sequentially, you should use a for loop:
const createdClientsArray = [];
for (let i = 0; i < clients.length; i++) {
    const createdClient = yield createClient(clients[i]);
    createdClientsArray.push(createdClient);
}
Bergi's answer helped me make the calls sequential. I have added an example below where we call each function only after the previous one has completed:
function func1(param1) {
    console.log("function1 : " + param1);
}

function func2() {
    console.log("function2");
}

function func3(param2, param3) {
    console.log("function3 : " + param2 + ", " + param3);
}

function func4(param4) {
    console.log("function4 : " + param4);
}

param4 = "Kate";

// adding 3 functions to the array
a = [
    () => func1("Hi"),
    () => func2(),
    () => func3("Lindsay", param4)
];

// adding a 4th function
a.push(() => func4("dad"));

// below does func1().then(func2).then(func3).then(func4)
a.reduce((p, fn) => p.then(fn), Promise.resolve());
I've been using for...of to run promises sequentially. I'm not sure if it helps here, but this is what I've been doing:
async function run() {
    for (let val of arr) {
        const res = await someQuery(val)
        console.log(res)
    }
}

run().then().catch()
Yes, you can chain an array of promise-returning functions as follows (this passes the result of each function to the next). You could of course edit it to pass the same argument (or no arguments) to each function.
function tester1(a) {
    return new Promise(function(done) {
        setTimeout(function() {
            done(a + 1);
        }, 1000);
    })
}

function tester2(a) {
    return new Promise(function(done) {
        setTimeout(function() {
            done(a * 5);
        }, 1000);
    })
}

function promise_chain(args, list, results) {
    return new Promise(function(done, errs) {
        var fn = list.shift();
        if (results === undefined) results = [];
        if (typeof fn === 'function') {
            fn(args).then(function(result) {
                results.push(result);
                console.log(result);
                promise_chain(result, list, results).then(done);
            }, errs);
        } else {
            done(results);
        }
    });
}

promise_chain(0, [tester1, tester2, tester1, tester2, tester2]).then(console.log.bind(console), console.error.bind(console));
See this sample of Promise.all working in parallel:
const { range, random, forEach, delay } = require("lodash");

const run = id => {
    console.log(`Start Task ${id}`);
    let prom = new Promise((resolve, reject) => {
        delay(() => {
            console.log(`Finish Task ${id}`);
            resolve(id);
        }, random(2000, 15000));
    });
    return prom;
}

const exec = () => {
    let proms = [];
    forEach(range(1, 10), (id, index) => {
        proms.push(run(id));
    });
    let allPromis = Promise.all(proms);
    allPromis.then(
        res => {
            forEach(res, v => console.log(v));
        }
    );
}

exec();
