How do I know which promise will resolve first? - javascript

I have a snippet that I'm playing around with where I've purposely placed Promise.resolve(c) before Promise.resolve(a), but I'm noticing that the true value from the bottom Promise.all(...) is being logged first.
const https = require('https');

function getData(substr) {
  let url = ``;
  return new Promise((resolve, reject) => {
    try {
      https.get(url, res => {
        let s = '';
        res.on('data', (d) => {
          s += d;
        });
        res.on('end', () => {
          let data = JSON.parse(s);
          resolve(data);
        });
      });
    } catch (e) {
      reject(e);
    }
  });
}
let a = getData('spiderman');
let b;
let c = getData('superman');
Promise.resolve(c)
.then(value => console.log(value));
Promise.resolve(a)
.then(value => b = value);
Promise.all([a]).then(values => console.log(values[0] === b));
Is there any explanation as to why this is the case? I would think that since Promise.resolve(c) comes first, the promise c would resolve first and be logged. Or is there no way to actually rely on the order of the logs aside from using callbacks? What are the best practices to avoid such issues?

These are asynchronous calls that resolve on completion, invoking the function passed into then.
There is no way to force one of them to finish before the other; if you need that ordering, you should run them sequentially.
The callback you pass to .then() is only invoked when the underlying operation finishes, not at the point where you call Promise.resolve().

Promise resolution order is not guaranteed. That's kinda the point of asynchronous code.
If you rely on order of execution, you should be using synchronous code.
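For instance, a tiny illustration with hypothetical timers standing in for the HTTP calls: the then callbacks fire in completion order, not in the order the promises were created or chained.

const slow = new Promise(resolve => setTimeout(() => resolve('slow'), 200));
const fast = new Promise(resolve => setTimeout(() => resolve('fast'), 50));

slow.then(value => console.log(value)); // chained first...
fast.then(value => console.log(value));
// logs "fast", then "slow"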

Use Promise.all() with both requests if you want them both completed before you do your next step.
let a = getData('spiderman');
let c = getData('superman');
Promise.all([a, c]).then(([spidyResults, superResults]) => {
  // both have completed
  // do stuff with each set of results
}).catch(err => console.log('Ooops, one of them failed'));

Well, as said before, the order of promise completion is not guaranteed, because these are async operations.
If you really need the execution to follow a certain order, you need to put the next thing to be executed inside then, or use async and await, which is much cleaner.
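For instance, a sketch of the await version reusing the asker's getData (hypothetical usage, not part of the original answer):

async function main() {
  const c = await getData('superman'); // finishes before the next line runs
  console.log(c);
  const a = await getData('spiderman');
  const b = a;
  console.log(a === b); // true, and always logged after c
}
main().catch(console.error);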

Related

Javascript: Making sure one async function doesn't run until the other one is complete; working with promises

I'm working with fetching information from a github repository. I want to get the list of pull requests within that repo, get the list of commits associated with each pull request, then for each commit I want to get information such as the author of the commit, the number of files associated with each commit and the number of additions and deletions made to each file. I'm using axios and the github API to accomplish this. I know how to work with the API, but the promises and async functions are keeping me from accomplishing my task. I have the following code:
const axios = require('axios');

var mapOfInformationObjects = new Map();
var listOfCommits = [];
var listOfSHAs = [];
var gitApiPrefix = "link I'll use to start fetching data";
var listOfPullRequestDataObjects = [];
var listOfPullRequestNumbers = [];
var mapOfPullNumberToCommits = new Map();

function getAllPullRequests(gitPullRequestApiLink) {
  return new Promise((resolve, reject) => {
    axios.get(gitPullRequestApiLink).then((response) => {
      listOfPullRequestDataObjects = response['data'];
      var k;
      for (k = 0; k < listOfPullRequestDataObjects.length; k++) {
        listOfPullRequestNumbers.push(listOfPullRequestDataObjects[k]['number']);
      }
      resolve(listOfPullRequestNumbers);
    }).catch((error) => {
      reject(error);
    });
  });
}

function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
  var j;
  for (j = 0; j < listOfPRNumbers.length; j++) {
    currPromise = new Promise((resolve, reject) => {
      currentGitApiLink = gitApiPrefix + listOfPRNumbers[j] + "/commits";
      axios.get(currentGitApiLink).then((response) => {
        mapOfPullNumberToCommits.set(listOfPRNumbers[j], response['data']);
        resolve("Done with Pull Request Number: " + listOfPRNumbers[j]);
      }).catch((error) => {
        reject(error);
      });
    });
  }
}

function getListOfCommits(gitCommitApiLink) {
  return new Promise((resolve, reject) => {
    axios.get(gitCommitApiLink).then((response) => {
      resolve(response);
    }).catch((error) => {
      reject(error);
    });
  });
}
So far, I made some functions that I would like to call sequentially.
First I'd like to call getAllPullRequestNumbers(someLink)
Then I'd like to call getCommitsForEachPullRequestNumber(listofprnumbers)
Then getListOfCommits(anotherLink)
So it would look something like
getAllPullRequestNumbers(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
But two problems arise:
1) I'm not sure if this is how you would call the functions so that the first function in the sequence completes before the other.
2) Because I'm not familiar with JavaScript, I'm not sure whether this is how you work with promises within the functions, especially in getCommitsForEachPullRequestNumber, where you run a loop and call axios.get() on each iteration.
Would this be how you would go about accomplishing these two tasks? Any help is much appreciated. Thanks!
When you have a number of asynchronous operations (represented by promises) that you can run all together and you want to know when they are all done, you use Promise.all(). You collect an array of promises and pass it to Promise.all() and it will tell you when they have all completed or when one of them triggers an error. If all completed, Promise.all() will return a promise that resolves to an array of results (one for each asynchronous operation).
When you're iterating an array to do your set of asynchronous operations, it then works best to use .map() because that helps you create a parallel array of promises that you can feed to Promise.all(). Here's how you do that in getCommitsForEachPullRequestNumber():
function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
  let mapOfPullNumberToCommits = new Map();
  return Promise.all(listOfPRNumbers.map(item => {
    let currentGitApiLink = gitApiPrefix + item + "/commits";
    return axios.get(currentGitApiLink).then(response => {
      // put data into the map
      mapOfPullNumberToCommits.set(item, response.data);
    });
  })).then(() => {
    // make resolved value be the map we created, now that everything is done
    return mapOfPullNumberToCommits;
  });
}

// usage:
getCommitsForEachPullRequestNumber(list).then(results => {
  console.log(results);
}).catch(err => {
  console.log(err);
});
Then, in getListOfCommits(), since axios already returns a promise, there is no reason to wrap it in a manually created promise. That is, in fact, considered a promise anti-pattern. Instead, just return the promise that axios already returns. In fact, there's probably not even a reason to have this as a function, since one can just use axios.get() directly to achieve the same result:
function getListOfCommits(gitCommitApiLink) {
  return axios.get(gitCommitApiLink);
}
Then, in getAllPullRequests() it appears you are just doing one axios.get() call and then processing the results. That can be done like this:
function getAllPullRequests(gitPullRequestApiLink) {
  return axios.get(gitPullRequestApiLink).then(response => {
    let listOfPullRequestDataObjects = response.data;
    return listOfPullRequestDataObjects.map(item => {
      return item.number;
    });
  });
}
Now, if you're trying to execute these three operations sequentially in this order:
getAllPullRequests(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
You can chain the promises from those three operations together to sequence them:
getAllPullRequests(someLink)
  .then(getCommitsForEachPullRequestNumber)
  .then(mapOfPullNumberToCommits => {
    // not entirely sure what you want to do here, perhaps
    // call getListOfCommits on each item in the map?
  }).catch(err => {
    console.log(err);
  });
Or, if you put this code in an async function, then you can use async/await:
async function getAllCommits(someLink) {
  let pullRequests = await getAllPullRequests(someLink);
  let mapOfPullNumberToCommits = await getCommitsForEachPullRequestNumber(pullRequests);
  // then use getListOfCommits() somehow to process mapOfPullNumberToCommits
  return finalResults;
}

getAllCommits(someLink).then(finalResults => {
  console.log(finalResults);
}).catch(err => {
  console.log(err);
});
Not as clean as jfriend00's solution, but I played with your code and it finally worked:
https://repl.it/#gui3/githubApiPromises
You get the list of commits in the variable listOfCommits.
I don't understand the purpose of your last function, so I dropped it.

Sequential execution of Promise.all

Hi, I need to execute promises one after the other. How do I achieve this using Promise.all? Any help would be awesome. Below is a sample of the code I am currently using, but it executes in parallel, so the search will not work properly.
public testData: any = (req, res) => {
  // This method is called first via API and then the promise is triggered
  var body = req.body;
  // set up data, e.g. 2 is repeated twice so insert 2, 5 only once into DB
  // Assuming we can't control the data, and 3 may also already be in the DB
  let arrayOfData = [1, 2, 3, 2, 4, 5, 5];
  const promises = arrayOfData.map(this.searchAndInsert.bind(this));
  Promise.all(promises)
    .then((results) => {
      // we only get here if ALL promises fulfill
      console.log('Success', results);
      res.status(200).json({ "status": 1, "message": "Success data" });
    })
    .catch((err) => {
      // Will catch failure of first failed promise
      console.log('Failed:', err);
      res.status(200).json({ "status": 0, "message": "Failed data" });
    });
}

public searchAndInsert: any = (data) => {
  // There are database operations happening here, like searching for other
  // entries in the JSON and inserting into the DB
  console.log('Searching and updating', data);
  return new Promise((resolve, reject) => {
    // This is not another function, it's just written here to make the code readable
    if (dataExistsInDB(data) == true) {
      resolve(data);
    } else {
      // This is not another function, it's just written here to make the code readable
      insertIntoDB(data).then(() => resolve(data));
    }
  });
}
I looked it up on Google and saw that reduce will help. I would appreciate any help on how to convert this to reduce, or any other method you suggest (concurrency in .map did not work).
Promises unfortunately do not allow any control of their flow. It means: once you create a new Promise, it will run its asynchronous parts as it likes.
Promise.all does not change that; its only purpose is to check all the promises that you put into it, and it resolves once all of them are finished (or one of them fails).
To be able to create and control asynchronous flow, the easiest way is to wrap the creation of the Promise in a function and create some kind of factory method. Then, instead of creating all the promises upfront, you create only one promise when you need it, wait until it resolves, and then continue in the same manner.
async function doAllSequentially(fnPromiseArr) {
  for (let i = 0; i < fnPromiseArr.length; i++) {
    const val = await fnPromiseArr[i]();
    console.log(val);
  }
}

function createFnPromise(val) {
  return () => new Promise(resolve => resolve(val));
}

const arr = [];
for (let j = 0; j < 10; j++) {
  arr.push(createFnPromise(Math.random()));
}

doAllSequentially(arr).then(() => console.log('finished'));
PS: It is also possible without async/await, using standard promise chains, but it requires a recursive implementation.
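For completeness, a minimal sketch of that recursive promise-chain version (same fnPromiseArr shape as above; this sketch is not part of the original answer):

function doAllSequentiallyChained(fnPromiseArr, i = 0) {
  if (i >= fnPromiseArr.length) return Promise.resolve();
  return fnPromiseArr[i]().then(val => {
    console.log(val);
    return doAllSequentiallyChained(fnPromiseArr, i + 1);
  });
}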
If anyone else cares about ESLint complaining about the use of "for" and the "no-await-in-loop" rule, here is a TypeScript, ESLint-friendly version of the above answer:
async function runPromisesSequentially<T>(promises: Array<Promise<T>>): Promise<Array<T>> {
  if (promises.length === 0) return [];
  const [firstElement, ...rest] = promises;
  return [await firstElement, ...(await runPromisesSequentially(rest))];
}
You can then just replace Promise.all by runPromisesSequentially.
lmX2015's answer is close, but it takes in promises that have already started executing.
A slight modification fixes it:
export async function runPromisesSequentially<T>(functions: (() => Promise<T>)[]): Promise<T[]> {
  if (functions.length === 0) {
    return [];
  }
  const [first, ...rest] = functions;
  return [await first(), ...(await runPromisesSequentially(rest))];
}

Node.js line by line with async operation

I am attempting to read a stream line by line and for each line do some async processing. The issue I'm having is how to determine when the operations for all lines are complete. I thought I could use the readline "close" event but that seems to be triggered long before the async operations I've started on the "line" event complete.
Here are the event handlers in question:
logReader.on("line", inputLine => {
self._processLogLine(inputLine, () => {
if (self.streamFinished) {
completionCallback(err);
}
});
});
logReader.on("close", () => {
self.streamFinished = true;
});
The completionCallback method should be called when all line processing on the stream is done.
I've thought about adding counters for line operations started/completed and calling the completionCallback when they match but that seems awkward.
Here is one way you could do it, basically keeping track of each line's success. I don't have enough of your code to test it...
const lineProcessing = [];

const doCompletion = () => {
  if (self.streamFinished && !lineProcessing.includes(false)) {
    // Not sure where 'err' comes from
    completionCallback(err);
  }
}

logReader.on("line", inputLine => {
  lineProcessing.push(false);
  let processingIndex = lineProcessing.length - 1;
  self._processLogLine(inputLine, () => {
    lineProcessing[processingIndex] = true;
    doCompletion();
  });
});

logReader.on("close", () => {
  self.streamFinished = true;
});
It's a little difficult to tell for sure what needs to happen without more context. But here's a quick and somewhat dirty fix that should be adaptable to your situation. It shows how you can accomplish this with promises:
let promises = []

logReader.on("line", inputLine => {
  promises.push(new Promise(resolve => {
    self._processLogLine(inputLine, resolve)
  }))
})

logReader.on("close", () => {
  Promise.all(promises)
    .then(completionCallback)
})
Essentially, just create an array of promises. When you know that all promises have been added to the array, call Promise.all() on it.
If you use this approach, you'd probably want to add error checking (rejecting and catching).
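A sketch of the same idea with rejection wired in; it assumes _processLogLine reports failures through an error argument to its callback, which may not match your actual signature:

let promises = []

logReader.on("line", inputLine => {
  promises.push(new Promise((resolve, reject) => {
    self._processLogLine(inputLine, err => (err ? reject(err) : resolve()))
  }))
})

logReader.on("close", () => {
  Promise.all(promises)
    .then(() => completionCallback())
    .catch(err => completionCallback(err))
})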
Look into generators. They give you a way to do a "for" loop... and when you're done, you just return from the generator function.
So you can have a generator that reads the file, and a function that uses the generator; then, when the loop ends, call your callback.
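For example, here is a rough sketch using an async iterator rather than a plain generator (readline interfaces are async-iterable in modern Node); the names are illustrative, and processLogLine is assumed to be a promise-returning variant of your handler:

const fs = require("fs");
const readline = require("readline");

async function processLog(path, processLogLine) {
  const logReader = readline.createInterface({ input: fs.createReadStream(path) });
  for await (const line of logReader) {
    await processLogLine(line); // each line is fully processed before the next
  }
  // reaching this point means every line has been processed
}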

JS Promise - instantly retrieve some data from a function that returns a Promise

Can anyone recommend a pattern for instantly retrieving data from a function that returns a Promise?
My (simplified) example is an AJAX preloader:
loadPage("index.html").then(displayPage);
If this is downloading a large page, I want to be able to check what's happening and perhaps cancel the process with an XHR abort() at a later stage.
My loadPage function used to (before Promises) return an id that let me do this later:
var loadPageId = loadPage("index.html",displayPage);
...
doSomething(loadPageId);
cancelLoadPage(loadPageId);
In my new Promise based version, I'd imagine that cancelLoadPage() would reject() the original loadPage() Promise.
I've considered a few options all of which I don't like. Is there a generally accepted method to achieve this?
Okay, let's address your bounty note first.
[Hopefully I'll be able to grant the points to someone who says more than "Don't use promises"... ]
Sorry, but the answer here is: "Don't use promises". ES6 Promises have three possible states (to you as a user): Pending, Resolved and Rejected (names may be slightly off).
There is no way for you to see "inside" of a promise to see what has been done and what hasn't - at least not with native ES6 promises. There was some limited work (in other frameworks) done on promise notifications, but those did not make it into the ES6 specification, so it would be unwise of you to use this even if you found an implementation for it.
A promise is meant to represent an asynchronous operation at some point in the future; standalone, it isn't fit for this purpose. What you want is probably more akin to an event publisher - and even that is asynchronous, not synchronous.
There is no safe way for you to synchronously get some value out of an asynchronous call, especially not in JavaScript. One of the main reasons for this is that a good API, if it can be asynchronous, will always be asynchronous.
Consider the following example:
const promiseValue = Promise.resolve(5)
promiseValue.then((value) => console.log(value))
console.log('test')
Now, let's assume that this promise (because we know the value ahead of time) is resolved synchronously. What do you expect to see? You'd expect to see:
> 5
> test
However, what actually happens is this:
> test
> 5
This is because even though Promise.resolve() is a synchronous call that resolves an already-resolved Promise, then() will always be asynchronous; this is one of the guarantees of the specification and it is a very good guarantee because it makes code a lot easier to reason about - just imagine what would happen if you tried to mix synchronous and asynchronous promises.
This applies to all asynchronous calls, by the way: any action in JavaScript that could potentially be asynchronous will be asynchronous. As a result, there is no way for you do any kind of synchronous introspection in any API that JavaScript provides.
That's not to say you couldn't make some kind of wrapper around a request object, like this:
function makeRequest(url) {
  const requestObject = new XMLHttpRequest()
  const result = {
  }

  result.done = new Promise((resolve, reject) => {
    requestObject.onreadystatechange = function() {
      ..
    }
  })

  requestObject.open('GET', url)
  requestObject.send()

  return requestObject
}
But this gets very messy, very quickly, and you still need to use some kind of asynchronous callback for this to work. This all falls down when you try and use Fetch. Also note that Promise cancellation is not currently a part of the spec. See here for more info on that particular bit.
TL;DR: synchronous introspection is not possible on any asynchronous operation in JavaScript, and a Promise is not the way to go even if you were to attempt it. There is no way for you to synchronously display information about a request that is ongoing, for example. In other languages, attempting to do this would require either blocking or a race condition.
Well, if using Angular you can make use of the timeout parameter of the $http service if you need to cancel an ongoing HTTP request.
Example in typescript:
interface ReturnObject {
  cancelPromise: ng.IDeferred<any>;
  httpPromise: ng.IHttpPromise<any>;
}

@Service("moduleName", "aService")
class AService {
  constructor(private $http: ng.IHttpService,
              private $q: ng.IQService) { ; }

  doSomethingAsynch(): ReturnObject {
    var cancelPromise = this.$q.defer<any>();
    var httpPromise = this.$http.get("/blah", { timeout: cancelPromise.promise });
    return { cancelPromise: cancelPromise, httpPromise: httpPromise };
  }
}

@Controller("moduleName", "aController")
class AController {
  constructor(aService: AService) {
    var o = aService.doSomethingAsynch();
    var timeout = setTimeout(() => {
      o.cancelPromise.resolve();
    }, 30 * 1000);
    o.httpPromise.then((response) => {
      clearTimeout(timeout);
      // do code
    }, (errorResponse) => {
      // do code
    });
  }
}
Since this approach already returns an object with two promises, it is not much of a stretch to include any synchronous return data in that object.
If you can describe what type of data you would want to return synchronously from such a method, it would help to identify a pattern. Why can it not be another method that is called prior to, or during, your asynchronous operation?
You can kinda do this, but AFAIK it will require hacky workarounds. Note that exporting the resolve and reject methods is generally considered a promise anti-pattern (i.e. a sign you shouldn't be using promises). See the bottom for something using setTimeout that may give you what you want without workarounds.
let xhrRequest = (path, data, method, success, fail) => {
  const xhr = new XMLHttpRequest();
  // could alternately be structured as polymorphic fns, YMMV
  switch (method) {
    case 'GET':
      xhr.open('GET', path);
      xhr.onload = () => {
        if (xhr.status < 400 && xhr.status >= 200) {
          success(xhr.responseText);
        } else {
          fail(new Error(`Server responded with a status of ${xhr.status}`));
        }
      };
      xhr.onerror = () => {
        fail(new Error('Network error'));
      };
      xhr.send();
      return xhr;
    case 'POST':
      // etc.
      return xhr;
    // and so on...
  }
};
// can work with any function that can take success and fail callbacks
class CancellablePromise {
  constructor (fn, ...params) {
    this.promise = new Promise((res, rej) => {
      this.resolve = res;
      this.reject = rej;
      fn(...params, this.resolve, this.reject);
    });
  }
};
let p = new CancellablePromise(xhrRequest, 'index.html', null, 'GET');
p.promise.then(loadPage).catch(handleError);

// times out after 2 seconds
setTimeout(() => { p.reject(new Error('timeout')) }, 2000);

// for an alternative version that simply tells the user when things
// are taking longer than expected, NOTE this can be done with vanilla
// promises:
let timeoutHandle = setTimeout(() => {
  // don't use alert for real, but you get the idea
  alert('Sorry its taking so long to load the page.');
}, 2000);
p.promise.then(() => clearTimeout(timeoutHandle));
Promises are beautiful. I don't think there is any reason you cannot handle this with promises. There are three ways I can think of.
The simplest way to handle this is within the executor. If you would like to cancel the promise (for instance, because of a timeout), you just define a timeout flag in the executor, turn it on with a setTimeout(_ => timeout = true, 5000) instruction, and resolve or reject only if timeout is false, i.e. !timeout && resolve(res) or !timeout && reject(err). This way your promise remains unresolved indefinitely in case of a timeout, and your onfulfillment and onreject functions at the then stage never get called.
The second is very similar to the first, but instead of keeping a flag you just invoke reject at the timeout with a proper error description, and handle the rest at the then or catch stage.
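A minimal sketch of the first approach (the function name is illustrative), using the same !timeout && resolve(res) idiom described above:

function resolveUnlessTimedOut(work, ms) {
  return new Promise((resolve, reject) => {
    let timeout = false;
    setTimeout(_ => timeout = true, ms);
    work(res => !timeout && resolve(res),
         err => !timeout && reject(err));
  });
}
// on timeout the promise simply stays pending forever,
// so neither the then nor the catch handlers ever run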
However, if you would like to carry the id of your async operation to the sync world, then you can also do it as follows.
In this case you have to promisify the async function yourself. Let's take an example. We have an async function that returns the double of a number. This is the function:
function doubleAsync(data, cb) {
  setTimeout(_ => cb(false, data * 2), 1000);
}
We would like to use promises, so normally we need a promisifier function which takes our async function and returns another function which, when run, takes our data and returns a promise. Right? So here is the promisifier function:
function promisify(fun) {
  return (data) => new Promise((resolve, reject) => fun(data, (err, res) => err ? reject(err) : resolve(res)));
}
Let's see how they work together:
function promisify(fun) {
  return (data) => new Promise((resolve, reject) => fun(data, (err, res) => err ? reject(err) : resolve(res)));
}

function doubleAsync(data, cb) {
  setTimeout(_ => cb(false, data * 2), 1000);
}

var doubleWithPromise = promisify(doubleAsync);

doubleWithPromise(100).then(v => console.log("The asynchronously obtained result is: " + v));
So now you see that our doubleWithPromise(data) function returns a promise, and we chain a then stage to it and access the returned value.
But you need not only a promise but also the id of your async function. This is very simple: your promisified function should return an object with two properties, a promise and an id. Let's see...
This time our async function will return a result randomly within 0-5 seconds. We will obtain its result.id synchronously, along with result.promise, and use this id to cancel the promise if it fails to resolve within 2.5 seconds. Any "Resolve in ... msecs" console log of 2501 msecs or above will result in nothing happening, and the promise is practically cancelled.
function promisify(fun) {
  return function(data) {
    var result = { id: null, promise: null }; // template return object
    result.promise = new Promise((resolve, reject) => result.id = fun(data, (err, res) => err ? reject(err) : resolve(res)));
    return result;
  };
}

function doubleAsync(data, cb) {
  var dur = ~~(Math.random() * 5000); // return the double of the data within 0-5 seconds.
  console.log("Resolve in " + dur + " msecs");
  return setTimeout(_ => cb(false, data * 2), dur);
}

var doubleWithPromise = promisify(doubleAsync),
    promiseDataSet = doubleWithPromise(100);

setTimeout(_ => clearTimeout(promiseDataSet.id), 2500); // give 2.5 seconds to the promise to resolve or cancel it.
promiseDataSet.promise
  .then(v => console.log("The asynchronously obtained result is: " + v));
You can use fetch() with Response.body.getReader(), which returns a reader whose .read() consumes the ReadableStream; the reader also has a cancel method, which returns a Promise that settles once the read of the stream has been cancelled.
// 58977 bytes of text, 59175 total bytes
var url = "https://gist.githubusercontent.com/anonymous/"
          + "2250b78a2ddc80a4de817bbf414b1704/raw/"
          + "4dc10dacc26045f5c48f6d74440213584202f2d2/lorem.txt";
var n = 10000;
var clicked = false;
var button = document.querySelector("button");
button.addEventListener("click", () => { clicked = true });

fetch(url)
  .then(response => response.body.getReader())
  .then(reader => {
    var len = 0;
    reader.read().then(function processData(result) {
      if (result.done) {
        // do stuff when `reader` is `closed`
        return reader.closed.then(function() {
          return "stream complete"
        });
      };
      if (!clicked) {
        len += result.value.byteLength;
      }
      // cancel stream if `button` clicked or
      // total bytes processed is greater than 10000
      if (clicked || len > n) {
        return reader.cancel().then(function() {
          return "read aborted at " + len + " bytes"
        })
      }
      console.log("len:", len, "result value:", result.value);
      return reader.read().then(processData)
    })
    .then(function(msg) {
      alert(msg)
    })
    .catch(function(err) {
      console.log("err", err)
    })
  });

<button>click to abort stream</button>
The method I am currently using is as follows:
var optionalReturnsObject = {};
functionThatReturnsPromise(dataToSend, optionalReturnsObject).then(doStuffOnAsyncComplete);
console.log("Some instant data has been returned here:", optionalReturnsObject);
For me, the advantage of this is that another member of my team can use this in a simple way:
functionThatReturnsPromise(data).then(...);
And not need to worry about the returns object. An advanced user can see from the definitions what is going on.
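A hypothetical sketch of what such a function might look like inside; the xhr field name and the endpoint are assumptions for illustration, not part of the original answer:

function functionThatReturnsPromise(dataToSend, optionalReturnsObject = {}) {
  const xhr = new XMLHttpRequest();
  optionalReturnsObject.xhr = xhr; // available to the caller immediately, e.g. for abort()
  return new Promise((resolve, reject) => {
    xhr.onload = () => resolve(xhr.responseText);
    xhr.onerror = () => reject(new Error('Network error'));
    xhr.open('POST', '/some-endpoint'); // hypothetical endpoint
    xhr.send(dataToSend);
  });
}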

Is Node.js native Promise.all processing in parallel or sequentially?

I would like to clarify this point, as the documentation is not too clear about it.
Q1: Is Promise.all(iterable) processing all promises sequentially or in parallel? Or, more specifically, is it the equivalent of running chained promises like
p1.then(p2).then(p3).then(p4).then(p5)....
or is it some other kind of algorithm where all p1, p2, p3, p4, p5, etc. are being called at the same time (in parallel) and results are returned as soon as all resolve (or one rejects)?
Q2: If Promise.all runs in parallel, is there a convenient way to run an iterable sequentially?
Note: I don't want to use Q, or Bluebird, but all native ES6 specs.
Is Promise.all(iterable) executing all promises?
No, promises cannot "be executed". They start their task when they are being created - they represent the results only - and you are executing everything in parallel even before passing them to Promise.all.
Promise.all does only await multiple promises. It doesn't care in what order they resolve, or whether the computations are running in parallel.
is there a convenient way to run an iterable sequentially?
If you already have your promises, you can't do much but Promise.all([p1, p2, p3, …]) (which does not have a notion of sequence). But if you do have an iterable of asynchronous functions, you can indeed run them sequentially. Basically you need to get from
[fn1, fn2, fn3, …]
to
fn1().then(fn2).then(fn3).then(…)
and the solution to do that is using Array::reduce:
iterable.reduce((p, fn) => p.then(fn), Promise.resolve())
In parallel
await Promise.all(items.map(async (item) => {
  await fetchItem(item)
}))
Advantages: Faster. All iterations will be started even if one fails later on. However, it will "fail fast". Use Promise.allSettled to complete all iterations in parallel even if some throw. Technically, these are concurrent invocations, not parallel ones.
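A small sketch of the allSettled variant (fetchItem and items as in the snippet above, inside an async function):

const results = await Promise.allSettled(items.map(item => fetchItem(item)))
for (const result of results) {
  if (result.status === 'fulfilled') console.log('ok:', result.value)
  else console.log('failed:', result.reason)
}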
In sequence
for (const item of items) {
  await fetchItem(item)
}
Advantages: Variables in the loop can be shared by each iteration. Behaves like normal imperative synchronous code.
NodeJS does not run promises in parallel, it runs them concurrently since it’s a single-threaded event loop architecture. There is a possibility to run things in parallel by creating a new child process to take advantage of the multiple core CPU.
Parallel vs. Concurrent
In fact, what Promise.all does is place the promises in the appropriate queue (see the event loop architecture), run them concurrently (call P1, P2, ...), then wait for each result and resolve the Promise.all with all the promises' results.
Promise.all will fail at the first promise that fails, unless you manage the rejection yourself.
There is a major difference between parallel and concurrent: the first will run different computations in separate processes at exactly the same time, each progressing at its own rhythm, while the other will execute the different computations one after another without waiting for the previous computation to finish, all progressing together without depending on each other.
Finally, to answer your question, Promise.all will execute neither in parallel nor sequentially, but concurrently.
Bergi's answer got me on the right track using Array.reduce.
However, to actually get the functions returning my promises to execute one after another I had to add some more nesting.
My real use case is an array of files that I need to transfer in order one after another due to limits downstream...
Here is what I ended up with:
getAllFiles().then((files) => {
  return files.reduce((p, theFile) => {
    return p.then(() => {
      return transferFile(theFile); // function returns a promise
    });
  }, Promise.resolve()).then(() => {
    console.log("All files transferred");
  });
}).catch((error) => {
  console.log(error);
});
As previous answers suggest, using:
getAllFiles().then((files) => {
  return files.reduce((p, theFile) => {
    return p.then(transferFile(theFile));
  }, Promise.resolve()).then(() => {
    console.log("All files transferred");
  });
}).catch((error) => {
  console.log(error);
});
didn't wait for the transfer to complete before starting another and also the "All files transferred" text came before even the first file transfer was started.
Not sure what I did wrong, but wanted to share what worked for me.
Edit: Since I wrote this post I now understand why the first version didn't work: then() expects a function returning a promise, so you should pass in the function name without parentheses! Now, my function wants an argument, so I need to wrap it in an anonymous function taking no arguments!
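In other words, a minimal illustration of the difference (using the p and transferFile names from the snippets above):

// wrong: transferFile(theFile) runs immediately, and .then receives
// its result (a promise, not a function), so nothing actually waits
p.then(transferFile(theFile));

// right: .then receives a function, which is called only when p resolves
p.then(() => transferFile(theFile));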
You can also process an iterable sequentially with an async function using a recursive function. For example, given an array a to process with asynchronous function someAsyncFunction():
var a = [1, 2, 3, 4, 5, 6]

function someAsyncFunction(n) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      console.log("someAsyncFunction: ", n)
      resolve(n)
    }, Math.random() * 1500)
  })
}

// You can run each array element sequentially with:
function sequential(arr, index = 0) {
  if (index >= arr.length) return Promise.resolve()
  return someAsyncFunction(arr[index])
    .then(r => {
      console.log("got value: ", r)
      return sequential(arr, index + 1)
    })
}

sequential(a).then(() => console.log("done"))
Just to elaborate on Bergi's answer (which is very succinct, but tricky to understand ;)
This code will run each item in the array and add the next 'then chain' to the end:
function eachorder(prev, order) {
  return prev.then(function() {
    return get_order(order)
      .then(check_order)
      .then(update_order);
  });
}

orderArray.reduce(eachorder, Promise.resolve());
Using async/await, an array of promise-returning functions can easily be executed sequentially:

let a = [promise1, promise2, promise3]; // each entry must be a function that returns a promise, since they are invoked below

async function func() {
  for (let i = 0; i < a.length; i++) {
    await a[i]();
  }
}

func();

Note: In the above implementation, if a promise is rejected, the rest won't be executed. If you want all your promises to be executed, then wrap your await a[i](); in a try/catch.
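A sketch of that try/catch wrapping (same array a as above):

async function funcAll() {
  for (let i = 0; i < a.length; i++) {
    try {
      await a[i]();
    } catch (err) {
      console.log('promise ' + i + ' rejected:', err); // keep going with the rest
    }
  }
}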
Parallel: see this example.
const resolveAfterTimeout = async i => {
  return new Promise(resolve => {
    console.log("CALLED");
    setTimeout(() => {
      resolve("RESOLVED", i);
    }, 5000);
  });
};

const call = async () => {
  const res = await Promise.all([
    resolveAfterTimeout(1),
    resolveAfterTimeout(2),
    resolveAfterTimeout(3),
    resolveAfterTimeout(4),
    resolveAfterTimeout(5),
    resolveAfterTimeout(6)
  ]);
  console.log({ res });
};

call();
Running the code logs "CALLED" for all six promises immediately, and when they resolve it logs all six responses at the same time, after the timeout.
I stumbled across this page while trying to solve a problem in NodeJS: reassembly of file chunks. Basically:
I have an array of filenames.
I need to append all those files, in the correct order, to create one large file.
I must do this asynchronously.
Node's 'fs' module does provide appendFileSync but I didn't want to block the server during this operation. I wanted to use the fs.promises module and find a way to chain this stuff together. The examples on this page didn't quite work for me because I actually needed two operations: fsPromises.read() to read in the file chunk, and fsPromises.appendFile() to concat to the destination file. Maybe if I was better with JavaScript I could have made the previous answers work for me. ;-)
I stumbled across this and I was able to hack together a working solution:
/**
 * sequentially append a list of files into a specified destination file
 */
exports.append_files = function (destinationFile, arrayOfFilenames) {
  return arrayOfFilenames.reduce((previousPromise, currentFile) => {
    return previousPromise.then(() => {
      return fsPromises.readFile(currentFile).then(fileContents => {
        return fsPromises.appendFile(destinationFile, fileContents);
      });
    });
  }, Promise.resolve());
};
And here's a jasmine unit test for it:
const fsPromises = require('fs').promises;
const path = require('path');
const fsUtils = require( ... );

const TEMPDIR = 'temp';

describe("test append_files", function() {
  it('append_files should work', async function(done) {
    try {
      // setup: create some files
      await fsPromises.mkdir(TEMPDIR);
      await fsPromises.writeFile(path.join(TEMPDIR, '1'), 'one');
      await fsPromises.writeFile(path.join(TEMPDIR, '2'), 'two');
      await fsPromises.writeFile(path.join(TEMPDIR, '3'), 'three');
      await fsPromises.writeFile(path.join(TEMPDIR, '4'), 'four');
      await fsPromises.writeFile(path.join(TEMPDIR, '5'), 'five');
      const filenameArray = [];
      for (var i = 1; i < 6; i++) {
        filenameArray.push(path.join(TEMPDIR, i.toString()));
      }
      const DESTFILE = path.join(TEMPDIR, 'final');
      await fsUtils.append_files(DESTFILE, filenameArray);
      // confirm "final" file exists
      const fsStat = await fsPromises.stat(DESTFILE);
      expect(fsStat.isFile()).toBeTruthy();
      // confirm content of the "final" file
      const expectedContent = Buffer.from('onetwothreefourfive', 'utf8');
      var fileContents = await fsPromises.readFile(DESTFILE);
      expect(fileContents).toEqual(expectedContent);
      done();
    }
    catch (err) {
      fail(err);
    }
    finally {
    }
  });
});
You can do it with a for loop.
An async function returns a promise:
async function createClient(client) {
  return await Client.create(client);
}

let clients = [client1, client2, client3];
If you write the following code, then the clients are created in parallel:
// inside a generator run by a library like co; with plain async/await, replace yield with await
const createdClientsArray = yield Promise.all(clients.map((client) =>
  createClient(client)
));
But if you want to create the clients sequentially, then you should use a for loop:
const createdClientsArray = [];
for (let i = 0; i < clients.length; i++) {
  const createdClient = yield createClient(clients[i]);
  createdClientsArray.push(createdClient);
}
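For reference, a sketch of the same sequential loop in plain async/await form (assuming createClient as defined above):

async function createClientsSequentially(clients) {
  const createdClientsArray = [];
  for (const client of clients) {
    createdClientsArray.push(await createClient(client)); // one at a time
  }
  return createdClientsArray;
}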
Bergi's answer helped me make the calls sequential. I have added an example below where we call each function after the previous one completes:
function func1(param1) {
  console.log("function1 : " + param1);
}

function func2() {
  console.log("function2");
}

function func3(param2, param3) {
  console.log("function3 : " + param2 + ", " + param3);
}

function func4(param4) {
  console.log("function4 : " + param4);
}

param4 = "Kate";

// adding 3 functions to array
a = [
  () => func1("Hi"),
  () => func2(),
  () => func3("Lindsay", param4)
];

// adding 4th function
a.push(() => func4("dad"));

// below does func1().then(func2).then(func3).then(func4)
a.reduce((p, fn) => p.then(fn), Promise.resolve());
I've been using for...of to run promises sequentially. I'm not sure if it helps here, but this is what I've been doing:
async function run() {
  for (let val of arr) {
    const res = await someQuery(val)
    console.log(val)
  }
}

run().then().catch()
Yes, you can chain an array of promise-returning functions as follows (this passes the result of each function to the next). You could of course edit it to pass the same argument (or no arguments) to each function.
function tester1(a) {
  return new Promise(function(done) {
    setTimeout(function() {
      done(a + 1);
    }, 1000);
  })
}

function tester2(a) {
  return new Promise(function(done) {
    setTimeout(function() {
      done(a * 5);
    }, 1000);
  })
}

function promise_chain(args, list, results) {
  return new Promise(function(done, errs) {
    var fn = list.shift();
    if (results === undefined) results = [];
    if (typeof fn === 'function') {
      fn(args).then(function(result) {
        results.push(result);
        console.log(result);
        promise_chain(result, list, results).then(done);
      }, errs);
    } else {
      done(results);
    }
  });
}

promise_chain(0, [tester1, tester2, tester1, tester2, tester2]).then(console.log.bind(console), console.error.bind(console));
See this sample of Promise.all working in parallel:
const { range, random, forEach, delay } = require("lodash");

const run = id => {
  console.log(`Start Task ${id}`);
  let prom = new Promise((resolve, reject) => {
    delay(() => {
      console.log(`Finish Task ${id}`);
      resolve(id);
    }, random(2000, 15000));
  });
  return prom;
}

const exec = () => {
  let proms = [];
  forEach(range(1, 10), (id, index) => {
    proms.push(run(id));
  });
  let allPromis = Promise.all(proms);
  allPromis.then(
    res => {
      forEach(res, v => console.log(v));
    }
  );
}

exec();
