Use of promises in loop (node) - javascript

I'm working on a Node.js backend service and use promises to load data. Now I want to build a combined array of items coming from different sources. I have the following solution, but I don't think this is the right way to go about something like this.
var results = [];
loop(items, 0, data, results).then(function() {
    console.log(results);
});

function loop(items, index, data, results) {
    return utils.getPromise(function(resolve, reject) {
        if (index === items.length) {
            // Stop
            resolve();
        } else {
            doAction(items[index], data).then(function(result) {
                if (result) {
                    mergeResults(results, result);
                }
                loop(items, index + 1, data, results).then(resolve, reject);
            }, reject);
        }
    });
}

function doAction(item, data) {
    return utils.getPromise(function(resolve, reject) {
        item.doIt(data).then(resolve, reject);
    });
}
I think the right way would be to return a promise immediately, and add results on the fly, but I don't know exactly how to do this. Any suggestions?

You could use Promise.all to gather all the results from the promises. The Promise.all promise is itself resolved with an array containing the single result of each promise.
"I think the right way would be to return a promise immediately"
Like you suggested, you would just return each promise and collect them in an array. After the loop you pass this array to Promise.all.
Might look like this:
var promiseArray = [];
for (var i = 0; i < array.length; i++) {
    promiseArray.push(doSomethingAsync(array[i]));
}

Promise.all(promiseArray).then(function(responseArray) {
    // do something with the results
});

function randTimeout(t) {
    return new Promise(function (resolve) {
        setTimeout(function () {
            console.log('time done: ' + t);
            resolve();
        }, t);
    });
}

var timeouts = [];
for (var l = 0; l < 20; l++) {
    timeouts.push(Math.random() * 1000 | 0);
}

console.log('start');
Promise.map(timeouts, function (t) {
    return randTimeout(t);
}, {concurrency: 3});
<script src="https://cdnjs.cloudflare.com/ajax/libs/bluebird/3.4.6/bluebird.min.js"></script>
Like @Tobi says, you can use Promise.all.
But Promise.all has one gotcha!
If the array you're processing has lots of items, sending them all off to do async work at the same time can be sub-optimal, and in some cases impossible; e.g. opening too many files at once in Node will get you a too-many-open-files (EMFILE) error.
The Bluebird promise library has a useful function called map, which takes a concurrency option. This allows you to process X promises at a time. Personally I think a map function with a concurrency option should be part of the Promise spec, as it's so useful. Without the concurrency option it has the same effect as Promise.all, without having to build the array first.
In the example above I created 20 random timeouts with a maximum of 1 second each. Without map, the maximum total time would be about 1 second, as all promises would execute at once. But with a concurrency of 3, it runs 3 at a time, and as you can see it takes longer than 1 second to complete.

Related

Looping through promise and waiting?

I made a function that connects to an API which returns an array of 100 objects at a time. The getPageData() function works without the loop when you pass it an int, but when I try to iterate through the pages, I get back nothing. A setTimeout doesn't seem to help the loop either.
Please help
async function updateWholeDB() {
    var results = [];
    for (let i = 0; i < getPages(); i++) {
        setTimeout(function() {
            getPageData(i).then((data) => {
                console.log(data);
                results.push(data);
            })
        }, 2000)
    }
    return Promise.all(results);
}
Promise.all() does its work properly when you pass it an array of promises, not an array of results. It then returns a promise that resolves to an array of results. So, you're not using it properly.
In addition, there doesn't appear to be any reason for a setTimeout() here if things are coded properly.
If what you're trying to do is to end up with an array of results from calling getPageData() a bunch of times, you can do that like this:
function updateWholeDB() {
    let promises = [];
    for (let i = 0; i < getPages(); i++) {
        promises.push(getPageData(i));
    }
    return Promise.all(promises);
}

// usage
updateWholeDB().then(results => {
    console.log(results);
}).catch(err => {
    console.log(err);
});
This assumes that getPageData() returns a promise that resolves properly with the data you are interested in. If that's not the case, then you will have to show us that code too so we can offer advice on how to fix it.
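For reference, a hypothetical getPageData() that would satisfy that assumption might look like this (the endpoint URL and the use of axios are purely illustrative, not from the original question):
const axios = require('axios');

// hypothetical implementation: fetch one page of 100 objects from the API
function getPageData(pageNumber) {
    return axios.get('https://api.example.com/items', { // placeholder URL
        params: { page: pageNumber }
    }).then(response => response.data);
}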

Javascript: Making sure one async function doesn't run until the other one is complete; working with promises

I'm working with fetching information from a github repository. I want to get the list of pull requests within that repo, get the list of commits associated with each pull request, then for each commit I want to get information such as the author of the commit, the number of files associated with each commit and the number of additions and deletions made to each file. I'm using axios and the github API to accomplish this. I know how to work with the API, but the promises and async functions are keeping me from accomplishing my task. I have the following code:
const axios = require('axios');

var mapOfInformationObjects = new Map();
var listOfCommits = [];
var listOfSHAs = [];
var gitApiPrefix = '...'; // link I'll use to start fetching data
var listOfPullRequestDataObjects = [];
var listOfPullRequestNumbers = [];
var mapOfPullNumberToCommits = new Map();

function getAllPullRequests(gitPullRequestApiLink) {
    return new Promise((resolve, reject) => {
        axios.get(gitPullRequestApiLink).then((response) => {
            listOfPullRequestDataObjects = response['data'];
            var k;
            for (k = 0; k < listOfPullRequestDataObjects.length; k++) {
                listOfPullRequestNumbers.push(listOfPullRequestDataObjects[k]['number']);
            }
            resolve(listOfPullRequestNumbers);
        }).catch((error) => {
            reject(error);
        })
    })
}

function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
    var j;
    for (j = 0; j < listOfPRNumbers.length; j++) {
        currPromise = new Promise((resolve, reject) => {
            currentGitApiLink = gitApiPrefix + listOfPRNumbers[j] + "/commits";
            axios.get(currentGitApiLink).then((response) => {
                mapOfPullNumberToCommits.set(listOfPRNumbers[j], response['data']);
                resolve("Done with Pull Request Number: " + listOfPRNumbers[j]);
            }).catch((error) => {
                reject(error);
            })
        })
    }
}

function getListOfCommits(gitCommitApiLink) {
    return new Promise((resolve, reject) => {
        axios.get(gitCommitApiLink).then((response) => {
            resolve(response);
        }).catch((error) => {
            reject(error);
        })
    })
}
So far, I made some functions that I would like to call sequentially.
First I'd like to call getAllPullRequests(someLink)
Then I'd like to call getCommitsForEachPullRequestNumber(listofprnumbers)
Then getListOfCommits(anotherLink)
So it would look something like
getAllPullRequests(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
But two problems arise:
1) I'm not sure if this is how you would call the functions so that the first function in the sequence completes before the next one starts.
2) Because I'm not familiar with JavaScript, I'm not sure whether this is how you should work with promises inside the functions, especially in getCommitsForEachPullRequestNumber, where you run a loop and call axios.get() on each iteration.
Would this be how you would go about accomplishing these two tasks? Any help is much appreciated. Thanks!
When you have a number of asynchronous operations (represented by promises) that you can run all together and you want to know when they are all done, you use Promise.all(). You collect an array of promises and pass it to Promise.all() and it will tell you when they have all completed or when one of them triggers an error. If all complete, Promise.all() will return a promise that resolves to an array of results (one for each asynchronous operation).
When you're iterating an array to do your set of asynchronous operations, it then works best to use .map() because that helps you create a parallel array of promises that you can feed to Promise.all(). Here's how you do that in getCommitsForEachPullRequestNumber():
function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
    let mapOfPullNumberToCommits = new Map();
    return Promise.all(listOfPRNumbers.map(item => {
        let currentGitApiLink = gitApiPrefix + item + "/commits";
        return axios.get(currentGitApiLink).then(response => {
            // put data into the map
            mapOfPullNumberToCommits.set(item, response.data);
        });
    })).then(() => {
        // make resolved value be the map we created, now that everything is done
        return mapOfPullNumberToCommits;
    });
}

// usage:
getCommitsForEachPullRequestNumber(list).then(results => {
    console.log(results);
}).catch(err => {
    console.log(err);
});
Then, in getListOfCommits(), since axios already returns a promise, there is no reason to wrap it in a manually created promise. That is, in fact, considered a promise anti-pattern. Instead, just return the promise that axios already returns. In fact, there's probably no reason to have this function at all, since one can just use axios.get() directly to achieve the same result:
function getListOfCommits(gitCommitApiLink) {
    return axios.get(gitCommitApiLink);
}
Then, in getAllPullRequests() it appears you are just doing one axios.get() call and then processing the results. That can be done like this:
function getAllPullRequests(gitPullRequestApiLink) {
    return axios.get(gitPullRequestApiLink).then(response => {
        let listOfPullRequestDataObjects = response.data;
        return listOfPullRequestDataObjects.map(item => {
            return item.number;
        });
    });
}
Now, if you're trying to execute these three operations sequentially in this order:
getAllPullRequests(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
You can chain the promises from those three operations together to sequence them:
getAllPullRequests(someLink)
    .then(getCommitsForEachPullRequestNumber)
    .then(mapOfPullNumberToCommits => {
        // not entirely sure what you want to do here, perhaps
        // call getListOfCommits on each item in the map?
    }).catch(err => {
        console.log(err);
    });
Or, if you put this code in an async function, then you can use async/await:
async function getAllCommits(someLink) {
    let pullRequests = await getAllPullRequests(someLink);
    let mapOfPullNumberToCommits = await getCommitsForEachPullRequestNumber(pullRequests);
    // then use getListOfCommits() somehow to process mapOfPullNumberToCommits
    return finalResults;
}

getAllCommits(someLink).then(finalResults => {
    console.log(finalResults);
}).catch(err => {
    console.log(err);
});
Not as clean as jfriend00's solution, but I played with your code and it finally worked:
https://repl.it/#gui3/githubApiPromises
you get the list of commits in the variable listOfCommits
I don't understand the purpose of your last function, so I dropped it

calling functions in sequence using callbacks [duplicate]

I have code that looks something like this in javascript:
forloop {
//async call, returns an array to its callback
}
After ALL of those async calls are done, I want to calculate the min over all of the arrays.
How can I wait for all of them?
My only idea right now is to have an array of booleans called done, set done[i] to true in the ith callback function, and then spin on while (not all are done) {}.
edit: I suppose one possible, but ugly, solution would be to update the done array in each callback, and have each callback call the continuing method if all the other done flags are set; the last callback to complete would then call the continuing method.
You haven't been very specific with your code, so I'll make up a scenario. Let's say you have 10 ajax calls and you want to accumulate the results from those 10 ajax calls and then when they have all completed you want to do something. You can do it like this by accumulating the data in an array and keeping track of when the last one has finished:
Manual Counter
var ajaxCallsRemaining = 10;
var returnedData = [];

for (var i = 0; i < 10; i++) {
    doAjax(whatever, function(response) {
        // success handler from the ajax call

        // save response
        returnedData.push(response);

        // see if we're done with the last ajax call
        --ajaxCallsRemaining;
        if (ajaxCallsRemaining <= 0) {
            // all data is here now
            // look through the returnedData and do whatever processing
            // you want on it right here
        }
    });
}
Note: error handling is important here (not shown because it's specific to how you're making your ajax calls). You will want to think about how you're going to handle the case when one ajax call never completes, either with an error or gets stuck for a long time or times out after a long time.
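As a sketch of one way to handle that (the error callback and the hadError flag are illustrative; adapt them to however your ajax wrapper reports failure):
var ajaxCallsRemaining = 10;
var returnedData = [];
var hadError = false; // illustrative: remember whether any call failed

for (var i = 0; i < 10; i++) {
    doAjax(whatever, function(response) {
        returnedData.push(response);
        checkDone();
    }, function(err) {
        // hypothetical error callback: count the call as finished anyway
        hadError = true;
        checkDone();
    });
}

function checkDone() {
    --ajaxCallsRemaining;
    if (ajaxCallsRemaining <= 0) {
        if (hadError) {
            // at least one call failed; decide how to proceed
        } else {
            // all data is in returnedData
        }
    }
}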
jQuery Promises
Adding to my answer in 2014. These days, promises are often used to solve this type of problem since jQuery's $.ajax() already returns a promise and $.when() will let you know when a group of promises are all resolved and will collect the return results for you:
var promises = [];
for (var i = 0; i < 10; i++) {
    promises.push($.ajax(...));
}

$.when.apply($, promises).then(function() {
    // returned data is in arguments[0][0], arguments[1][0], ... arguments[9][0]
    // you can process it here
}, function() {
    // error occurred
});
ES6 Standard Promises
As specified in kba's answer: if you have an environment with native promises built in (a modern browser, Node.js, a babeljs transpile, or a promise polyfill), you can use ES6-specified promises. See this table for browser support. Promises are supported in pretty much all current browsers except IE.
If doAjax() returns a promise, then you can do this:
var promises = [];
for (var i = 0; i < 10; i++) {
    promises.push(doAjax(...));
}

Promise.all(promises).then(function(results) {
    // returned data is in results[0], results[1], ... results[n]
    // you can process it here
}, function(err) {
    // error occurred
});
If you need to make a non-promise async operation into one that returns a promise, you can "promisify" it like this:
function doAjax(...) {
    return new Promise(function(resolve, reject) {
        someAsyncOperation(..., function(err, result) {
            if (err) return reject(err);
            resolve(result);
        });
    });
}
And, then use the pattern above:
var promises = [];
for (var i = 0; i < 10; i++) {
    promises.push(doAjax(...));
}

Promise.all(promises).then(function(results) {
    // returned data is in results[0], results[1], ... results[n]
    // you can process it here
}, function(err) {
    // error occurred
});
Bluebird Promises
If you use a more feature-rich library such as the Bluebird promise library, it has some additional functions built in to make this easier:
var doAjax = Promise.promisify(someAsync);
var someData = [...];

Promise.map(someData, doAjax).then(function(results) {
    // all ajax results here
}, function(err) {
    // some error here
});
Checking in from 2015: we now have native promises in most recent browsers (Edge 12, Firefox 40, Chrome 43, Safari 8, Opera 32, Android Browser 4.4.4 and iOS Safari 8.4, but not Internet Explorer, Opera Mini, or older versions of Android).
If we want to perform 10 async actions and get notified when they've all finished, we can use the native Promise.all, without any external libraries:
function asyncAction(i) {
    return new Promise(function(resolve, reject) {
        var result = calculateResult();
        if (result.hasError()) {
            return reject(result.error);
        }
        return resolve(result);
    });
}

var promises = [];
for (var i = 0; i < 10; i++) {
    promises.push(asyncAction(i));
}

Promise.all(promises).then(function AcceptHandler(results) {
    handleResults(results);
}, function ErrorHandler(error) {
    handleError(error);
});
You can use jQuery's Deferred object along with the when method.
deferredArray = [];
forloop {
    deferred = new $.Deferred();
    ajaxCall(function() {
        deferred.resolve();
    });
    deferredArray.push(deferred);
}

$.when.apply($, deferredArray).then(function() {
    // this code is called after all the ajax calls are done
});
You can emulate it like this:
countDownLatch = {
    count: 0,
    check: function() {
        this.count--;
        if (this.count == 0) this.calculate();
    },
    calculate: function() { ... }
};
then each async call does this:
countDownLatch.count++;
while each async callback, at the end of the method, adds this line:
countDownLatch.check();
In other words, you emulate count-down-latch functionality.
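A minimal worked sketch of the whole pattern (doAsyncCall is a made-up stand-in for your async call):
var countDownLatch = {
    count: 0,
    check: function() {
        this.count--;
        if (this.count == 0) this.calculate();
    },
    calculate: function() {
        // all async calls have finished; compute the min over the arrays here
        console.log('all done');
    }
};

for (var i = 0; i < 10; i++) {
    countDownLatch.count++; // register the call before starting it
    doAsyncCall(i, function(resultArray) {
        // ...store resultArray somewhere...
        countDownLatch.check(); // the last callback to finish triggers calculate()
    });
}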
This is the neatest way, in my opinion:
Promise.all
Fetch API
(For some reason Array.map didn't work inside the .then functions for me, but you can use a .forEach and [].concat() or something similar.)
Promise.all([
    fetch('/user/4'),
    fetch('/user/5'),
    fetch('/user/6'),
    fetch('/user/7'),
    fetch('/user/8')
]).then(responses => {
    // each response body is itself read asynchronously,
    // so collect those promises with another Promise.all
    return Promise.all(responses.map(response => response.json()))
}).then((values) => {
    console.log(values);
})
Use a control flow library like after:
after.map(array, function (value, done) {
    // do something async
    setTimeout(function () {
        // do something with the value
        done(null, value * 2)
    }, 10)
}, function (err, mappedArray) {
    // all done, continue here
    console.log(mappedArray)
})
In Node.js you can use async/await to control the async flow.
async/await is supported in Node.js 7.6+, and the util.promisify() helper for promisifying callbacks is available as of Node.js 8.
Sample code:
const util = require('util');

const foo = async () => {
    try {
        const ids = [100, 101, 102];
        const fetchFromExternalApi = util.promisify(fetchFromExternalApiCallback);
        const promises = ids.map((id) => fetchFromExternalApi(id));
        const dataList = await Promise.all(promises); // dataList is an array
        return dataList;
    } catch (err) {
        // error handling
    }
};
I see several responses using Promise.all(), but that function stops as soon as any promise throws an exception...
The best solution in 2022 is Promise.allSettled() (documentation here: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/allSettled).
A quick sample:
const allPromises = [];
for (const r of records) {
    const promise = update_async(r);
    allPromises.push(promise);
}
const results = await Promise.allSettled(allPromises);
At the end, results is an array with the outcome of each promise:
when ok --> { status: "fulfilled", value: xxx }
when error --> { status: "rejected", reason: Error: an error }
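As a follow-up sketch, splitting those settled results into successes and failures might look like this:
// illustrative post-processing of the settled results
const values = results
    .filter(r => r.status === 'fulfilled')
    .map(r => r.value);
const errors = results
    .filter(r => r.status === 'rejected')
    .map(r => r.reason);
console.log(values.length + ' succeeded, ' + errors.length + ' failed');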

Is Node.js native Promise.all processing in parallel or sequentially?

I would like to clarify this point, as the documentation is not too clear about it;
Q1: Is Promise.all(iterable) processing all promises sequentially or in parallel? Or, more specifically, is it the equivalent of running chained promises like
p1.then(p2).then(p3).then(p4).then(p5)....
or is it some other kind of algorithm where all p1, p2, p3, p4, p5, etc. are being called at the same time (in parallel) and results are returned as soon as all resolve (or one rejects)?
Q2: If Promise.all runs in parallel, is there a convenient way to run an iterable sequentially?
Note: I don't want to use Q, or Bluebird, but all native ES6 specs.
Is Promise.all(iterable) executing all promises?
No, promises cannot "be executed". They start their task when they are being created - they represent the results only - and you are executing everything in parallel even before passing them to Promise.all.
Promise.all does only await multiple promises. It doesn't care in what order they resolve, or whether the computations are running in parallel.
is there a convenient way to run an iterable sequentially?
If you already have your promises, you can't do much but Promise.all([p1, p2, p3, …]) (which does not have a notion of sequence). But if you do have an iterable of asynchronous functions, you can indeed run them sequentially. Basically you need to get from
[fn1, fn2, fn3, …]
to
fn1().then(fn2).then(fn3).then(…)
and the solution to do that is using Array::reduce:
iterable.reduce((p, fn) => p.then(fn), Promise.resolve())
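For instance, a small self-contained illustration of that reduce pattern (the three step functions are made up):
// three illustrative async functions, each returning a promise when called
const fn1 = () => new Promise(r => setTimeout(() => { console.log('one'); r(); }, 300));
const fn2 = () => new Promise(r => setTimeout(() => { console.log('two'); r(); }, 200));
const fn3 = () => new Promise(r => setTimeout(() => { console.log('three'); r(); }, 100));

// runs strictly in order despite the decreasing timeouts: logs "one", "two", "three"
[fn1, fn2, fn3].reduce((p, fn) => p.then(fn), Promise.resolve());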
In parallel
await Promise.all(items.map(async (item) => {
    await fetchItem(item)
}))
Advantages: faster. All iterations will be started even if one fails later on. However, it will "fail fast": use Promise.allSettled to complete all iterations in parallel even if some throw. Technically, these are concurrent invocations, not parallel ones.
In sequence
for (const item of items) {
    await fetchItem(item)
}
Advantages: variables in the loop can be shared by each iteration, as the sketch after this list shows. Behaves like normal imperative synchronous code.
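As a small sketch of that shared-state advantage (run inside an async function; fetchItem is assumed, as above, to return a promise):
// sequential accumulation: every iteration can see state from earlier ones
let failures = 0;
const results = [];
for (const item of items) {
    try {
        results.push(await fetchItem(item));
    } catch (err) {
        failures++; // shared counter updated across iterations
    }
}
console.log('fetched ' + results.length + ' items, ' + failures + ' failures');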
Node.js does not run promises in parallel; it runs them concurrently, since it has a single-threaded event-loop architecture. It is possible to run things in parallel by creating new child processes to take advantage of multiple CPU cores.
Parallel vs. Concurrent
In fact, what Promise.all does is stack the promise callbacks in the appropriate queue (see event loop architecture), run them concurrently (call P1, P2, ...), then wait for each result and resolve the Promise.all with all the promises' results.
Promise.all will fail at the first promise that fails, unless you manage the rejection yourself.
There is a major difference between parallel and concurrent: the first runs different computations in separate processes at exactly the same time, each progressing at its own rhythm, while the other executes the different computations interleaved, one after another without waiting for the previous one to finish, so they progress together without depending on each other.
Finally, to answer your question, Promise.all will execute neither in parallel nor sequentially but concurrently.
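A tiny sketch of the point that promises start their work when they are created, not when they are awaited:
const t0 = Date.now();
const delay = ms => new Promise(resolve => setTimeout(resolve, ms));

// both timers start here, at creation time
const p1 = delay(1000).then(() => console.log('p1 done after', Date.now() - t0, 'ms'));
const p2 = delay(1000).then(() => console.log('p2 done after', Date.now() - t0, 'ms'));

// total is ~1000 ms, not ~2000 ms: Promise.all only awaits, it starts nothing
Promise.all([p1, p2]).then(() => console.log('all done after', Date.now() - t0, 'ms'));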
Bergi's answer got me on the right track using Array.reduce.
However, to actually get the functions returning my promises to execute one after another I had to add some more nesting.
My real use case is an array of files that I need to transfer in order one after another due to limits downstream...
Here is what I ended up with:
getAllFiles().then((files) => {
    return files.reduce((p, theFile) => {
        return p.then(() => {
            return transferFile(theFile); // function returns a promise
        });
    }, Promise.resolve()).then(() => {
        console.log("All files transferred");
    });
}).catch((error) => {
    console.log(error);
});
As previous answers suggest, using:
getAllFiles().then((files) => {
    return files.reduce((p, theFile) => {
        return p.then(transferFile(theFile));
    }, Promise.resolve()).then(() => {
        console.log("All files transferred");
    });
}).catch((error) => {
    console.log(error);
});
didn't wait for the transfer to complete before starting another, and the "All files transferred" text appeared before even the first file transfer had started.
Not sure what I did wrong, but wanted to share what worked for me.
Edit: Since I wrote this post I now understand why the first version didn't work. then() expects a function returning a promise, so you should pass in the function name without parentheses! Since my function wants an argument, I need to wrap it in an anonymous function taking no arguments.
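To illustrate that point with a small sketch:
// WRONG: transferFile(theFile) runs immediately, and p.then() receives
// its return value (a promise), not a callback, so nothing is sequenced
p.then(transferFile(theFile));

// RIGHT: pass a function; the wrapper defers the call until p settles
p.then(() => transferFile(theFile));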
You can also process an iterable sequentially with an async function using a recursive function. For example, given an array a to process with asynchronous function someAsyncFunction():
var a = [1, 2, 3, 4, 5, 6]

function someAsyncFunction(n) {
    return new Promise((resolve, reject) => {
        setTimeout(() => {
            console.log("someAsyncFunction: ", n)
            resolve(n)
        }, Math.random() * 1500)
    })
}

// You can run each array sequentially with:
function sequential(arr, index = 0) {
    if (index >= arr.length) return Promise.resolve()
    return someAsyncFunction(arr[index])
        .then(r => {
            console.log("got value: ", r)
            return sequential(arr, index + 1)
        })
}

sequential(a).then(() => console.log("done"))
Just to elaborate on @Bergi's answer (which is very succinct, but tricky to understand ;)
This code will run each item in the array and add the next 'then chain' to the end:
function eachorder(prev, order) {
    return prev.then(function() {
        return get_order(order)
            .then(check_order)
            .then(update_order);
    });
}

orderArray.reduce(eachorder, Promise.resolve());
Using async/await, an array of promise-returning functions can easily be executed sequentially:
let a = [fn1, fn2, fn3]; // each element is a function that returns a promise

async function func() {
    for (let i = 0; i < a.length; i++) {
        await a[i]();
    }
}
func();
Note: in the above implementation, if a promise is rejected, the rest won't be executed. If you want all of them to run regardless, wrap your await a[i](); inside try/catch.
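A sketch of that try/catch variant:
async function funcAll() {
    for (let i = 0; i < a.length; i++) {
        try {
            await a[i]();
        } catch (err) {
            // a rejection here no longer stops the remaining iterations
            console.log('step ' + i + ' failed:', err);
        }
    }
}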
Parallel
See this example:
const resolveAfterTimeout = async i => {
    return new Promise(resolve => {
        console.log("CALLED");
        setTimeout(() => {
            resolve("RESOLVED " + i);
        }, 5000);
    });
};

const call = async () => {
    const res = await Promise.all([
        resolveAfterTimeout(1),
        resolveAfterTimeout(2),
        resolveAfterTimeout(3),
        resolveAfterTimeout(4),
        resolveAfterTimeout(5),
        resolveAfterTimeout(6)
    ]);
    console.log({ res });
};
call();
Running the code logs "CALLED" for all six promises immediately; when they resolve, all six responses are logged together after the timeout.
I stumbled across this page while trying to solve a problem in NodeJS: reassembly of file chunks. Basically:
I have an array of filenames.
I need to append all those files, in the correct order, to create one large file.
I must do this asynchronously.
Node's 'fs' module does provide appendFileSync but I didn't want to block the server during this operation. I wanted to use the fs.promises module and find a way to chain this stuff together. The examples on this page didn't quite work for me because I actually needed two operations: fsPromises.read() to read in the file chunk, and fsPromises.appendFile() to concat to the destination file. Maybe if I was better with JavaScript I could have made the previous answers work for me. ;-)
I stumbled across this and I was able to hack together a working solution:
/**
 * Sequentially append a list of files into a specified destination file.
 */
exports.append_files = function (destinationFile, arrayOfFilenames) {
    return arrayOfFilenames.reduce((previousPromise, currentFile) => {
        return previousPromise.then(() => {
            return fsPromises.readFile(currentFile).then(fileContents => {
                return fsPromises.appendFile(destinationFile, fileContents);
            });
        });
    }, Promise.resolve());
};
And here's a jasmine unit test for it:
const fsPromises = require('fs').promises;
const path = require('path');
const fsUtils = require( ... );
const TEMPDIR = 'temp';

describe("test append_files", function() {
    it('append_files should work', async function(done) {
        try {
            // setup: create some files
            await fsPromises.mkdir(TEMPDIR);
            await fsPromises.writeFile(path.join(TEMPDIR, '1'), 'one');
            await fsPromises.writeFile(path.join(TEMPDIR, '2'), 'two');
            await fsPromises.writeFile(path.join(TEMPDIR, '3'), 'three');
            await fsPromises.writeFile(path.join(TEMPDIR, '4'), 'four');
            await fsPromises.writeFile(path.join(TEMPDIR, '5'), 'five');

            const filenameArray = [];
            for (var i = 1; i < 6; i++) {
                filenameArray.push(path.join(TEMPDIR, i.toString()));
            }

            const DESTFILE = path.join(TEMPDIR, 'final');
            await fsUtils.append_files(DESTFILE, filenameArray);

            // confirm "final" file exists
            const fsStat = await fsPromises.stat(DESTFILE);
            expect(fsStat.isFile()).toBeTruthy();

            // confirm content of the "final" file
            const expectedContent = Buffer.from('onetwothreefourfive', 'utf8');
            var fileContents = await fsPromises.readFile(DESTFILE);
            expect(fileContents).toEqual(expectedContent);
            done();
        }
        catch (err) {
            fail(err);
        }
        finally {
        }
    });
});
You can do it with a for loop.
An async function returns a promise:
async function createClient(client) {
    return await Client.create(client);
}

let clients = [client1, client2, client3];

If you write the following, the clients are created in parallel:
const createdClientsArray = await Promise.all(clients.map((client) =>
    createClient(client)
));

But if you want to create the clients sequentially, you should use a for loop:
const createdClientsArray = [];
for (let i = 0; i < clients.length; i++) {
    const createdClient = await createClient(clients[i]);
    createdClientsArray.push(createdClient);
}
Bergi's answer helped me make the calls sequential. I have added an example below where we call each function only after the previous one has completed:
function func1(param1) {
    console.log("function1 : " + param1);
}
function func2() {
    console.log("function2");
}
function func3(param2, param3) {
    console.log("function3 : " + param2 + ", " + param3);
}
function func4(param4) {
    console.log("function4 : " + param4);
}

param4 = "Kate";

// adding 3 functions to array
a = [
    () => func1("Hi"),
    () => func2(),
    () => func3("Lindsay", param4)
];

// adding 4th function
a.push(() => func4("dad"));

// below does func1().then(func2).then(func3).then(func4)
a.reduce((p, fn) => p.then(fn), Promise.resolve());
I've been using for...of to run promises sequentially. I'm not sure if it helps here, but this is what I've been doing:
async function run() {
    for (let val of arr) {
        const res = await someQuery(val);
        console.log(res);
    }
}

run().then().catch()
Yes, you can chain an array of promise-returning functions as follows
(this passes the result of each function to the next). You could of course edit it to pass the same argument (or no arguments) to each function.
function tester1(a) {
    return new Promise(function(done) {
        setTimeout(function() {
            done(a + 1);
        }, 1000);
    });
}

function tester2(a) {
    return new Promise(function(done) {
        setTimeout(function() {
            done(a * 5);
        }, 1000);
    });
}

function promise_chain(args, list, results) {
    return new Promise(function(done, errs) {
        var fn = list.shift();
        if (results === undefined) results = [];
        if (typeof fn === 'function') {
            fn(args).then(function(result) {
                results.push(result);
                console.log(result);
                promise_chain(result, list, results).then(done);
            }, errs);
        } else {
            done(results);
        }
    });
}

promise_chain(0, [tester1, tester2, tester1, tester2, tester2]).then(console.log.bind(console), console.error.bind(console));
See this sample of Promise.all working in parallel:
const { range, random, forEach, delay } = require("lodash");

const run = id => {
    console.log(`Start Task ${id}`);
    let prom = new Promise((resolve, reject) => {
        delay(() => {
            console.log(`Finish Task ${id}`);
            resolve(id);
        }, random(2000, 15000));
    });
    return prom;
}

const exec = () => {
    let proms = [];
    forEach(range(1, 10), (id, index) => {
        proms.push(run(id));
    });
    let allPromis = Promise.all(proms);
    allPromis.then(
        res => {
            forEach(res, v => console.log(v));
        }
    );
}

exec();

Performance considerations with `Promise.all` and a large amount of asynchronous operations

When using Promise.all with asynchronous code (with synchronous code there is nothing to worry about), you can suffer from severe performance (if not other kinds of) issues when you send out a whole bunch (be it tens, hundreds, thousands, or even millions) of requests and the receiving end of your asynchronous operations (e.g. the local filesystem, an HTTP server, a database, etc.) does not gracefully handle that many parallel requests.
For that case, it would be perfect if we could tell Promise.all up to how many promises we want in flight simultaneously. However, since Promises/A+ is supposed to be lean, adding these sorts of fancy features would certainly not make sense.
So what would be a better way of achieving this?
Well, first of all, it's impossible to give a concurrency argument to Promise.all, since promises represent already-started operations: you cannot queue them or make them wait before executing.
What you want to run with limited concurrency is promise-returning functions. Lucky for you, Bluebird ships with this feature (as of version 2.x), using Promise.map:
Promise.map(largeArray, promiseReturningFunction, {concurrency: 16});
The concurrency parameter decides how many operations may happen at once; note that this is not a global value, but applies only to this chain. For example:
Promise.map([1,2,3,4,5,6,7,8,9,10], function(i) {
    console.log("Shooting operation", i);
    return Promise.delay(1000);
}, {concurrency: 2});
Fiddle
Note that execution order is not guaranteed.
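If you'd rather not pull in Bluebird, here is a minimal sketch of the same idea using only native promises (the mapWithConcurrency name and the worker-pool approach are my own, not Bluebird's):
// runs fn over items with at most `limit` operations in flight at once;
// results come back in input order
async function mapWithConcurrency(items, fn, limit) {
    const results = new Array(items.length);
    let next = 0;

    // each worker repeatedly claims the next unprocessed index
    async function worker() {
        while (next < items.length) {
            const i = next++;
            results[i] = await fn(items[i]);
        }
    }

    const workers = [];
    for (let w = 0; w < Math.min(limit, items.length); w++) {
        workers.push(worker());
    }
    await Promise.all(workers);
    return results;
}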
Since I was not able to find a pre-existing library to take care of promise batching, I wrote a simple primitive myself. It is a class that wraps an array of functions to be executed, and partitions it into batches of given size. It will wait for each batch to finish before running the next. It is a rather naive implementation. A full-blown throttling mechanism would probably be desirable in some networking scenarios.
Fiddle.
Code:
/**
 * Executes any number of functions, one batch at a time.
 *
 * @see http://jsfiddle.net/93z8L6sw/2
 */
var Promise_allBatched = (function() {
    var Promise_allBatched = function(arr, batchSize) {
        if (arr.length == 0) return Promise.resolve([]);
        batchSize = batchSize || 10;
        var results = [];
        return _runBatch(arr, batchSize, results, 0)
            .return(results); // return all results (Bluebird's .return)
    };

    function _runBatch(arr, batchSize, results, iFrom) {
        // run next batch
        var requests = [];
        var iTo = Math.min(arr.length, iFrom + batchSize);
        for (var i = iFrom; i < iTo; ++i) {
            var fn = arr[i];
            var request;
            if (fn instanceof Function) {
                request = fn();
            } else {
                request = fn;
            }
            requests.push(request); // start promise
        }

        return Promise.all(requests) // run batch
            .then(function(batchResults) {
                results.push.apply(results, batchResults); // store all results in one array
                console.log('Finished batch: ' + results.length + '/' + arr.length);
            })
            .then(function() {
                if (iTo < arr.length) {
                    // keep recursing
                    return _runBatch(arr, batchSize, results, iTo);
                }
            });
    }

    return Promise_allBatched;
})();
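Usage might look like this (the work functions are illustrative; note the helper relies on Bluebird's .return, so Bluebird must be loaded):
// ten functions that each start an async operation when called
var work = [];
for (let i = 0; i < 10; i++) {
    work.push(() => new Promise(resolve => setTimeout(() => resolve(i), 100)));
}

// run them 3 at a time; resolves with all ten results in order
Promise_allBatched(work, 3).then(results => console.log(results));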
// a large array
let arry = [1, 2, 4, /* ...etc */];

async function processAll() {
    while (arry.length) {
        // Splice off 10 at a time; you can change the limit as you wish
        // (note: your server should be able to handle your limit).
        // Awaiting here makes each batch finish before the next one starts.
        await Promise.all(arry.splice(0, 10).map(async (eachArryElement) => {
            let res = await yourAsyncMethod(eachArryElement);
        }));
    }
}

function yourAsyncMethod(data) {
    return new Promise((resolve, reject) => {
        // your logic
        resolve('your output')
    })
}
