ES6 Dynamic Promise Chaining from array - javascript

Scenario
I have an array of URLs that I need to download, however each must also be supplied with a unique transaction ID that must be requested from the server and only increments when a request is successful.
Problem
As I loop through the array I need to wait for both the request for the transaction ID and the request for the file to complete before starting the next iteration of the loop but the number of files is not fixed so need to dynamically build a chain of promises.
Pseudocode
Below is some pseudocode, getFiles() is the problem because all the requests get the same transaction Id as they don't wait for the previous request to finish.
// Asker's pseudocode: requests the next transaction id from the server and
// resolves with it (rejects on failure).
// NOTE(review): the function name has a typo ("Transation"); kept as in the
// question. getNextTransactionId() and `error` are undefined placeholders.
function getTransationId(){
return new Promise((resolve,reject)=> {
let id = getNextTransactionId();
if(id!=error){
resolve(id);
}else{
reject(error);
}
})
}
// Asker's pseudocode: downloads `url` using the given transaction id and
// resolves with the response, rejecting with the request error.
// NOTE(review): pseudocode — Node's real http.request does not take an
// (err, response) callback like this.
function getFile(url, transactionId){
return new Promise((resolve,reject)=>{
http.request(url+transactionId, function(err,response){
if(err){
reject(err);
}else{
resolve(response);
}
});
});
}
// Asker's problem code: the loop fires every getTransactionId()/getFile()
// pair at once — nothing waits for the previous promise, so all requests
// observe the same transaction id.
function getFilesFromArray(urlArray){
for(let url of urlArray){
getTransactionId().then(resolve=>getFile(url,resolve),reject=>console.error(reject));
}
}
Question
How do I chain promises together dynamically?
Answer
Here's a JSFiddle of Ovidiu's answer

A functional approach is to use reduce to iterate and return a final promise chained up from each sub-promise. It also helps building the results e.g. in an array:
// Sequentially downloads every URL: reduce() builds a single promise chain,
// seeded with Promise.resolve([]), where each step waits for the previous
// file before requesting the next transaction id, and accumulates the
// downloaded files in an array.
function getFilesFromArray(urlArray){
  const filesPromise = urlArray.reduce((curPromise, url) => {
    return curPromise
      .then(curFiles => {
        return getTransactionId()
          .then(id => getFile(url, id))
          .then(newFile => [...curFiles, newFile]);
      });
  }, Promise.resolve([]));
  filesPromise.then(files => {
    console.log(files);
  }); // FIX: the original snippet was missing the closing ")" here
  // Return the chain so callers can also await the collected files.
  return filesPromise;
}
This effectively builds a promise chain that:
starts with a static Promise with a value [] representing the initial set of files: Promise.resolve([])
on each iteration, returns a promise that waits for the curPromise in the chain and then
performs getTransactionId and uses the id to getFile
once the file is retrieved it returns an array containing the curFiles set from the curPromise (previous values) with the newFile concatenated onto it
the end result will be a single promise with all files collected

You can do something along these lines
// Recursively downloads urlArray[i..] one at a time, pushing each response
// into `results`. Returns a promise that resolves with `results` once every
// URL has been processed.
// FIXES vs the original snippet: the promise chain (including the recursive
// call) is now returned, so callers can observe completion; the base case
// resolves with the results; and rejections carry the actual error instead
// of undefined.
function getAllFiles(i, results, urlArray) {
  if (i == urlArray.length) return Promise.resolve(results);
  return getTransationId().then(id => {
    return new Promise((resolve, reject) => {
      http.request(urlArray[i] + id, (err, response) => {
        if (err) {
          reject(err); // propagate the real error, not undefined
        } else {
          results.push(response);
          resolve();
        }
      });
    });
  }).then(() => {
    // Chain the recursive call so the returned promise tracks the whole list.
    return getAllFiles(i + 1, results, urlArray);
  });
}

Try using async/await.
Read more here
// Processes the URLs strictly one at a time: each iteration awaits both the
// transaction-id request and the file download before moving on.
async function getFilesFromArray(urlArray) {
  for (let i = 0; i < urlArray.length; i += 1) {
    // wrap these awaits in a try/catch block if you want to continue with
    // execution when one of the requests fails
    const transactionId = await getTransactionId();
    const file = await getFile(urlArray[i], transactionId);
  }
}

You can simplify logic if you run it via synchronous executor nsynjs. Nsynjs will pause when some function evaluates to promise, and then assigns the result to data property. The code will transform like this:
// nsynjs version: getTransactionId()/getFile() evaluate to promises, and the
// nsynjs engine pauses at each ".data" access until the promise settles — so
// the loop runs sequentially even though it reads as synchronous code.
function getFilesFromArray(urlArray){
for(var i = 0; i<urlArray.length; i++) {
var trId = getTransactionId().data;
// trId is ready here
var fileContent = getFile(urlArray[i],trId).data;
// file data is ready here
console.log('fileContent=',fileContent);
};
};
// Run the function through the nsynjs engine; the callback fires once the
// whole (paused/resumed) function has finished.
nsynjs.run(getFilesFromArray,{},urls,function(){
console.log('getFilesFromArray is done');
});
getFilesFromArray can be further simplified to:
// Same nsynjs loop collapsed to one line: nsynjs still pauses at each
// ".data" access, so the id request completes before the file request.
function getFilesFromArray(urlArray){
for(var i = 0; i<urlArray.length; i++) {
var fileContent = getFile(urlArray[i],getTransactionId().data).data;
console.log('fileContent=',fileContent);
};
};

Related

How to wait for the iteration to finish when pushing result of a callback function into an array

What is the correct way to implement array.push so that "array_of_results" is returned after the forEach iteration is finished?
const postgres = require("./postgres");
// Asker's problem code: postgres.query() is asynchronous, so the forEach
// loop only *starts* the queries — the function returns (and logs) the
// still-empty array before any callback has pushed a result into it.
function get_array(value) {
var array_of_results = []
value.forEach( item => {
postgres.query(item["id"],function(res){
console.log(res) //gives proper res after empty array
array_of_results.push(res);
})
});
console.log(array_of_results)// prints empty array
return array_of_results;
}
Edit:
and postgres.js looks like :
// Asker's postgres.js: thin callback wrapper around pg's Pool.query.
const { Pool } = require("pg");
const pool = new Pool();
var query_string = "select...."
// Runs query_string with [id] and passes the first row to `call`.
// NOTE(review): on error this only logs — `call` is never invoked, so any
// caller waiting on the callback would hang.
function query(id, call) {
pool.query(query_string, [id], (err, res) => {
if (err) {
console.log(err.stack)
} else {
call(res.rows[0])
}
})
}
module.exports = {
query
}
There are a few ways to do this, but first you need to understand what is actually happening.
In postgres.query(item["id"],function(res){ you are calling postgres.query with (1) an item ID and (2) a callback function. That call happens and then immediately continues in your calling code. So now you've just sent a bunch of requests to your database, and then immediately return an empty array. Those callbacks (2) have not been called yet.
To get the data back to your calling function, you'll need to either pass a callback instead of using return, or change to async/await.
Using async/await in every iteration of your loop is not as efficient, as you're waiting for each call to return sequentially. For the most efficient method, you will need to fire the requests and wait for them all to complete. You can do this by using promises.
You can modify your code to push a promise into an array for each iteration of the loop, then call (and await) Promise.all on the array of promises.
Here's a basic rewrite for you:
postgres.js:
// Promise-based wrapper around pool.query: resolves with the first result
// row on success, logs and rejects with the error otherwise.
function query(id) {
  return new Promise((resolve, reject) => {
    pool.query(query_string, [id], (error, result) => {
      if (error) {
        console.log(error.stack)
        reject(error)
        return;
      }
      resolve(result.rows[0])
    })
  })
}
// Export the promise-returning query for the async get_array below.
module.exports = {
query
}
get_array implementation :
// Starts every query immediately (in parallel) and waits for all of them
// with Promise.all before returning the populated results array.
async function get_array(value) {
  const array_of_promises = value.map((item) => postgres.query(item["id"]));
  const array_of_results = await Promise.all(array_of_promises);
  console.log(array_of_results)// prints populated array
  return array_of_results;
}
Note that when you call get_array you'll have to use await before the call, e.g. change var array = get_array(items) to var array = await get_array(items) and using await in a function requires it to be declared as an async function.
If you can't declare it as an async function, you may change the calling code to consume the promise:
var arrayPromise = get_array(items);
arrayPromise.then((results) => {
// do something with results
// but remember you cannot _return_ from within a callback, as discussed above
});

Javascript: Making sure one async function doesn't run until the other one is complete; working with promises

I'm working with fetching information from a github repository. I want to get the list of pull requests within that repo, get the list of commits associated with each pull request, then for each commit I want to get information such as the author of the commit, the number of files associated with each commit and the number of additions and deletions made to each file. I'm using axios and the github API to accomplish this. I know how to work with the API, but the promises and async functions are keeping me from accomplishing my task. I have the following code:
const axios = require('axios');
var mapOfInformationObjects = new Map();
var listOfCommits = [];
var listOfSHAs = [];
var gitApiPrefix = link I'll use to start fetching data;
var listOfPullRequestDataObjects = [];
var listOfPullRequestNumbers = [];
var mapOfPullNumberToCommits = new Map();
// Asker's code: fetches the pull-request list and resolves with the PR
// numbers. NOTE(review): wrapping axios.get in new Promise is the
// promise-constructor anti-pattern, and the results are accumulated in
// module-level globals as a side effect.
function getAllPullRequests(gitPullRequestApiLink) {
return new Promise((resolve, reject) => {
axios.get(gitPullRequestApiLink).then((response) =>{
listOfPullRequestDataObjects = response['data'];
var k;
for (k = 0; k < listOfPullRequestDataObjects.length; k++){
listOfPullRequestNumbers.push(listOfPullRequestDataObjects[k]['number']);
}
resolve(listOfPullRequestNumbers);
}).catch((error) => {
reject(error);
})
})
}
// Asker's code: kicks off one commits request per PR number.
// NOTE(review): the promise is assigned to the implicit global `currPromise`
// (overwritten each iteration) and never returned, so the caller cannot
// wait for any of these requests — this is what the answer below fixes.
function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
var j;
for (j = 0; j < listOfPRNumbers.length; j++) {
currPromise = new Promise((resolve, reject) => {
currentGitApiLink = gitApiPrefix + listOfPRNumbers[j] + "/commits";
axios.get(currentGitApiLink).then((response) => {
mapOfPullNumberToCommits.set(listOfPRNumbers[j], response['data']);
resolve("Done with Pull Request Number: " + listOfPRNumbers[j]);
}).catch((error) => {
reject(error);
})
})
}
}
// Asker's code: fetches one commits link. NOTE(review): the new Promise
// wrapper is redundant — axios.get already returns a promise.
function getListOfCommits(gitCommitApiLink){
return new Promise((resolve, reject) => {
axios.get(gitCommitApiLink).then((response) => {
resolve(response);
}).catch((error) => {
reject(error);
})
})
}
So far, I made some functions that I would like to call sequentially.
First I'd like to call getAllPullRequestNumbers(someLink)
Then I'd like to call getCommitsForEachPullRequestNumber(listofprnumbers)
Then getListOfCommits(anotherLink)
So it would look something like
getAllPullRequestNumbers(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
But two problems arise:
1) I'm not sure if this is how you would call the functions so that the first function in the sequence completes before the other.
2) Because I'm not familiar with Javascript, I'm not sure, especially with the getCommitsForEachPullRequestNumber function since you run a loop and call axios.get() on each iteration of the loop, if this is how you work with promises within the functions.
Would this be how you would go about accomplishing these two tasks? Any help is much appreciated. Thanks!
When you a number of asynchronous operations (represented by promises) that you can run all together and you want to know when they are all done, you use Promise.all(). You collect an array of promises and pass it to Promise.all() and it will tell you when they have all completed or when one of them triggers an error. If all completed, Promise.all() will return a promise that resolves to an array of results (one for each asynchronous operation).
When you're iterating an array to do your set of asynchronous operations, it then works best to use .map() because that helps you create a parallel array of promises that you can feed to Promise.all(). Here's how you do that in getCommitsForEachPullRequestNumber():
// Fetches the commits for every PR number in parallel and resolves with a
// Map of PR number -> commits once all requests have completed.
async function getCommitsForEachPullRequestNumber(listOfPRNumbers) {
  const mapOfPullNumberToCommits = new Map();
  await Promise.all(listOfPRNumbers.map(async (prNumber) => {
    const commitsLink = gitApiPrefix + prNumber + "/commits";
    const response = await axios.get(commitsLink);
    // put data into the map
    mapOfPullNumberToCommits.set(prNumber, response.data);
  }));
  // make resolved value be the map we created, now that everything is done
  return mapOfPullNumberToCommits;
}
// usage:
// (start the requests, then consume the resolved Map or the first error)
getCommitsForEachPullRequestNumber(list).then(results => {
console.log(results);
}).catch(err => {
console.log(err);
});
Then, in getListOfCommits(), since axios already returns a promise, there is no reason to wrap it in a manually created promise. That is, in fact, considered a promise anti-pattern. Instead, just return the promise that axios already returns. In fact, there's probably not even a reason to have this as a function since one can just use axios.get() directly to achieve the same result:
function getListOfCommits(gitCommitApiLink){
  // axios.get() already yields a promise — hand it straight back.
  const pendingResponse = axios.get(gitCommitApiLink);
  return pendingResponse;
}
Then, in getAllPullRequests() it appears you are just doing one axios.get() call and then processing the results. That can be done like this:
// Fetches the pull-request list and resolves with just the PR numbers.
async function getAllPullRequests(gitPullRequestApiLink) {
  const response = await axios.get(gitPullRequestApiLink);
  const listOfPullRequestDataObjects = response.data;
  return listOfPullRequestDataObjects.map((item) => item.number);
}
Now, if you're trying to execute these three operations sequentially in this order:
getAllPullRequests(someLink)
getCommitsForEachPullRequestNumber(listofprnumbers)
getListOfCommits(anotherlink)
You can chain the promises from those three operations together to sequence them:
// Sequence the three operations by chaining their promises: each .then
// runs only after the previous step's promise has resolved.
getAllPullRequests(someLink)
.then(getCommitsForEachPullRequestNumber)
.then(mapOfPullNumberToCommits => {
// not entirely sure what you want to do here, perhaps
// call getListOfCommits on each item in the map?
}).catch(err => {
console.log(err);
});
Or, if you put this code in an async function, then you can use async/await:
// Same sequencing with async/await: each await pauses until the previous
// step's promise resolves.
async function getAllCommits(someLink) {
  let pullRequests = await getAllPullRequests(someLink);
  let mapOfPullNumberToCommits = await getCommitsForEachPullRequestNumber(pullRequests);
  // then use getlistOfCommits() somehow to process mapOfPullNumberToCommits
  return finalResults;
}
// FIX: the function must be *called* — the original read
// "getAllCommits.then(...)", which fails because a function object has
// no .then method.
getAllCommits(someLink).then(finalResults => {
  console.log(finalResults);
}).catch(err => {
  console.log(err);
});
not as clean as jfriend00 solution,
but I played with your code and it finally worked
https://repl.it/#gui3/githubApiPromises
you get the list of commits in the variable listOfCommits
I don't understand the purpose of your last function, so I dropped it

Sequential execution of Promise.all

Hi, I need to execute promises one after the other. How do I achieve this using Promise.all? Any help would be awesome. Below is a sample of my code; I am currently using it but it executes in parallel, so the search will not work properly.
// Asker's API handler: maps every id in arrayOfData through searchAndInsert
// and waits for all of them with Promise.all.
// NOTE(review): Promise.all only awaits promises that are already running
// in parallel — which is exactly the behaviour the asker wants to avoid.
public testData: any = (req, res) => {
// This method is called first via API and then promise is triggerd
var body = req.body;
// set up data eg 2 is repeated twice so insert 2, 5 only once into DB
// Assuming we cant control the data and also maybe 3 maybe inside the DB
let arrayOfData = [1,2,3,2,4,5,5];
const promises = arrayOfData.map(this.searchAndInsert.bind(this));
Promise.all(promises)
.then((results) => {
// we only get here if ALL promises fulfill
console.log('Success', results);
res.status(200).json({ "status": 1, "message": "Success data" });
})
.catch((err) => {
// Will catch failure of first failed promise
console.log('Failed:', err);
res.status(200).json({ "status": 0, "message": "Failed data" });
});
}
// Asker's code (illustrative): resolves with `data` once it exists in the
// DB, inserting it first when missing.
// NOTE(review): the insert branch as written is a syntax error —
// "insertIntoDB(data).then() => resolve(data);" should read
// "insertIntoDB(data).then(() => resolve(data));".
public searchAndInsert: any = (data) => {
// There are database operations happening here like searching for other
// entries in the JSON and inserting to DB
console.log('Searching and updating', data);
return new Promise((resolve, reject) => {
// This is not an other function its just written her to make code readable
if(dataExistsInDB(data) == true){
resolve(data);
} else {
// This is not an other function its just written her to make code readable
insertIntoDB(data).then() => resolve(data);
}
});
}
I looked up in google and saw the reduce will help I would appreciate any help on how to convert this to reduce or any method you suggest (Concurrency in .map did not work)
Promises unfortunately do not allow any control of their flow. It means that once you create a new Promise, it will run its asynchronous parts as it likes.
The Promise.all does not change it, its only purpose is that it checks all promises that you put into it and it is resolved once all of them are finished (or one of them fail).
To be able to create and control asynchronous flow, the easiest way is to wrap the creation of Promise into function and create some kind of factory method. Then instead of creating all promises upfront, you just create only one promise when you need it, wait until it is resolved and after it continue in same behaviour.
// Runs the given promise *factories* one at a time: the next factory is
// invoked only after the previous promise has resolved, and each resolved
// value is logged.
async function doAllSequentually(fnPromiseArr) {
  for (const makePromise of fnPromiseArr) {
    const val = await makePromise();
    console.log(val);
  }
}
// Factory helper: defers creating the promise until the returned
// zero-argument function is actually invoked.
function createFnPromise(val) {
  return () => Promise.resolve(val);
}
// Demo: build ten deferred promises, then run them strictly one after another.
const arr = [];
for (let j=0; j < 10; j++) {
arr.push(createFnPromise(Math.random()));
}
doAllSequentually(arr).then(() => console.log('finished'));
PS: It is also possible without async/await using standard promise-chains, but it requires to be implemented with recursion.
If anyone else cares about ESLint complaining about the use of "for" and the "no await in loop" here is a typescript ESLint friendly version of the above answer:
// ESLint-friendly sequential runner: recursion instead of a for/await loop.
// NOTE(review): as the follow-up answer points out, this receives Promise
// values that are already executing — it only sequences the *awaiting*,
// not the work itself.
async function runPromisesSequentially<T>(promises: Array<Promise<T>>):Promise<Array<T>> {
if (promises.length === 0) return [];
const [firstElement, ...rest] = promises;
return [await firstElement, ...(await runPromisesSequentially(rest))];
}
You can then just replace Promise.all by runPromisesSequentially.
#lmX2015's answer is close but it's taking in promises that have already started executing.
A slight modification fixes it
// Fixed variant: takes promise *factories* (() => Promise<T>), so each
// promise is created — i.e. its work started — only after the previous
// one has resolved.
export async function runPromisesSequentially<T>(functions: (() => Promise<T>)[]): Promise<T[]> {
if (functions.length === 0) {
return [];
}
const [first, ...rest] = functions;
return [await first(), ...(await runPromisesSequentially(rest))];
}

Promise not fulfilling in nodejs

I'm working on nodejs app currently, one part of which, tests some api calls and then returns as a promise, then perform another function.
So - I'm looping through an array of promises with the following two functions:
Over all function for all apis
// Asker's code: recursively tests apiList one entry at a time.
// NOTE(review): each recursive call creates a brand-new promise that is
// never linked back to the outer one — fulfill() at the base case settles
// the innermost promise, which nobody is listening to, so the original
// caller never observes completion.
function testAllApis(apiList, counter = 0){
return new Promise(function(fulfill, reject){
console.log(counter);
if(counter == apiList.length){
console.log('test1');
fulfill();
console.log('test2');
}
else {
testSingleApi(apiList[counter]).then(() => {
testAllApis(apiList, counter + 1);
}).catch((e) => {
console.log(e);
reject(e);
testAllApis(apiList, counter + 1);
})
}
})
}
Function for each individual array
// Asker's code: wraps a single http GET in a promise and fulfills with the
// response status code (rejects with the request error message).
function testSingleApi(thisApi){
return new Promise(function(fulfill, reject){
// Build "/api/<name>/testapi/" from the module path string.
var apiUrl = '/api/' + thisApi.substr(7).slice(0, -3) + '/testapi/';
var options = {
hostname: serverHost,
port: serverPort,
path: apiUrl,
method: 'GET',
};
var req = http.request(options, (res) => {
console.log(res.statusCode);
fulfill(res.statusCode);
});
req.on('error', (e) => {
reject(e.message);
});
req.end();
});
}
When I call this in the terminal it functions as intended, and console logs the success codes (200) of the api calls I am making, but after the third one, when 'counter' is equal to the length of the array, it goes into the if condition in the testAllApis function, console logs 'test1', then 'test2' and doesn't fulfill() at all.
Does anyone have any insight into this? I am still quite new to promises and tried searching for a solution to this online but it was quite a specific question so thought to post here instead.
It is easier to use reduce to run promises sequentially:
var funcs = apiList.map((api) => testSingleApi(api));
var promiseSerial = (funcs) =>
funcs.reduce((promise, func) =>
promise.then(result = func().then(Array.prototype.concat.bind(result))),
Promise.resolve([]));
promiseSerial(promises)
.then(...)
.catch(...);
You can avoid recursion and get cleaner and more manageable code if you use async/await.
If your node supports async/await, you can refactor you logic like this:
// Awaits each API test in turn, logging its status before starting the next.
async function testAllApis(apiList){
  for (const api of apiList) {
    console.log(await testSingleApi(api));
  }
}
// The async function returns a promise, so completion is observable here.
testAllApis(apiList).then(function(){
console.log("all done at this point")
});
If your node does not supports async/await, you can use nsynjs module and change your code like this:
// nsynjs fallback for older Node: the ".data" access pauses the function
// until the promise from testSingleApi resolves, giving the same
// sequential flow as the async/await version above.
nsynjs = require('nsynjs');
...
function testAllApis(apiList, testSingleApi){
for(var i=0; i<apiList.length; i++)
console.log(testSingleApi(apiList[i]).data);
}
nsynjs.run(testAllApis,{},apiList,testSingleApi,function(){
console.log("all done at this point");
})

How do I wait until an asynchronous process inside a loop is finished before exiting the loop?

I have some asynchronous code running inside a JavaScript forEach loop. I want to wait until the code inside the asynchronous process has finished running before proceeding after the loop.
Example below:
ids is an array of strings. db is a node module I created to work with MongoDB
// Asker's problem code: db.findApp is asynchronous, so the forEach loop
// finishes (and the code after it runs) before any of the callbacks below
// have fired — nothing here waits for the lookups to complete.
var appIdsNotFound = "";
var count = 0;
ids.forEach(function(id) {
output[count] = {};
//console.log(id);
db.findApp(id, function(error, result) {
if(error) {
fatalError = true;
console.log(error);
} else {
if (result) {
output[count] = result;
//console.log(output[count]);
// NOTE(review): count is shared by all callbacks, so results can
// overwrite each other depending on completion order.
count++;
} else {
appNotFound = true;
appIdsNotFound += id + ", ";
console.log(appIdsNotFound);
}
}
});
});
//more code that we want to wait before executing
Is there a way to wait before executing the rest of the code that is outside the loop, and if so, how would I go about doing that.
Assuming db is some module to access your DB, try to look for the synchronous version. This assumes you are ok with synchronous, since you're attempting to write it that way, waiting for everything before proceeding.
If your db library uses promises, you can use it in conjunction with Promise.all. Fire a request for each item, collect all their promises in an array, feed them to Promise.all. The promise from Promise.all will resolve when all promises resolve.
// Fire one promise-returning lookup per id, then wait for all of them;
// Promise.all resolves once every lookup has completed.
const promises = ids.map(id => db.promiseReturningFindApp(id));
const allRequests = Promise.all(promises).then(responses => {
// responses is an array of all results
});
If you don't have a promise-returning version of your API, wrap db.findApp in a promise, do suggestion #2.
// Promisified wrapper for the callback-style db.findApp: resolves with the
// lookup result, rejects with the lookup error.
function promiseReturningFindApp(id){
  return new Promise((resolve, reject) => {
    db.findApp(id, (error, result) => {
      if (error) {
        reject(error);
      } else {
        resolve(result);
      }
    });
  });
}
Options 2 and 3 are asynchronous, and as such, you technically don't "wait". Therefore, code that needs to execute after can only reside in a callback.
You could make every item into an function and use async:
var async = require('async');
var output = [], appsNotFound = [];
// Turn every id into a task function that async.parallel can run; errors
// record the id in appsNotFound instead of aborting the whole batch.
var appRequests = ids.map((id) => (cb) => {
db.findApp(id, (error, result) => {
if (error) {
appsNotFound.push(id);
return cb();
}
output.push(id);
return cb();
})
})
async.parallel(appRequests, () => {
console.log('N# of Apps found',output.length);
// FIX: the original logged the undeclared name "appIdsNotFound";
// the array declared above is "appsNotFound".
console.log("Ids not found:",appsNotFound.join(','))
console.log("N# Apps not found:",appsNotFound.length)
})
If the DB can't handle the parallel load, try async.series instead (the async library's sequential runner).
You can make something similar with promises if you prefer, but this requires less lines of code.

Categories

Resources