Node js not resolving array of promises - javascript

I'm trying to execute several async requests and trying to get the output using promises.
If I have multiple requests queued up, the Q.all(promises).then() function doesn't seem to work. For a single request, the promises all resolve. The sample code is here.
var request = require('request');
var Q = require('q');
var sites = ['http://www.google.com', 'http://www.example.com', 'http://www.yahoo.com'];
// var sites = ['http://www.google.com']
var promises = [];
// NOTE(review): for..in iterates the array's keys ('0', '1', '2'), not its
// values, and `site` is an implicit global (no var/let).
for (site in sites) {
// BUG: `var` is function-scoped, so every request callback below closes over
// the same `deferred` binding — only the last deferred ever gets settled,
// which is why Q.all never resolves with multiple sites.
var deferred = Q.defer();
promises.push(deferred.promise);
// BUG: missing `var` makes `options` a global as well.
options = {url: sites[site]};
request(options, function (error, msg, body) {
if (error) {
// NOTE(review): no `return` after reject, so resolve() also runs;
// harmless (a promise settles once) but misleading.
deferred.reject();
}
deferred.resolve();
});
}
Q.all(promises).then (function () {
console.log('All Done');
});
What am I doing wrong here ?
Surya

Here is what I'd do in the scenario, this is the whole code:
// Promisify the `request` module once, then issue one request per URL.
var Q = require('q');
var request = Q.nfbind(require('request'));
var sites = ['http://www.google.com', 'http://www.example.com', 'http://www.yahoo.com'];
// FIX: `sites.map(request)` would call request(url, index, array) — map
// passes three arguments, and the extra ones are misread as options/callback.
// Use a unary wrapper so request receives only the URL.
var requests = sites.map(function (site) {
    return request(site);
});
Q.all(requests).then(function(results){
console.log("All done") // you can access the results of the requests here
});
Now for the why:
Always promisify at the lowest level possible: promisify `request` itself, not each specific request. Prefer automatic promisification to manual promisification to avoid silly errors.
When working on collections - using .map is easier than manually iterating them since we're producing exactly one action per URL here.
This solution is also short and requires minimal nesting.

Don't use for..in to iterate over arrays. What this actually does is set site to the array's keys — the strings "0", "1", "2" — rather than its values, and this just doesn't work very well. Use some other form of iteration like a regular for loop, or Array.prototype.forEach
// Iterating values directly gives each callback its own `site` and `deferred`.
sites.forEach(function (site) {
var deferred = Q.defer();
promises.push(deferred.promise);
// NOTE(review): still missing `var` here — `options` leaks to module scope.
options = {url: site};

The problem is that you're changing the value of deferred on every tick of your for loop. So, the only promise which is actually resolved in your example is the last one.
To fix it you should store the value of deferred in some context. The easiest way to do so is to use Array.prototype.forEach() method instead of for loop:
// Each iteration gets its own `deferred`, captured by its own callback,
// so every promise in `promises` is eventually settled.
sites.forEach(function (site){
var deferred = Q.defer();
// FIX: inside forEach, `site` is the element itself, not an index —
// `sites[site]` would be undefined here.
var options = {url: site};
promises.push(deferred.promise);
request(options, function (error, msg, body) {
if (error) {
deferred.reject();
// Return so we don't also resolve after rejecting.
return;
}
deferred.resolve();
});
})
And you also missed `var` when declaring the options variable. In JavaScript, omitting it creates a global variable (or a module-wide variable in Node.js).

Related

Waiting for all promises called in a loop to finish

I'm using the axios promise library, but my question applies more generally I think. Right now I'm looping over some data and making a single REST call per iteration.
As each call completes I need to add the return value to an object. At a high level, it looks like this:
var mainObject = {};
myArrayOfData.forEach(function(singleElement){
// NOTE(review): missing `var` — `myUrl` becomes a global.
myUrl = singleElement.webAddress;
axios.get(myUrl)
.then(function(response) {
mainObject[response.identifier] = response.value;
});
});
// BUG (the question's point): this line runs before any request has
// completed, so mainObject is still empty when it is logged.
console.log(convertToStringValue(mainObject));
What's happening of course is when I call console.log the mainObject doesn't have any data in it yet, since axios is still reaching out. What's a good way of dealing with this situation?
Axios does have an all method along with a sister spread one, but they appear to be of use if you know ahead of time how many calls you'll be making, whereas in my case I don't know how many loop iterations there will be.
You need to collect all of your promises in an array and then use Promise.all:
// Example of gathering latest Stack Exchange questions across multiple sites
// Helpers for example
// Example of gathering latest Stack Exchange questions across multiple sites
// Helpers for example
const apiUrl = 'https://api.stackexchange.com/2.2/questions?pagesize=1&order=desc&sort=activity&site=';
const sites = ['stackoverflow', 'ubuntu', 'superuser'];
const myArrayOfData = sites.map(function (site) {
    return {webAddress: apiUrl + site};
});
function convertToStringValue(obj) {
    return JSON.stringify(obj, null, '\t');
}
// Original question code
let mainObject = {};
// One GET per element; map yields the promises array directly.
const promises = myArrayOfData.map(function (singleElement) {
    return axios.get(singleElement.webAddress);
});
Promise.all(promises).then(function (results) {
    results.forEach(function (response) {
        const question = response.data.items[0];
        mainObject[question.question_id] = {
            title: question.title,
            link: question.link
        };
    });
    console.log(convertToStringValue(mainObject));
});
<script src="https://unpkg.com/axios@0.19.2/dist/axios.min.js"></script>
It's described in axios docs (Performing multiple concurrent requests section).
Before May 2020 it was possible to do with axios.all(), which is now deprecated.

Alternate implementation for nodejs+mysql+Q solution?

I am trying to avoid the pyramid of doom. I have a REST function on my server that returns a "payload" object with three JSON fields for the results. Each of the three fields is calculated with a mysql query call. I figured out the nodejs way of putting the next query in the callback function of the current query, but pyramid of doom beckons!
I found this solution using Q promises which is fantastic, but I cannot figure out from the Q docs how the deferred and .makeNodeResolver work. Also the results object is intimidating and I had to find my results through trial and error... is there an alternative way to solve this problem? Am I making it harder than it has to be?
Stack Overflow answer with helpful but confusing Q syntax (programath's answer)
My code:
// Runs the QB/RB/WRTE count queries in parallel and sends the combined
// payload back on `response`.
var queryHelper = function(request, response, payload)
{
    var queryQB = queryConstructionHelper(request, '"QB"')
    var queryRB = queryConstructionHelper(request, '"RB"')
    var queryWRTE = queryConstructionHelper(request, '"WRTE"')
    // One parameterized runner replaces the three copy-pasted doQueryN()s.
    // makeNodeResolver() is a Node-style (err, ...results) callback: it
    // rejects the deferred on err and resolves with the results array,
    // which is why the results are unwrapped as results[i][0][0] below
    // (mysql calls back with (rows, fields)).
    function doQuery(query)
    {
        var deferred = Q.defer();
        database.query (query, deferred.makeNodeResolver());
        return deferred.promise;
    }
    Q.all([doQuery(queryQB), doQuery(queryRB), doQuery(queryWRTE)]).then(function(results)
    {
        payLoadAssignHelper('"QB"', payload, results[0][0][0].payloadCount);
        payLoadAssignHelper('"RB"', payload, results[1][0][0].payloadCount);
        payLoadAssignHelper('"WRTE"', payload, results[2][0][0].payloadCount);
        response.send(payload);
    });
}

nodejs Q.all promises on function calling itself

I need to make a request to get a list of html, and I need to scan it and loop through it and make more requests for each item in the list found, and those might have lists inside them, and so on till theres none left.
I need a way to keep track of all the requests called and call another function when they're done. The tricky bit is the function calls itself over and over for any list items found in the HTML.
The problem I'm having is using Q promises the only promises it waits for are from first request made, and I cant understand why assuming node works like I think it does, see code:
var _ = require('underscore');
var request = require('request');
// NOTE(review): Q is used below but never require()d in this snippet.
var allPromises = [];
var finalArray = [];
var start = function(id) {
var deferred = Q.defer();
request.get({
url: 'http://www.example.com/id/' + id
// NOTE(review): the callback declares no parameters, so `body` below is undefined.
}, function() {
_.each(body.items, function(index) {
// NOTE(review): `this` is not the current item inside _.each's callback.
var item = this;
finalArray.push(item);
if(item.hasMore) {
// The recursive call pushes its promise later, inside this callback.
start(item.id);
}
// NOTE(review): missing ')' — this brace does not close the _.each call.
}
deferred.resolve();
});
allPromises.push(deferred.promise);
}
console.log('Starting');
start(1);
// BUG (the question's point): Q.all snapshots the array now, with only the
// first promise in it; the recursive pushes happen after it already ran.
Q.all(allPromises).done(function (values) {
console.log('All Done');
});
What I thought happens is:
1 - starts() is called for the first time and the first deferred var is created
2 - the first request is made and the first created deferred variable is pushed to the promises array
3 - Q.all is called and waits
4 - The first request's callback is called
5 - if the request contains body.x, start() is called again with a new id
6 - a new promises is created and pushed and a new request is made
7 - the first promise is resolved
assuming this only went one level deep
8 - the second promise is resolved
9 - Q.all calls its callback
but in practice, Q.all calls its callback after the first promise, it doesn't wait for any others even though the second promise is pushed before the first promise is resolved.
Why? And how can I make this work?
Update forgot to add the loop inside the request callback.
Answer to edited question:
// FIX: the original snippet used Q and _ without requiring them.
var Q = require('q');
var _ = require('underscore');
var request = require('request');
var finalArray = [];
// Fetches one id, recurses into any items that have more, and returns a
// promise that resolves only when the whole subtree is done.
var start = function(id) {
var deferred = Q.defer();
request.get({
url: 'http://www.example.com/id/' + id
}, function(error, response, body) { // FIX: declare the params so `body` exists
if (error) {
// FIX: propagate request failures instead of leaving the promise pending.
deferred.reject(error);
return;
}
var subitems = [];
_.each(body.items, function(item) { // FIX: the item is the argument, not `this`
finalArray.push(item);
if(item.hasMore) {
subitems.push(start(item.id));
}
}); // FIX: the original was missing the closing ')' of the _.each call
if (subitems.length) {
deferred.resolve(Q.all(subitems)); // resolve result of Q.all
} else {
deferred.resolve();
}
});
return deferred.promise;
}
start(1).done(function() {
console.log('All Done');
});
#Bergi's code
var request = require('request');
// NOTE(review): Q and _ are used below but not require()d in this snippet.
var start = function(id) {
var deferred = Q.defer();
request.get({
url: 'http://www.example.com/id/' + id
// NOTE(review): request callbacks are (error, response, body) — the second
// parameter here actually receives the response object, not the body;
// confirm against the request module's docs.
}, function(err, body) {
if (err) deferred.reject(err);
else deferred.resolve(body);
});
return deferred.promise.then(function(body) {
var finalArray = [];
// Map every item to either a recursive promise or an empty array, and
// wait for all of them before concatenating the levels together.
return Q.all(_.map(body.items, function(index) {
// NOTE(review): `this` is not the current item inside _.map's callback.
var item = this;
finalArray.push(item);
if(item.hasMore)
return start(item.id);
else
return [];
})).then(function(moreResults) {
// Flatten the recursive results onto this level's items.
return finalArray.concat.apply(finalArray, moreResults);
});
});
}
start(1).then(function(finalArray) {
console.log('All Done');
});

Structuring promises within angularjs

I have done a lot of reading around this, but ultimately the tutorials and guides I have found differ too much for me to get a decent grasp on this concept.
This is what I want to achieve:
1) Simple http request from our server [Any API for demonstration]
2) Run a function with data from (1). [Remove a property from the object]
3) Use result and length of (2) to run a loop of $http requests to our server. [Or any server]
4) This will result in 6 different objects. Run a function on these 6 objects. [Add a property]
5) Once ALL of this is done, run a separate function [Log "finished"]
How can this be achieved using promises? How do I pass data from (1) via a promise to (2)? Is this the right way to achieve what I need to do?
If anyone can show me how this should be structured it would be immensely helpful; I have kept the functions as simple as possible for this question.
Yes, promises are very nice to structure solutions for this kind of problems.
Simplified solution (more or less pseudo-code):
$http(...)
.then(function(response) {
// do something with response, for example:
var list = reponse.data.list;
// return it so that you can use it in the next 'then'.
return list;
})
.then(function(list) {
var promises = [];
angular.forEach(list, function(item) {
// perform a request for each item
var promise = $http(...).then(function(itemResponse) {
itemResponse.extraProperty = true;
return itemResponse;
});
// we make an array of promises
promises.push(promise);
});
// combine all promises into one and return it for the next then()
return $q.all(promises);
})
.then(function(itemsList) {
// itemsList is now an array of all parsed item responses.
console.log(itemsList);
});
(Hopefully this is right, I did not tested it.)
As you can see, you can return values in a callback to pass it to the next then(), or you can pass a promise, and this will result in calling the next callback when it resolves. $q.all() is used to combine multiple promises into one and resolve if all are resolved.
Edit: I realised that you can optionally leave out these three lines:
return list;
})
.then(function(list) {
But it is nice syntax though, because the separation of tasks is more visible.
Check code below, it could contains syntax error, the important is the structure. Step3 contains multiple(6) $http requests, it waits until the last request response to return a unique response object (array) containing response for each $http requets.
//Step 1: fetch the initial object, then run steps 2-5 in order.
var Step1 = function () {
    $http.get('api/controller').success(function (resp) {
        var object1 = resp;
        Step2(object1);
        Step3(object1).then(function (resp) {
            // resp is an array with one $http response per request in Step3
            Step4(resp);
            Step5();
        });
    });
}
//Step2
var Step2 = function(obj){
    //do whatever with the object
}
//Step3: issue the six GETs and return one promise for all of them.
var Step3 = function (object1) {
    // FIX: the original read from `object` (undefined) and from made-up
    // fields url2..url6 — every element exposes the same `url` property.
    var get1 = $http.get(object1[0].url);
    var get2 = $http.get(object1[1].url);
    //...
    var get6 = $http.get(object1[5].url);
    // FIX: $q.all already returns a promise, so the deferred/resolve
    // wrapper (the "deferred anti-pattern") is unnecessary.
    return $q.all([get1, get2, /* ... */ get6]);
}
//Step4
var Step4 = function (resp) {
    // FIX: `resp` is the array itself (the original had resp.data.lenght —
    // two typos); each element is an $http response with its payload in .data.
    for (var i = 0; i < resp.length; i++) {
        DoWhatEver(resp[i].data);
    };
}
//Step5
var Step5 = function () {
    alert("Finished");
}
Step1(); //Call Step1 function
Don't know why you have difficulty implementing this, but maybe $q.all() is what you're missing:
var config1={method:'GET',url:'/api/...'};
$http(config1).success(function(resultsFrom1){
functionForResultsOf1(resultsFrom1);
})
var functionForResultsOf1 = function(resultsOf1){
//remove something from the result, assuming this is a synchronous operation
resultsOf1.splice()...;
var promises=makePromises(*pass whatever you want*);
$q.all(promises).then(function(aggregateOfAllCallsToServer){
angular.forEach(aggregateOfAllCallsToServer,function(data){
//do something to data from each call to the server
})
console.log("finished");
})
}
var makePromises = function(serverUrls){
var promises = [];
angular.forEach(serverUrls, function(url) {
var promise=$http({
url : '/api/'+url,
method: 'GET',
})
promises.push(promise);
});
return $q.all(promises);
}

Using $.Deferred() as a callback

in my project I'm not using callbacks instead I'm trying to use $.Deferred to have uniform logic across all application, I have lots places in my code where I do something like the following:
// Loads records from the local database and the server in parallel,
// streaming progress through request.deferred.notify and settling it
// when both operations finish.
function someMagicHandler(request) {
    var me = this;
    var sandbox = me.getSandbox();
    var options = request.options;
    var deferred = request.deferred;
    var result = [];
    var databaseOperation = sandbox.database.all('records').done(function (records) {
        // FIX: Array#concat does not mutate — the original discarded its
        // return value, leaving `result` empty. Reassign the concatenation.
        result = result.concat(records);
        deferred.notify(records);
    });
    var serverResponse;
    var serverOperation = sandbox.server.requestRecords(options).then(function (response) {
        // First we are trying to save received records to database
        serverResponse = response;
        result = result.concat(response.Records); // FIX: same concat bug as above
        deferred.notify(response.Records);
        return sandbox.database.put('records', response.Records);
    }).done(function() {
        sandbox.storage.setTimestamp('records', new Date(serverResponse.Timestamp));
    });
    $.when(databaseOperation, serverOperation).then(deferred.resolve, deferred.reject);
}
In this code I'm personally don't like one of the last lines:
$.when(databaseOperation, serverOperation).then(deferred.resolve, deferred.reject);
Is there a standard way to express:
$.when(databaseOperation, serverOperation).then(deferred);
which would essentially mean:
$.when(databaseOperation, serverOperation).then(deferred.resolve, deferred.reject, deferred.notify);
Edit
I've investigated this problem deeper, and it seems that below solution is ok only when you are not relying on deferred.progress() which has no memory and therefore will not return any data in case when subsequent async operation is complete synchronously.
Summary
If you are using $.Deferred() as a callback (i.e. when you rely on notify or progress functions and in that case you need pass it as an argument) than you will be obligated to use the ugly
blahblahblah.then(deferred.resolve, deferred.reject, deferred.notify)
You can just replace this
$.when(databaseOperation, serverOperation).then(deferred.resolve, deferred.reject);
with this:
request.deferred = $.when(databaseOperation, serverOperation);
And delete all references to the variable deferred, because $.when already creates a combined promise for you (as far as I can tell from the manual).

Categories

Resources