mongodb query inside node js eachOf loop - javascript

I want to have a nested DB query inside an eachOf loop, and it should run synchronously. I have tried many combinations, but nothing works inside the forEach loop.
async.eachOf(nc.virtual_devices, function (vd) {
///////// This code work fine /////////////
var domain = extractDomain(vd.api_url);
vd.raw_api_ip = vd.api_ip;
vd.api_ip = getProxiedPath(vd.api_ip);
vd.raw_api_url = vd.api_url;
vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
// Path to websocket notifications
vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;
//// THIS CODE IS NOT FINE //////////////
if (nc.type === 'XXX'){
var promise = new Promise (function (resolve,reject) {
console.log("********XX VD TYPE **********");
console.log(JSON.stringify(vd));
console.log("VD ID VALUE IS ", vd.id);
var newID = (vd.id).replace(/\d_/, "");
console.log("VD ID VALUE IS ", newID);
var _idofSubgroup;
var labeltoSearch = nc.type + ' ' + nc.version;
pattern = "/^" + newID + "/i";
test = _idofSubgroup;
pattern = newID;
console.log(pattern);
db.collection('subgroups').findOne({label: labeltoSearch}, function (err, result) {
console.log(result._id);
_idofSubgroup = result._id;
db.collection('exploreposts').find({subgroup: result.id_}, {title: {"$regex": pattern}}).toArray(function (err, results) {
console.log(results);
})
});
})
}
});
I tried using a promise inside it, but that is also a pain.
This is the code I tried, which is not working. Any suggestion would be appreciated — simply put, I am stuck in callback hell.
async.eachOf(nc.virtual_devices, function (vd) {
///////// This code work fine /////////////
var domain = extractDomain(vd.api_url);
vd.raw_api_ip = vd.api_ip;
vd.api_ip = getProxiedPath(vd.api_ip);
vd.raw_api_url = vd.api_url;
vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
// Path to websocket notifications
vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;
//// THIS CODE IS NOT FINE with promises also //////////////
if (nc.type === 'XXX'){
var promise = new Promise (function (resolve,reject) {
console.log("********XX VD TYPE **********");
console.log(JSON.stringify(vd));
console.log("VD ID VALUE IS ", vd.id);
var newID = (vd.id).replace(/\d_/, "");
console.log("VD ID VALUE IS ", newID);
var _idofSubgroup;
var labeltoSearch = nc.type + ' ' + nc.version;
pattern = "/^" + newID + "/i";
test = _idofSubgroup;
pattern = newID;
console.log(pattern);
db.collection('subgroups').findOne({label: labeltoSearch}, function (err, result) {
console.log(result._id);
_idofSubgroup = result._id;
resolve ({id_:_idofSubgroup,pattern1 : pattern});
})
});
promise.then (function(result) {
console.log(result.id_);
console.log(result.pattern1);
db.collection('exploreposts').find({subgroup: result.id_}, {title: {"$regex": result.pattern1}}).toArray(function (err, results) {
console.log(results);
})
},function (err){
console.log (err);
})
}
});

It doesn't seem you need to use async.eachOf, but async.each() or async.eachSeries().
This is untested but it would look something like
async.eachSeries(nc.virtual_devices, function iteratee(vd, cb) {
console.log('calling iteratee()')
var domain = extractDomain(vd.api_url);
vd.raw_api_ip = vd.api_ip;
vd.api_ip = getProxiedPath(vd.api_ip);
vd.raw_api_url = vd.api_url;
vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
// Path to websocket notifications
vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;
// skip the rest if type is XXX;
// you need to explicitedly call the original callback i.e. cb
// note the use of return to prevent execution of the rest of the code
if (nc.type !== 'XXX')
return cb(null); // or cb();
console.log("********XX VD TYPE **********");
console.log(JSON.stringify(vd));
console.log("VD ID VALUE IS ", vd.id);
var newID = (vd.id).replace(/\d_/, "");
console.log("VD ID VALUE IS ", newID);
// I have no idea what is going here
var _idofSubgroup;
var labeltoSearch = nc.type + ' ' + nc.version;
var pattern = "/^" + newID + "/i";
test = _idofSubgroup;
pattern = newID;
console.log(pattern);
// we're going to use waterfall here as you have 2 async operations, where one is dependent on the other
async.waterfall([
function getSubgroup(cb1) {
console.log('calling getSubgroup')
db.collection('subgroups').findOne({ label: labeltoSearch }, function (err, subgroup) {
// if an error occurs, stop waterfall-loop
// you do this by passing the error in the callback
// again note the use of return here to prevent execution of the rest of the code
if (err) return cb1(err);
// pass the data to the next task
cb1(null, subgroup, pattern);
});
},
function getPosts(subgroup, pattern, cb2) {
// we will only get here if the last task ^ went through
console.log('calling getPosts')
db.collection('exploreposts').find({ subgroup: subgroup._id, title: { $regex: pattern }}).toArray(function (err, posts) {
// if an error occurs, stop waterfall-loop
if (err) return cb2(err);
// do something with posts
console.log('posts', posts);
// otherwise, keep going
// since there are no more waterfall-tasks, waterfall ends
cb2();
});
}
], function (err) {
console.log('waterfall() done');
// if an error occurred during the waterfall-loop, it will come down here
// we will let the original callback i.e. cb deal with this error though
if (err) return cb(err);
// otherwise we're done
// we will let the original callback know everything went well by calling it without any error
cb();
});
// you could have also simply do
// ], cb);
}, function (err) {
console.log('eachSeries() done');
// handle any error that came
console.log(err);
// send response
});
I purposely name the variables and functions so that you get an idea.
Follow the logs if there are any issues.

Related

AWS S3 / Javascript callback issue

So, I'm having a problem with JavaScript asynchronous execution when making an API call to AWS S3.
I have a sequence of nested callbacks that are working fine up until a specific S3 call that my code is not waiting for. Here's my code:
getThumbUrls(contentIndex, function(data) {
console.log('Returning from getThumbUrls');
// let's just display thumbUrls[0] for now...
console.log('The thumbUrls are ' + data[0]);
});
getThumbUrls() looks like this:
function getThumbUrls(contentIndex, callback) {
console.log('Entering getThumbUrls');
var thumbUrls = [];
JSON.parse(contentIndex).forEach(videoKey => {
// get the thumbnail: bucket-name/thumbnails/<first-key>
console.log('videoKey = ' + videoKey);
getThumbFileName(videoKey, function(thumbFileName) {
console.log('Returning from getThumbFileName');
console.log('Returned thumb filename is ' + thumbFileName);
thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
});
});
callback(thumbUrls);
}
And getThumbFileName() looks like this:
function getThumbFileName(videoKey, callback) {
console.log('Entering getThumbFileName...');
const s3 = new AWS.S3({
apiVersion: '2006-03-01',
params: {
Bucket: 'my-bucket-name'
}
});
// Get the name of the file.
params = {
Bucket: 'my-bucket-name',
Delimiter: '/',
Prefix: videoKey + '/' + THUMBS_FOLDER,
MaxKeys: 1
};
var urlKey;
//console.log('listObjects params = ' + JSON.stringify(params, null, 4));
s3.listObjectsV2(params, (err, data) => {
if (err) {
console.log(err, err.stack);
callback(err);
return;
}
var thumbsKey = data.Contents;
// MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
thumbsKey.forEach(function (keys) {
console.log('thumbKey = ' + keys.Key);
urlKey = keys.Key;
});
});
callback(urlKey);
//callback('20161111-TheWind.jpg');
}
Obviously, what's happening is that execution doesn't wait for the s3.listObjectsV2 call to finish. I've verified that the entire flow works properly when all getThumbFileName() does is callback with the filename.
Would someone kindly show me how to force execution to wait for s3.listObjectsV2 to complete before calling back with undefined?
As discussed, you should avoid the callback approach when dealing with asynchronous operations inside iterations, due to its difficulty.
(You can skip this section if you don't want to know motivation behind promises approach).
Just to mention: in a callback approach, you have to wait for all callbacks to complete in your getThumbUrls(), using an if statement that checks whether every callback has been called, and only then call callback(thumbUrls); with all responses pushed into your thumbUrls array:
// Collects one thumbnail URL per video key and invokes `callback` exactly once,
// after every per-key lookup has completed.
// Fixes vs. the original: the key list is parsed once (the original re-parsed
// `contentIndex` inside every callback), and an empty key list now still
// invokes `callback` (the original never did, leaving the caller hanging).
function getThumbUrls(contentIndex, callback) {
  const videoKeys = JSON.parse(contentIndex);
  const thumbUrls = [];
  // counter which will increment by one for every completed lookup
  let counter = 0;
  if (videoKeys.length === 0) {
    // nothing to look up -- report the empty result immediately
    callback(thumbUrls);
    return;
  }
  videoKeys.forEach(videoKey => {
    getThumbFileName(videoKey, function (thumbFileName) {
      thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
      counter++;
      // check if all callbacks have already been called
      if (counter === videoKeys.length) {
        // right here, thumbUrls is filled with all responses
        callback(thumbUrls);
      }
    });
  });
}
So, you can make use of Promises; a Promise.all will be enough to handle all responses from the API. You can study them further online and check your code below, which uses a promise approach. I've added some comments to help you understand what is happening.
// when using promises, no callbacks is needed
getThumbUrls(contentIndex)
.then(function (data) {
console.log('Returning from getThumbUrls');
// let's just display thumbUrls[0] for now...
console.log('The thumbUrls are ' + data[0]);
})
// when using promises, no callbacks is needed
/**
 * Resolves with an array of thumbnail URLs, one per video key in
 * `contentIndex` (a JSON-encoded array of keys).
 * @param {string} contentIndex - JSON array of video keys.
 * @returns {Promise<string[]>} thumbnail URLs, in input order.
 */
function getThumbUrls(contentIndex) {
  console.log('Entering getThumbUrls');
  const videoKeys = JSON.parse(contentIndex);
  // One promise per key; Promise.all preserves input order in its result array.
  const urlPromises = videoKeys.map(function (videoKey) {
    console.log('videoKey = ' + videoKey);
    return getThumbFileName(videoKey).then(function (thumbFileName) {
      console.log('Returning from getThumbFileName');
      console.log('Returned thumb filename is ' + thumbFileName);
      return CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName;
    });
  });
  return Promise.all(urlPromises);
}
// when using promises, no callbacks is needed
/**
 * Looks up the first object key under the video's thumbnails folder in S3.
 * @param {string} videoKey - prefix of the video's folder in the bucket.
 * @returns {Promise<Object>} the first S3 listing entry (has a .Key property).
 */
function getThumbFileName(videoKey) {
  console.log('Entering getThumbFileName...');
  const s3 = new AWS.S3({
    apiVersion: '2006-03-01',
    params: {
      Bucket: 'my-bucket-name'
    }
  });
  // Get the name of the file.
  // (const here: the original assigned to an undeclared `params`, an implicit global.)
  const params = {
    Bucket: 'my-bucket-name',
    Delimiter: '/',
    Prefix: videoKey + '/' + THUMBS_FOLDER,
    MaxKeys: 1
  };
  // Most AWS SDK methods have a .promise() variant that returns a promise
  // instead of taking a callback.
  return s3.listObjectsV2(params).promise()
    .then(function (data) {
      // MaxKeys is 1, so the first entry is the only one we'll ever get.
      return data.Contents[0];
    })
    .catch(function (err) {
      // The original called an undefined `callback(err)` here (a ReferenceError)
      // and then resolved with undefined, silently swallowing the failure.
      // Log and rethrow so the caller's promise chain actually rejects.
      console.log(err, err.stack);
      throw err;
    });
}
Hope this helps you out in your study.
Would someone kindly show me how to force execution to wait
That's the wrong question. You are not trying to get execution to "wait," or, at least, you shouldn't be. You just need to call the callback in the right place -- inside the callback from s3.listObjectsV2(), not outside.
function getThumbFileName(videoKey, callback) {
...
s3.listObjectsV2(params, (err, data) => {
if (err) {
...
}
var thumbsKey = data.Contents;
// MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
thumbsKey.forEach(function (keys) {
console.log('thumbKey = ' + keys.Key);
urlKey = keys.Key;
});
callback(urlKey); // right
});
// wrong // callback(urlKey);
}
The way you wrote it, the callback fires after s3.listObjectsV2() begins to run -- not after it finishes (calls its own callback).

How we can use promises in node js?

In asynchronous programming we are used to callbacks and promises.
Here I am stuck on a problem that can probably be solved with promises. I googled a lot, but found nothing that solved my problem.
Here is my code, which sends push notifications to Android devices.
router.post('/check-notifications', function(req, res, next) {
var user_id = req.body.user_id;
var response = {};
var gcm = require('push-notify').gcm({
apiKey: gcm_apiKey,
retries: 0
});
connection.query('select device_id from devices where user_id = '+ user_id, function (err, result) {
if ( result.length ) {
for (var i = 0; i < result.length; i++) {
console.log(i + 'before notify');
gcm.send({
registrationId: result[i]['device_id'],
data: result[0]
});
console.log(i + 'before transmitted');
gcm.on('transmitted', function (result, message, registrationId) {
console.log('transmitted');
});
gcm.on('transmissionError', function (error, message, registrationId) {
console.log(message);
});
console.log(i + 'after notify');
}
}
});
response['success'] = true;
response['msg'] = 'sent successfully';
res.json(response);
});
Output :
0before notify
0before transmitted
0after notify
1before notify
1before transmitted
1after notify
transmitted
transmitted
transmitted
transmitted
And I think It should be like this.
0before notify
0before transmitted
transmitted
0after notify
1before notify
1before transmitted
transmitted
1after notify
You can use async.mapSeries method for chaining notifications. Replace for loop to:
// Chain the notifications one device at a time: async.mapSeries waits for
// each iteratee's callback before starting the next item.
async.mapSeries(result, function(item, callback) {
// Fire off the push for this device.
gcm.send({
registrationId: item['device_id'],
// NOTE(review): `data` is a free variable in this snippet -- the question's
// code sent result[0]; confirm what payload is actually intended.
data: data
});
// NOTE(review): gcm.on() attaches a NEW listener on every iteration and the
// earlier listeners are never removed, so they also fire for later
// transmissions and `callback` may be invoked more than once per item
// (async would then complain). Consider gcm.once(...) or removing the
// listener inside the handler -- TODO confirm against push-notify's API.
gcm.on('transmitted', function(result, message, registrationId) {
console.log('transmitted');
// Success path: report this message/registrationId back to mapSeries.
callback(null, message, registrationId);
});
gcm.on('transmissionError', function(error, message, registrationId) {
// Failure path: abort the series with the error.
callback(error, message, registrationId);
});
}, function (err, results) {
// Final callback: runs after every device has been processed (or on the
// first error).
if (err) throw err;
response['success'] = true;
response['msg'] = 'sent successfully';
res.json(response);
})
I recommend using Bluebird JS for Promise flow-control.
var Promise = require('bluebird'); // Require bluebird, and call it 'Promise', the code below is version 3.x syntax
var connection = {'query': '???'}; // assuming `connection` is already defined somewhere else
var gcm_apiKey = '???'; // assuming `gcm_apiKey` is already defined
router.post('/check-notifications', function (req, res, next) {
var user_id = req.body.user_id;
var gcm = require('push-notify').gcm({
apiKey: gcm_apiKey,
retries: 0
});
// assuming `connection` is already defined somewhere else
// Make an async version of connection.query
connection.queryAsync = Promise.promisify(connection.query);
connection.queryAsync('select device_id from devices where user_id = ' + user_id)
// Bluebird's Promise.map would execute the following block once per result, asynchronously.
// The sequence of who runs first and who completes first is undefined
.map(function (result, i) {
// the `result` argument here is `result[i]` of the original code, since we're in the map context
// Here we have to create a promise to consume events
return new Promise(function (resolve, reject) {
console.log(i + 'before notify');
gcm.send({
registrationId: result['device_id'],
data: result // original code is written as result[0], which I don't quite understand. Always sending the first result?
});
// This does not make sense console logging here, as it is not actually 'before transmitted'
// It's just binding onto the event
// console.log(i + 'before transmitted');
gcm.on('transmitted', function (result, message, registrationId) {
// Check registrationId
if (registrationId === result['device_id']) {
console.log('transmitted');
resolve(result); // use 'result' as the Promise's resolved value
}
});
gcm.on('transmissionError', function (error, message, registrationId) {
// Check registrationId
if (registrationId === result['device_id']) {
console.log(message);
reject(message); // reject errors and send the message as the promise's reject reason
}
});
// Technically, you should log it as "after event binding"
console.log(i + 'after notify');
});
}).then(function (results) {
// `results` should contain all the result from the 'transmitted' event
var response = {};
response['success'] = true;
response['msg'] = 'sent successfully';
res.json(response);
});
});
Note: This is actually more or less doable without any libraries, using native Promises, but the syntax would be more cluttered.

How to return the response of multiple asynchronous calls?

Similar to this question : Return response from async call
Except that the call is within a loop that calls multiple time the asynchronous function.
Specifically, how can the value of s be returned? This code returns undefined. This function is called within a for loop. The library used for ORM is Bookshelfjs. Thanks for the help.
function getUsernameFromDBAsync(userId) {
var s = "moo";
new Model.Users({
idUser: userId
})
.fetch()
.then(function(u) {
var prenom = u.get('firstName');
var nom = u.get('familyName');
s = prenom + " " + nom;
return s;
});
}
You aren't really showing how you're doing the loop, which makes it a little harder to guess what to recommend. But assuming .fetch().then() returns a promise, here's a general idea with the standard ES6 promises built into node.js:
/**
 * Fetches the user row for `userId` and resolves with the display name
 * "firstName familyName".
 * @param {*} userId - primary key of the user to look up.
 * @returns {Promise<string>} the user's full name.
 */
function getUsernameFromDBAsync(userId) {
  const user = new Model.Users({ idUser: userId });
  return user.fetch().then(function (u) {
    // Build the display name from the two name columns.
    return u.get('firstName') + " " + u.get('familyName');
  });
}
var userIds = [...];
var promises = [];
for (var i = 0; i < userIds.length; i++) {
promises.push(getUsernameFromDBAsync(userIds[i]));
}
// now set up a .then() handler for when all the promises are done
Promise.all(promises).then(function(names) {
// process names array here
}, function(err) {
// process error here
});
If you are using the Bluebird promise library, you could do it a little bit more simply like this:
// Resolves with the user's full name ("firstName familyName") looked up by id.
function getUsernameFromDBAsync(userId) {
  return new Model.Users({ idUser: userId })
    .fetch()
    .then(function (u) {
      var prenom = u.get('firstName');
      var nom = u.get('familyName');
      var fullName = prenom + " " + nom;
      return fullName;
    });
}
var userIds = [...];
Promise.map(userIds, getUsernameFromDbAsync).then(function(names) {
// process names array here
}, function(err) {
// process error here
});
Var s is unnecessary. Simply return the "prénom nom" string from then's success callback.
// Looks the user up by id and resolves with "firstName familyName".
function getUsernameFromDBAsync(userId) {
  const pending = new Model.Users({ idUser: userId }).fetch();
  return pending.then(function (u) {
    const prenom = u.get('firstName');
    const nom = u.get('familyName');
    return prenom + ' ' + nom;
  });
}
Your "loop" can be achieved with Array.prototype.map() to generate an array of promises, followed by Promise.all(promises).then(...), to receive and handle an array of names when all the promises have resolved.
var promises = userIds.map(function(userId) {
return getUsernameFromDBAsync(userId);
});
Promise.all(promises).then(function(names) {
// do something with `names`, which is an array of `prénom nom` strings.
}, function(err) {
// handler error, eg ...
console.error(error);
});
Or more succinctly :
Promise.all(userIds.map(getUsernameFromDBAsync)).then(function(names) {
// do something with `names`, which is an array of `prénom nom` strings.
}, function(err) {
// handler error, eg ...
console.error(error);
});

Getting values within a nest for-loop - node.js, javascript, redis, Q library

I am trying to extract values out of a nested for-loop. My loop takes values from Redis, and I want to add these values to an array variable called "info".
The important bit is the for-loop.
app.get('/query', function(req, res) {
var info = [];
redisClient.keys("todo:*", function(err, data) {
if (err) return console.log(err);
for (var i = 0, len = data.length; i < len; i++) {
var id = data[i];
var listItem, author, goodness;
redisClient.hgetall(data[i], function(err, obj) {
listItem = obj.listItem;
author = obj.author;
goodness = {
id: id,
listItem: listItem,
author: author
}
info.push(goodness);
console.log("In here: " + info);
});
console.log("Out here: " + info);
}
console.log("Way out here: " + info);
});
console.log("WAY THE HECK OUT HERE: " + info);
});
Basically, I want the values in the variable "goodness" to be pushed to an array variable called "info". when I execute the code, the info array gets filled up here,
console.log("In here: " + info);
but I have not found a way to extract the info array to have values outside of the redisClient.hgetall() function. I have tried return statements to no avail, though as a beginning programmer there is a decent chance I'm not doing these properly.
NOTE: I have tried to take guidance from the original answer and I must be doing something wrong, or the solution given wasn't good enough, or both. I have added the Q library to my project, and have tried to find a solution. Here is my current code:
app.get('/query', function(req, res) {
var redisKeys = Q.nbind(redisClient.keys, redisClient);
var redisHGetAll = Q.nbind(redisClient.hgetall, redisClient);
var id, listItem, author;
var info = [];
redisKeys('todo:*').then(function (data) {
console.log("First: " + data);
var QAll = Q.all(data.map(processKeysFunc(info)));
console.log("Reading within this: " + data);
console.log("Next within this: " + QAll);
}, function (err) {
if (err) return console.log(err);
}).then(function () {
console.log("Finally: " + data);
})();
function processKeysFunc(array) {
return function (key) {
console.log("This is the key: " + key);
return redisHGetall(key).then(function (obj) {
console.log("This is the obj: " + obj);
array.push({
id: obj.id,
listItem: obj.listItem,
author: obj.author
});
});
};
}
});
And this is what I get within my console.log:
First: todo:281f973d-6ffd-403b-a0f4-9e8958caed35,todo:7ed8c557-0f15-4555-9119-
6777e1c952e8,todo:eb8dbee1-92ca-450e-8248-ad78548cd754,todo:712e8d27-bf9b-46f0-bfdd-
c53ef7d14441,todo:301dd91a-2b65-4b87-b129-a5ad569e38e5,todo:720d98b8-bdec-446d-a178-
fb7e264522aa,todo:d200c6cf-2ee5-443b-b7dc-d245c16899c8,todo:8169e9af-0204-42c8-9ddf-
3b00f7161b11
This is the key: todo:281f973d-6ffd-403b-a0f4-9e8958caed35
node.js is generally non-blocking, this is why callbacks are used. The callback passed to .hgetall will only execute when the data from redis has been fully received. The rest of the code around it will execute immediately and not wait for the data from redis. In fact, since the .hgetall call likely involves IO the callback will always run after the code around has executed.
You have a few options, including using Promises (https://github.com/kriskowal/q).
I'll suggest a solution that should be comprehensible the current structure of your code:
app.get('/query', function(req, res) {
var info = [];
var completed = 0;
redisClient.keys("todo:*", function(err, data) {
if (err) return console.log(err);
for (var i = 0, len = data.length; i < len; i++) {
var id = data[i];
var listItem, author, goodness;
redisClient.hgetall(data[i], function(err, obj) {
completed++;
if (err) return console.log(err);
listItem = obj.listItem;
author = obj.author;
goodness = {
id: id,
listItem: listItem,
author: author
}
info.push(goodness);
console.log("In here: " + info);
if (completed === data.length) {
// Do something with info here, all work has finished
}
});
console.log("Out here: " + info);
}
console.log("Way out here: " + info);
});
console.log("WAY THE HECK OUT HERE: " + info);
});
The key bit is the new variable completed that tracks how many callbacks have returned.
I would highly recommend using promises instead, though. Something like:
var Q = require('q');
// Promisified redis client methods (callback style -> promise style).
var redisKeys = Q.nbind(redisClient.keys, redisClient);
var redisHGetall = Q.nbind(redisClient.hgetall, redisClient);
app.get('/query', function(req, res) {
  var info = [];
  redisKeys('todo:*').then(function (data) {
    // Fan out one hgetall per key; Q.all resolves once all of them finish.
    // (The original line was missing a closing parenthesis -- a syntax error.)
    return Q.all(data.map(processKeysFunc(info)));
  }, function (err) { /* handle error */ }).then(function () {
    console.log('Complete info array=%j', info);
    res.setHeader('Content-Type', 'application/json');
    res.end(JSON.stringify(info));
  });
});
/**
 * Builds an iteratee that fetches the Redis hash for a key and pushes an
 * {id, listItem, author} record onto the shared `array`.
 * The returned promise resolves once the record has been pushed.
 */
function processKeysFunc(array) {
  return function (key) {
    return redisHGetall(key).then(function (obj) {
      array.push({
        id: key,
        listItem: obj.listItem,
        author: obj.author
      });
    });
  };
}
processKeysFunc returns a function so that you can cleanly pass the info array around without needing to define every function inline. If you prefer inline, that works too though.

Calling done on an array of http.get requests in Node.js

I have an array of URLs that I'm using a for loop to call http.get requests. Since this is an async process, I'd like to call done after ALL requests have returned.
Here is my current attempt:
grunt.registerTask('verify', function() {
var done = this.async();
var promises = [];
var urlPrefix = 'http://example.com/';
for(var i = 0; i < deployableFiles.length; i++) {
(function(i) {
var deferred = Q.defer();
promises.push(deferred);
var file = deployableFiles[i];
var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);
http.get(urlPrefix + path, function(res) {
deferred.resolve();
if(res.statusCode === 200) {
grunt.log.oklns(path + ' was found on production server.');
} else {
grunt.log.error('Error! ' + path + ' was not found on production server!');
}
}).on('error', function(e) {
grunt.log.error("Got error: " + e.message);
done();
});
})(i);
}
Q.all(promises)
.done(function() {
// Everything executed correctly
return done();
}, function(reason) {
// There was an error somewhere
return done(false);
});
});
I'm sure it's just me not wrapping my head around the whole async nature of node correctly, but is there anything glaringly obvious to anyone else?
I've searched about using http with the Q library, and it appears it might be required to use Q.nfcall to get this to work. I'm just having trouble seeing WHY I'd have to do that. (I'm not adverse to actually doing that, I'm more curious than anything else)
Thanks!
If this is not a typo, promises.push(deferred) should instead push the promise: promises.push(deferred.promise).
function foo() {
...
return defer.promise;
}
// => foo().then(function() ...);
Q.all([
foo(),
foo(),
...
]).done(function() ...);
Q.all expects an array of promises. https://github.com/kriskowal/q#combination
Q.nfcall is just sugar around that if
working with functions that make use of the Node.js callback pattern, where callbacks are in the form of function(err, result)
https://github.com/kriskowal/q#adapting-node
You should always perform promisification at the lowest level possible. That makes reasoning about concurrency a lot easier.
/**
 * Resolves when a GET to `url` answers with HTTP 200, rejects otherwise.
 * @param {string} url - absolute URL to ping.
 * @returns {Promise<void>}
 */
function getPing(url){
  return new Q.Promise(function(resolve,reject){
    http.get(url,function(res){
      // note this will _not_ wait for the whole request
      // but just the headers.
      if(res.statusCode === 200) resolve();
      // Reject with an Error carrying the status -- the original called
      // reject() with no reason, which makes failures impossible to diagnose.
      else reject(new Error('Unexpected status code ' + res.statusCode + ' for ' + url));
    }).on('error', function(err){
      // Network-level failure emits 'error' on the request object; without
      // this handler the promise would never settle (and node may crash).
      reject(err);
    });
  });
}
This would let you do:
grunt.registerTask('verify', function() {
var done = this.async();
var urlPrefix = 'http://example.com/';
var pings = deployableFiles.map(function(file){
var path = file.filetype + '/' +
getVersionedFileName(file.basename, file.filetype);
return getPing(urlPrefix + path);
});
Q.all(pings).then(done).catch(function(reason) {
// There was an error somewhere
// this will happen as soon as _one_ promise rejected
return done(false);
});
});
This can be further shortened by using a better promise library like Bluebird.
You can also do this with async:
var urlPrefix = 'http://example.com/';
async.each(deployableFiles, function(file, cb) {
var path = file.filetype
+ '/'
+ getVersionedFileName(file.basename, file.filetype);
http.get(urlPrefix + path, function(res) {
if (res.statusCode === 200)
grunt.log.oklns(path + ' was found on production server.');
else
grunt.log.error('Error! ' + path + ' was not found on production server!');
cb();
}).on('error', function(e) {
grunt.log.error("Got error: " + e.message);
cb(e);
});
}, function(err) {
// all done
if (err) throw err;
// all successful
});

Categories

Resources