Calling done on an array of http.get requests in Node.js

I have an array of URLs, and I'm using a for loop to make an http.get request for each one. Since this is an async process, I'd like to call done after ALL requests have returned.
Here is my current attempt:
grunt.registerTask('verify', function() {
    var done = this.async();
    var promises = [];
    var urlPrefix = 'http://example.com/';

    for (var i = 0; i < deployableFiles.length; i++) {
        (function(i) {
            var deferred = Q.defer();
            promises.push(deferred);

            var file = deployableFiles[i];
            var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);
            http.get(urlPrefix + path, function(res) {
                deferred.resolve();
                if (res.statusCode === 200) {
                    grunt.log.oklns(path + ' was found on production server.');
                } else {
                    grunt.log.error('Error! ' + path + ' was not found on production server!');
                }
            }).on('error', function(e) {
                grunt.log.error("Got error: " + e.message);
                done();
            });
        })(i);
    }

    Q.all(promises)
        .done(function() {
            // Everything executed correctly
            return done();
        }, function(reason) {
            // There was an error somewhere
            return done(false);
        });
});
I'm sure it's just me not wrapping my head around the whole async nature of node correctly, but is there anything glaringly obvious to anyone else?
I've searched about using http with the Q library, and it appears it might be required to use Q.nfcall to get this to work. I'm just having trouble seeing WHY I'd have to do that. (I'm not averse to actually doing that; I'm more curious than anything else.)
Thanks!

If this is not a typo, promises.push(deferred) should push the promise instead: promises.push(deferred.promise).
function foo() {
    ...
    return deferred.promise;
}
// => foo().then(function() ...);

Q.all([
    foo(),
    foo(),
    ...
]).done(function() ...);
Q.all expects an array of promises. https://github.com/kriskowal/q#combination
Q.nfcall is just sugar around that when
working with functions that make use of the Node.js callback pattern, where callbacks are in the form of function(err, result)
https://github.com/kriskowal/q#adapting-node
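For example, promisifying fs.readFile becomes a one-liner (a minimal sketch, assuming the standard Q and fs modules):
var Q = require('q');
var FS = require('fs');

// Q.nfcall invokes a function(err, result)-style API and returns a promise,
// so there is no manual deferred bookkeeping:
Q.nfcall(FS.readFile, "foo.txt", "utf-8").then(function (contents) {
    console.log(contents);
});
Note that http.get does not follow that pattern anyway (its callback only receives the response, and errors are emitted as 'error' events), which is why Q.nfcall would not buy you much here.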

You should always perform promisification at the lowest level possible. That makes reasoning about concurrency a lot easier.
function getPing(url) {
    return new Q.Promise(function(resolve, reject) {
        http.get(url, function(res) {
            // note this will _not_ wait for the whole request
            // but just the headers.
            if (res.statusCode === 200) resolve();
            else reject();
        }).on('error', reject); // also reject on connection errors, or the promise never settles
    });
}
This would let you do:
grunt.registerTask('verify', function() {
    var done = this.async();
    var urlPrefix = 'http://example.com/';

    var pings = deployableFiles.map(function(file) {
        var path = file.filetype + '/' +
                   getVersionedFileName(file.basename, file.filetype);
        return getPing(urlPrefix + path);
    });

    Q.all(pings).then(done).catch(function(reason) {
        // There was an error somewhere
        // this will happen as soon as _one_ promise rejected
        return done(false);
    });
});
This can be further shortened by using a better promise library like Bluebird.
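For instance, with Bluebird the whole task shrinks to a single Promise.map (an untested sketch; getPing is the helper above):
var Promise = require('bluebird');

Promise.map(deployableFiles, function (file) {
    var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);
    return getPing(urlPrefix + path);
}).then(function () {
    done();
}).catch(function () {
    done(false);
});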

You can also do this with async:
var urlPrefix = 'http://example.com/';

async.each(deployableFiles, function(file, cb) {
    var path = file.filetype
        + '/'
        + getVersionedFileName(file.basename, file.filetype);
    http.get(urlPrefix + path, function(res) {
        if (res.statusCode === 200)
            grunt.log.oklns(path + ' was found on production server.');
        else
            grunt.log.error('Error! ' + path + ' was not found on production server!');
        cb();
    }).on('error', function(e) {
        grunt.log.error("Got error: " + e.message);
        cb(e);
    });
}, function(err) {
    // all done
    if (err) throw err;
    // all successful
});
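A related note: if the list of files is long, async.eachLimit caps how many requests run at once (a sketch reusing the same helpers, with an assumed limit of 5):
async.eachLimit(deployableFiles, 5, function (file, cb) {
    var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);
    http.get(urlPrefix + path, function (res) {
        // treat any non-200 as a failure for this file
        cb(res.statusCode === 200 ? null : new Error(path + ' was not found'));
    }).on('error', cb);
}, function (err) {
    if (err) throw err;
});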

Related

AWS S3 / Javascript callback issue

So, I'm having a problem with JavaScript asynchronous execution when making an API call to AWS S3.
I have a sequence of nested callbacks that are working fine up until a specific S3 call that my code is not waiting for. Here's my code:
getThumbUrls(contentIndex, function(data) {
    console.log('Returning from getThumbUrls');
    // let's just display thumbUrls[0] for now...
    console.log('The thumbUrls are ' + data[0]);
});
getThumbUrls() looks like this:
function getThumbUrls(contentIndex, callback) {
    console.log('Entering getThumbUrls');
    var thumbUrls = [];

    JSON.parse(contentIndex).forEach(videoKey => {
        // get the thumbnail: bucket-name/thumbnails/<first-key>
        console.log('videoKey = ' + videoKey);
        getThumbFileName(videoKey, function(thumbFileName) {
            console.log('Returning from getThumbFileName');
            console.log('Returned thumb filename is ' + thumbFileName);
            thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
        });
    });

    callback(thumbUrls);
}
And getThumbFileName() looks like this:
function getThumbFileName(videoKey, callback) {
    console.log('Entering getThumbFileName...');
    const s3 = new AWS.S3({
        apiVersion: '2006-03-01',
        params: {
            Bucket: 'my-bucket-name'
        }
    });

    // Get the name of the file.
    params = {
        Bucket: 'my-bucket-name',
        Delimiter: '/',
        Prefix: videoKey + '/' + THUMBS_FOLDER,
        MaxKeys: 1
    };

    var urlKey;
    //console.log('listObjects params = ' + JSON.stringify(params, null, 4));
    s3.listObjectsV2(params, (err, data) => {
        if (err) {
            console.log(err, err.stack);
            callback(err);
            return;
        }
        var thumbsKey = data.Contents;
        // MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
        thumbsKey.forEach(function (keys) {
            console.log('thumbKey = ' + keys.Key);
            urlKey = keys.Key;
        });
    });

    callback(urlKey);
    //callback('20161111-TheWind.jpg');
}
Obviously, what's happening is that execution doesn't wait for the s3.listObjectsV2 call to finish. I've verified that the entire flow works properly when all getThumbFileName() does is callback with the filename.
Would someone kindly show me how to force execution to wait for s3.listObjectsV2 to complete before calling back with undefined?
As discussed, you should avoid the callback approach when dealing with asynchronous operations inside iterations, because of how hard they are to coordinate.
(You can skip this section if you don't want to know the motivation behind the promise approach.)
Just to illustrate: in a callback approach you have to wait for all the callbacks to complete inside getThumbUrls(), using an if that checks whether every callback has fired, and only then call callback(thumbUrls); with all responses pushed into your thumbUrls array:
function getThumbUrls(contentIndex, callback) {
    const videoKeys = JSON.parse(contentIndex); // parse once, so the length check below is cheap
    const thumbUrls = [];
    // counter which will increment by one for every callback
    let counter = 0;

    videoKeys.forEach(videoKey => {
        getThumbFileName(videoKey, function (thumbFileName) {
            thumbUrls.push(CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName);
            // for each callback response, add 1 to the counter and then
            counter++;
            // check whether all callbacks have already been called
            if (counter === videoKeys.length) {
                // right here, thumbUrls is filled with all responses
                callback(thumbUrls);
            }
        });
    });
}
So, you can make use of promises, and a Promise.all will be enough to handle all the responses from the API. Your code is rewritten below using the promise approach; I've added some comments to help you understand what is happening.
// when using promises, no callbacks are needed
getThumbUrls(contentIndex)
    .then(function (data) {
        console.log('Returning from getThumbUrls');
        // let's just display thumbUrls[0] for now...
        console.log('The thumbUrls are ' + data[0]);
    });
// when using promises, no callbacks are needed
function getThumbUrls(contentIndex) {
    console.log('Entering getThumbUrls');
    // not needed anymore, Promise.all will return all values
    // var thumbUrls = [];

    // Promise.all receives an array of promises and passes all results to the next .then()
    // changing forEach to map to return promises to my Promise.all
    return Promise.all(JSON.parse(contentIndex).map(videoKey => {
        console.log('videoKey = ' + videoKey);
        // returning a promise
        return getThumbFileName(videoKey)
            .then(function (thumbFileName) {
                console.log('Returning from getThumbFileName');
                console.log('Returned thumb filename is ' + thumbFileName);
                return CLOUDFRONT_URL + videoKey + '/thumbnails/' + thumbFileName;
            });
    }));
}
// when using promises, no callbacks are needed
function getThumbFileName(videoKey) {
    console.log('Entering getThumbFileName...');
    const s3 = new AWS.S3({
        apiVersion: '2006-03-01',
        params: {
            Bucket: 'my-bucket-name'
        }
    });

    // Get the name of the file.
    const params = {
        Bucket: 'my-bucket-name',
        Delimiter: '/',
        Prefix: videoKey + '/' + THUMBS_FOLDER,
        MaxKeys: 1
    };

    // urlKey not needed anymore
    // var urlKey;

    // most AWS SDK methods have a .promise() method which returns a promise instead of calling callback functions
    return s3.listObjectsV2(params).promise()
        .then(function (data) {
            var thumbsKey = data.Contents;
            // if you want to return only the first thumbsKey:
            return thumbsKey[0];
        })
        .catch(function (err) {
            console.log(err, err.stack);
            // no callback here anymore; rethrow so the caller's .catch() sees the error
            throw err;
        });
}
Hope this helps you out in your study.
Would someone kindly show me how to force execution to wait
That's the wrong question. You are not trying to get execution to "wait," or, at least, you shouldn't be. You just need to call the callback in the right place -- inside the callback from s3.listObjectsV2(), not outside.
function getThumbFileName(videoKey, callback) {
    ...
    s3.listObjectsV2(params, (err, data) => {
        if (err) {
            ...
        }
        var thumbsKey = data.Contents;
        // MaxKeys was 1 bc first thumbnail key is good enough for now. Therefore, only one iteration.
        thumbsKey.forEach(function (keys) {
            console.log('thumbKey = ' + keys.Key);
            urlKey = keys.Key;
        });
        callback(urlKey); // right
    });
    // wrong // callback(urlKey);
}
The way you wrote it, the callback fires after s3.listObjectsV2() begins to run -- not after it finishes (calls its own callback).

mongodb query inside node js eachOf loop

I want to have a nested DB query inside an eachOf loop, which should be synchronous. I have tried so many combinations, but nothing works inside the forEach loop.
async.eachOf(nc.virtual_devices, function (vd) {
    ///////// This code works fine /////////////
    var domain = extractDomain(vd.api_url);
    vd.raw_api_ip = vd.api_ip;
    vd.api_ip = getProxiedPath(vd.api_ip);
    vd.raw_api_url = vd.api_url;
    vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
    // Path to websocket notifications
    vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;

    //// THIS CODE IS NOT FINE //////////////
    if (nc.type === 'XXX') {
        var promise = new Promise(function (resolve, reject) {
            console.log("********XX VD TYPE **********");
            console.log(JSON.stringify(vd));
            console.log("VD ID VALUE IS ", vd.id);
            var newID = (vd.id).replace(/\d_/, "");
            console.log("VD ID VALUE IS ", newID);
            var _idofSubgroup;
            var labeltoSearch = nc.type + ' ' + nc.version;
            pattern = "/^" + newID + "/i";
            test = _idofSubgroup;
            pattern = newID;
            console.log(pattern);
            db.collection('subgroups').findOne({label: labeltoSearch}, function (err, result) {
                console.log(result._id);
                _idofSubgroup = result._id;
                db.collection('exploreposts').find({subgroup: result.id_}, {title: {"$regex": pattern}}).toArray(function (err, results) {
                    console.log(results);
                });
            });
        });
    }
});
I tried with a promise inside it, but that is also a pain.
This is the code I tried, which is not working. Any suggestion would be appreciated; simply said, I have been stuck in callback hell.
async.eachOf(nc.virtual_devices, function (vd) {
    ///////// This code works fine /////////////
    var domain = extractDomain(vd.api_url);
    vd.raw_api_ip = vd.api_ip;
    vd.api_ip = getProxiedPath(vd.api_ip);
    vd.raw_api_url = vd.api_url;
    vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
    // Path to websocket notifications
    vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;

    //// THIS CODE IS NOT FINE with promises also //////////////
    if (nc.type === 'XXX') {
        var promise = new Promise(function (resolve, reject) {
            console.log("********XX VD TYPE **********");
            console.log(JSON.stringify(vd));
            console.log("VD ID VALUE IS ", vd.id);
            var newID = (vd.id).replace(/\d_/, "");
            console.log("VD ID VALUE IS ", newID);
            var _idofSubgroup;
            var labeltoSearch = nc.type + ' ' + nc.version;
            pattern = "/^" + newID + "/i";
            test = _idofSubgroup;
            pattern = newID;
            console.log(pattern);
            db.collection('subgroups').findOne({label: labeltoSearch}, function (err, result) {
                console.log(result._id);
                _idofSubgroup = result._id;
                resolve({id_: _idofSubgroup, pattern1: pattern});
            });
        });

        promise.then(function (result) {
            console.log(result.id_);
            console.log(result.pattern1);
            db.collection('exploreposts').find({subgroup: result.id_}, {title: {"$regex": result.pattern1}}).toArray(function (err, results) {
                console.log(results);
            });
        }, function (err) {
            console.log(err);
        });
    }
});
It doesn't seem you need async.eachOf(); async.each() or async.eachSeries() will do.
This is untested but it would look something like
async.eachSeries(nc.virtual_devices, function iteratee(vd, cb) {
    console.log('calling iteratee()');
    var domain = extractDomain(vd.api_url);
    vd.raw_api_ip = vd.api_ip;
    vd.api_ip = getProxiedPath(vd.api_ip);
    vd.raw_api_url = vd.api_url;
    vd.api_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.api_url.split(domain)[1];
    // Path to websocket notifications
    vd.ws_url = nconf.get('PROXIED_PATH_SCHEME') + vd.api_ip + vd.notification_base_uri;

    // skip the rest if type is not XXX;
    // you need to explicitly call the original callback i.e. cb
    // note the use of return to prevent execution of the rest of the code
    if (nc.type !== 'XXX')
        return cb(null); // or cb();

    console.log("********XX VD TYPE **********");
    console.log(JSON.stringify(vd));
    console.log("VD ID VALUE IS ", vd.id);
    var newID = (vd.id).replace(/\d_/, "");
    console.log("VD ID VALUE IS ", newID);
    // I have no idea what is going on here
    var _idofSubgroup;
    var labeltoSearch = nc.type + ' ' + nc.version;
    var pattern = "/^" + newID + "/i";
    test = _idofSubgroup;
    pattern = newID;
    console.log(pattern);

    // we're going to use waterfall here as you have 2 async operations, where one is dependent on the other
    async.waterfall([
        function getSubgroup(cb1) {
            console.log('calling getSubgroup');
            db.collection('subgroups').findOne({ label: labeltoSearch }, function (err, subgroup) {
                // if an error occurs, stop the waterfall-loop
                // you do this by passing the error in the callback
                // again note the use of return here to prevent execution of the rest of the code
                if (err) return cb1(err);
                // pass the data to the next task
                cb1(null, subgroup, pattern);
            });
        },
        function getPosts(subgroup, pattern, cb2) {
            // we will only get here if the last task ^ went through
            console.log('calling getPosts');
            db.collection('exploreposts').find({ subgroup: subgroup._id, title: { $regex: pattern } }).toArray(function (err, posts) {
                // if an error occurs, stop the waterfall-loop
                if (err) return cb2(err);
                // do something with posts
                console.log('posts', posts);
                // otherwise, keep going
                // since there are no more waterfall-tasks, waterfall ends
                cb2();
            });
        }
    ], function (err) {
        console.log('waterfall() done');
        // if an error occurred during the waterfall-loop, it will come down here
        // we will let the original callback i.e. cb deal with this error though
        if (err) return cb(err);
        // otherwise we're done
        // we will let the original callback know everything went well by calling it without any error
        cb();
    });
    // you could have also simply done
    // ], cb);
}, function (err) {
    console.log('eachSeries() done');
    // handle any error that came
    console.log(err);
    // send response
});
I purposely named the variables and functions so that you get the idea.
Follow the logs if there are any issues.
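As a side note: recent versions of the MongoDB driver return a promise when you omit the callback, so the two dependent queries could also be chained without the waterfall (an untested sketch, assuming your driver version supports promises):
db.collection('subgroups').findOne({ label: labeltoSearch })
    .then(function (subgroup) {
        return db.collection('exploreposts')
            .find({ subgroup: subgroup._id, title: { $regex: pattern } })
            .toArray();
    })
    .then(function (posts) {
        console.log('posts', posts);
        cb(); // let eachSeries() move on
    })
    .catch(cb); // any error ends up in the final callback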

Handling multiple requests in a loop causes sync issues

I call getLogs() through a POST request. It gets a list of LogFileIDs (filenames) from a DB, and then for each LogFileID I make an additional request that returns a signed URL for that ID. I push the URLs one by one into a global array and return the array in the final response.
I know it's incorrect to use setTimeout, but the problem is that without it I get a different result in the array every time. What can I do to resolve this issue? How do I correct this code so that the loop only iterates to the next item once the signed URL has been stored in the global array?
function _getLogFileUrls(logFileId, callback) {
    var request = require('request'),
        config = require('../../config.js');

    var fileParams = {
        fileName: 'xyzdirectory/' + logFileId
    };

    request.post({
        url: config.filesServiceUrl + 'get-logfile-urls',
        json: fileParams
    }, function(error, response, body) {
        if (!error && response.statusCode === 200) {
            callback(body);
        } else {
            res.status(400).send('Error requesting file service for logs:');
        }
    }).on('error', function(err) {
        console.log('File service error for Logs: ' + err);
    });
}
function getLogs(req, res) {
    if (!req.body.id) {
        return res.status(400).send('Please check the params!');
    }

    var date;
    if (req.body.date) {
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'",
        resArray = [];

    hitThisQueryForMe(sqlQuery, res, function(rows) {
        if (!rows.length) res.json(rows);

        _.each(rows, function(item) {
            console.log('item: ' + item.LogFileID);
            _getLogFileUrls(item.LogFileID, function(response) {
                resArray.push(response);
            });
        });

        setTimeout(function() {
            res.send(resArray);
            resArray = [];
        }, 4000);
    });
}
SQL injection alert
First of all, your code has a serious SQL injection vulnerability. Never use string concatenation to build SQL from user-provided data, or anyone will be able to read, modify and delete anything in your database. This is a very serious security issue. For more details see these answers:
cannot use backtick when using nodejs 7.3.0
How to escape mysql special characters with sockets.io/node.js/javascript
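For example, with the mysql module, ? placeholders let the driver escape the values for you (a sketch; the connection object is assumed, and hitThisQueryForMe would need to pass the values array through):
connection.query(
    "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = ? AND DATE(`Timestamp`) = ?",
    [req.body.id, date],
    function (err, rows) {
        // rows is fetched without user input ever being spliced into the SQL
    }
);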
The answer
Now to answer your question. To handle what you're trying to do here, you should either stick to callbacks and use a good module to handle concurrency, like Async:
https://caolan.github.io/async/
Or you can use promises with a good module to help with concurrency like Q or Bluebird:
http://documentup.com/kriskowal/q/
http://bluebirdjs.com/
Additionally when working with promises you can use generator-based coroutines with tools like co or Bluebird.coroutine:
https://github.com/tj/co
http://bluebirdjs.com/docs/api/promise.coroutine.html
Or you can use ES8 async/await:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
Those are the main ways to handle cases like yours. Reinventing the wheel of concurrency handling can lead (as you can see here) to error-prone, hard to maintain code.
I recommend using the right tool for the job.
Use async/await
Install the asyncawait library and its dependency bluebird:
npm install asyncawait --save
npm install bluebird --save
Your edited code should look like:
const async = require('asyncawait/async');
const await = require('asyncawait/await');
const Promise = require('bluebird');
const request = require('request');
const config = require('../../config.js');

function _getLogFileUrls(logFileId) {
    return new Promise((resolve, reject) => {
        var fileParams = {
            fileName: 'xyzdirectory/' + logFileId
        };
        request.post({
            url: config.filesServiceUrl + 'get-logfile-urls',
            json: fileParams
        }, function (error, response, body) {
            if (!error && response.statusCode === 200) {
                resolve(body);
            } else {
                reject('Error requesting file service for logs:');
            }
        }).on('error', function (err) {
            console.log('File service error for Logs: ' + err);
        });
    });
}

function getLogs(req, res) {
    if (!req.body.id) {
        return res.status(400).send('Please check the params!');
    }

    var date;
    if (req.body.date) {
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'",
        resArray = [];

    // the whole callback is wrapped in async() so that it runs in a fiber;
    // each await() below then suspends until its promise resolves, and
    // res.send() only fires once the loop has finished
    hitThisQueryForMe(sqlQuery, res, async(function (rows) {
        if (!rows.length) return res.json(rows);

        _.each(rows, function (item) {
            console.log('item: ' + item.LogFileID);
            var logFileUrlResponse = await(_getLogFileUrls(item.LogFileID));
            resArray.push(logFileUrlResponse);
        });

        res.send(resArray);
        resArray = [];
    }));
}
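On newer Node versions (7.6+) the same shape works with native async/await and no extra libraries. A sketch along the same lines, reusing the promisified _getLogFileUrls above:
hitThisQueryForMe(sqlQuery, res, async function (rows) {
    if (!rows.length) return res.json(rows);
    try {
        var resArray = [];
        for (var item of rows) {
            // each iteration waits for its signed URL before continuing
            resArray.push(await _getLogFileUrls(item.LogFileID));
        }
        res.send(resArray);
    } catch (err) {
        res.status(500).send(err);
    }
});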

Bluebird Promise Chains: 'Catch' with Result

In order to make this question as useful to as many people as possible, I will exclude my specific implementation details beyond the fact that I am using the Bluebird promise library with Node + Express below.
So, let's say that I have the following chain (where P returns a promise, and res is the Express HTTP response object):
P().then(function(){
    // do nothing if all went well (for now)
    // we only care if there is an error
}).catch(function(error){
    res.status(500).send("An error occurred");
}).then(function(){
    return P();
}).then(function(pVal1){
    return [pVal1, P()];
}) // TODO: catch an error from P() here and log pVal1
.spread(function(pVal1, pVal2){
    if (pVal1 === pVal2) {
        console.log("Success!");
    } else {
        console.log("Failure");
    }
});
Where I have placed the TODO comment above is where I would like to catch an error that might occur from my call to P. If I do catch an error, I would like to log pVal1 and then send a 500 error, as is done in the first catch. However, I am not sure if this is possible with how I am structuring my chain.
I believe that I need to do some "branching," but I do not think that I understand this concept well enough to stop the asynchronous nature of JavaScript from getting the best of me! As such, any help is thoroughly appreciated.
Don't forget to catch errors in the end of the chain. That's also the place to send the response.
Catching errors in the middle of a chain is for intermittent error handling; the chain continues to run, so don't send a response just yet.
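To make that concrete, here is a minimal sketch (reusing the question's P and res) of a mid-chain catch that recovers and lets the chain continue:
P().catch(function (error) {
    // intermittent handling: log and recover with a fallback value
    console.log("Logging: " + error);
    return "fallback";
}).then(function (value) {
    // the chain continues; value is either P's result or "fallback"
    res.send(value);
}).catch(function (error) {
    // only errors that were not recovered above reach this final handler
    res.status(500).send("An error occurred");
});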
Here is something to try it out with:
// example middleware
function handle(req, res, next) {
    log("----------------");
    return async("p1", "foo").then(function (pVal1) {
        return pVal1;
    }).then(function (pVal1) {
        var p2a = async("p2a", "bar"),
            p2b = async("p2b", "bar").catch(function (error) {
                log("Logging: " + error + " (pVal1 " + pVal1 + ")");
            });
        return [p2a, p2b];
    }).spread(function (pVal1, pVal2) {
        if (pVal1 === pVal2) {
            res.send("Success!");
        } else {
            res.send("Failure");
        }
    }).catch(function (error) {
        res.status(500).send("An error occurred");
        log("Logging: " + error);
    });
}
// ---------------------------------------------------------------------

// mockup response object
var res = {
    status: function (code) {
        log("Sending status: " + code);
        return this;
    },
    send: function () {
        log("Sending response: " + [].join.call(arguments, " "));
        return this;
    }
};

// mockup promise generator
function async(name, value) {
    return new P(function (resolve, reject) {
        if (confirm("let " + name + " succeed?")) {
            log(name + " succeeds...");
            resolve(value);
        } else {
            log(name + " fails...");
            reject(name + " has failed");
        }
    });
}

function log() {
    var msg = document.createElement("DIV");
    msg.textContent = [].join.call(arguments, " ");
    document.getElementById("log").appendChild(msg);
    document.body.scrollTop = document.body.scrollHeight;
}
button {
    position: fixed;
    top: 5px;
}
<script src="http://cdnjs.cloudflare.com/ajax/libs/bluebird/2.9.33/bluebird.min.js"></script>
<button onclick="handle(null, res, null)">Go</button>
<div id="log"></div>
This is possible if you use the explicit Promise.all instead of returning an array into .spread.
}).then(function(pVal1){
    // this becomes a `Promise.all` - the aggregation is explicit
    var all = Promise.all([pVal1, P()]);
    all.catch(function(e){ // "branching", we both return and `catch` the promise
        console.log("Error, pVal1 is", pVal1);
    });
    return all; // return it
}).spread(function(pVal1, pVal2){
    // ....
});

Having trouble with promises in nodejs

I'm trying to use promises with Node.js (specifically the node-promise package), however without any success. See the code below:
var express = require('express'),
    request = require('request'),
    promise = require('node-promise');

var app = express(); // assumed; the original snippet omits this

app.get('/promise', function(req, res) {
    var length = -1;
    new promise.Promise(request(
        {uri: "http://www.bing.com"},
        function (error, response, body) {
            if (error && response.statusCode !== 200) {
                console.log("An error occurred when connected to the web site");
                return;
            }
            console.log("I'll return: " + body.length);
            length = body.length;
        }
    )).then(function(result) {
        console.log("This is what I got: " + length);
        console.log("Done!");
    });
    res.end();
});
The output of the above code is I'll return: 35857 only and it doesn't go to the then part.
I then changed the code to:
app.get('/promise', function(req, res) {
    var length = -1;
    promise.when(
        request(
            {uri: "http://www.bing.com"},
            function (error, response, body) {
                if (error && response.statusCode !== 200) {
                    console.log("An error occurred when connected to the web site");
                    return;
                }
                console.log("I'll return: " + body.length);
                length = body.length;
            }
        ),
        function(result) {
            console.log("This is what I got: " + length);
            console.log("Done!");
        },
        function(error) {
            console.log(error);
        }
    );
    res.end();
});
This time the output is This is what I got: -1 then Done!... looks like the "promise" was not called this time.
So:
What needs to be done to fix the code above? Obviously I'm not doing it right :)
Is node-promise "the way to go" when I'm doing promises, or is there a better way/package? i.e. simpler and more production-ready.
Thanks.
Try jquery-deferred-for-node.
I'm not an expert but understand that this lib tends to be favoured by programmers who work both server-side and client-side.
Even if you don't already know jQuery's Deferreds, the advantages of going this route are that :
the documentation is excellent (it comprises links to the jQuery docs), though you may struggle to find examples specific to Node.
methods are chainable.
jQuery Callbacks are also included.
when one day you need to do asynchronous stuff client-side, then there's virtually nothing to relearn - the concepts are identical and the syntax very nearly so. See the "Correspondances" section in the github page hyperlinked above.
EDIT
I'm not a node.js person so I'm guessing here but based on your code above, you might want to consider something along the following lines with jquery-deferred-for-node :
var express = require('express'),
    request = require('request'),
    Deferred = require('JQDeferred');

function fetch(uri, goodCodes) {
    goodCodes = (!goodCodes) ? [200] : goodCodes;
    var dfrd = Deferred(); // A Deferred to be resolved/rejected in response to the `request()`.
    request(uri, function(error, response, body) {
        if (!error) {
            var isGood = false;
            // Loop to test response.statusCode against `goodCodes`.
            for (var i = 0; i < goodCodes.length; i++) {
                if (response.statusCode == goodCodes[i]) {
                    isGood = true;
                    break;
                }
            }
            if (isGood) {
                dfrd.resolve(response.statusCode, body);
            } else {
                dfrd.reject(response.statusCode, "An invalid response was received from " + uri);
            }
        } else {
            dfrd.reject(response.statusCode, "An error occurred attempting to connect to " + uri);
        }
    });
    // Make promise derived from dfrd available to "consumer".
    return dfrd.promise();
}
//...
app.get('/promise', function(req, resp) {
    fetch("http://www.bing.com").done(function(statusCode, result) {
        console.log("Done! This is what I got: " + result.length);
    }).fail(function(statusCode, message) {
        console.log("Error (" + statusCode + "): " + message);
    });
    resp.end();
});
Here, I have tried to write a generalized utility for fetching a resource in such a way that the asynchronous response (or error) can be handled externally. I think this is broadly along the lines of what you were trying to achieve.
Out of interest, where do console.log() messages end up with node.js?
EDIT 2
Above, I have given Deferred an initial capital, as is conventional for constructors.
With jQuery Deferreds, there must be any number of ways to fetch() consecutively. The approach below leaves fetch() as it was, and introduces fetch_() to act as its front-end. There may be simpler ways but this allows fetch() to remain a general utility, functionally equivalent to the client-side jQuery.ajax().
function fetch_(uri) {
    return function() {
        return fetch(uri, [200]).then(function(statusCode, result) {
            console.log("Done! This is what I got: " + result.length);
        }, function(statusCode, message) {
            console.log("Error (" + statusCode + "): " + message);
        });
    };
}
Note that fetch_() returns a function. It has to be like this because where fetch_() is called, we want an unexecuted function, not (yet) the result of that function.
Now let's assume an array of uris is available. This can be hard-coded or built dynamically - whatever the application demands.
var uris = [
    'http://xxx.example.com',
    'http://yyy.example.com',
    'http://zzz.example.com'
];
And now, a variety of ways in which fetch_() might be called :
//v1. To call `resp.end()` when the fetching process starts.
app.get('/promise', function(req, resp) {
    fetch_(uris[0])().then(fetch_(uris[1])).then(fetch_(uris[2]));
    resp.end();
});

//v2. To call `resp.end()` when the fetching process has finished.
app.get('/promise', function(req, resp) {
    fetch_(uris[0])().then(fetch_(uris[1])).then(fetch_(uris[2])).always(resp.end);
});

//v3. As v2 but building a `.then()` chain of any (unknown) length.
app.get('/promise', function(req, resp) {
    var dfrd = Deferred().resolve();
    $.each(uris, function(i, uri) {
        dfrd = dfrd.then(fetch_(uri));
    });
    dfrd = dfrd.always(resp.end);
});
untested
I have more confidence in v1 and v2. v3 may work.
v2 and v3 should both give exactly the same behaviour but v3 is generalized for any number of uris.
Everything may need debugging.
I would recommend using Q: https://github.com/kriskowal/q. I believe that it's used internally by other frameworks (like jQuery deferred implementation).
I believe that the documentation is "fine"; the syntax is consistent with other promise implementations... and it has a node adapter.
So your deferred style approach:
var deferred = Q.defer();
FS.readFile("foo.txt", "utf-8", function (err, res) {
    if (!err) {
        deferred.resolve(res);
    } else {
        deferred.reject(err);
    }
});
return deferred.promise;
Can be written more concisely as:
var deferred = Q.defer();
FS.readFile("foo.txt", "utf-8", deferred.makeNodeResolver());
return deferred.promise;
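Or, shorter still, using Q's Node adapter:
return Q.nfcall(FS.readFile, "foo.txt", "utf-8");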
