Handling multiple requests in a loop causes sync issues - javascript

I call getLogs() through a POST request and get a list of LogFileID (filenames) from a DB, then I pass each LogFileID to an additional request by calling _getLogFileUrls, which returns a signed URL for that ID. I push them one by one into a global array and return that array in the final response.
I know it's incorrect to use setTimeout, but the problem isn't just that: it gives me a different result in the array every time. What could I do to resolve this issue? How do I correct this code so that the loop only moves to the next iteration once the signed URL has been stored in the global array?
function _getLogFileUrls(logFileId, callback){
    var request = require('request'),
        config = require('../../config.js');

    var fileParams = {
        fileName: 'xyzdirectory/' + logFileId
    };

    request.post({
        url: config.filesServiceUrl + 'get-logfile-urls',
        json: fileParams
    }, function(error, response, body) {
        if (!error && response.statusCode === 200) {
            callback(body);
        } else {
            res.status(400).send('Error requesting file service for logs:');
        }
    }).on('error', function(err) {
        console.log('File service error for Logs: ' + err);
    });
}
function getLogs(req, res){
    if(!req.body.id){
        return res.status(400).send('Please check the params!');
    }

    var date;
    if(req.body.date){
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'",
        resArray = [];

    hitThisQueryForMe(sqlQuery, res, function(rows){
        if(!rows.length) res.json(rows);

        _.each(rows, function(item){
            console.log('item: ' + item.LogFileID);
            _getLogFileUrls(item.LogFileID, function(response){
                resArray.push(response);
            });
        });

        setTimeout(function(){
            res.send(resArray);
            resArray = [];
        }, 4000);
    });
}

SQL injection alert
First of all, your code has a serious SQL injection vulnerability. Never use string concatenation to build SQL from user-provided data, or anyone will be able to read, modify and delete anything in your database. This is a very serious security issue. For more details see these answers:
cannot use backtick when using nodejs 7.3.0
How to escape mysql special characters with sockets.io/node.js/javascript
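As a sketch of the fix (hedged: it assumes the query runs through a mysql-style driver that supports ? placeholders, and that hitThisQueryForMe can forward query parameters - adjust to whatever your driver actually provides), pass the values separately instead of concatenating them:
// hypothetical parameterized version - placeholder syntax depends on your driver
var sqlQuery = "SELECT `LogFileID` FROM `logs_data` " +
               "WHERE `EmpID` = ? AND DATE(`Timestamp`) = ?";
var params = [req.body.id, date];

// e.g. with the mysql module: connection.query(sqlQuery, params, callback)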
The answer
Now, to answer your question. To handle what you're trying to do here, you should either stick to callbacks and use a good module to handle concurrency, like Async:
https://caolan.github.io/async/
Or you can use promises with a good module to help with concurrency like Q or Bluebird:
http://documentup.com/kriskowal/q/
http://bluebirdjs.com/
Additionally when working with promises you can use generator-based coroutines with tools like co or Bluebird.coroutine:
https://github.com/tj/co
http://bluebirdjs.com/docs/api/promise.coroutine.html
Or you can use ES8 async/await:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function
Those are the main ways to handle cases like yours. Reinventing the wheel of concurrency handling can lead (as you can see here) to error-prone, hard-to-maintain code.
I recommend using the right tool for the job.
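For example, with the Async module the fan-out could look roughly like this (a minimal sketch, assuming _getLogFileUrls keeps its single-argument callback from the question):
var async = require('async');

hitThisQueryForMe(sqlQuery, res, function (rows) {
    if (!rows.length) return res.json(rows);

    // run one _getLogFileUrls call per row and collect the results in order
    async.map(rows, function (item, cb) {
        _getLogFileUrls(item.LogFileID, function (signedUrl) {
            cb(null, signedUrl); // adapt the single-argument callback to async's (err, result) convention
        });
    }, function (err, resArray) {
        res.send(resArray); // only sent after every signed URL has been collected
    });
});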

Use async/await
Install the asyncawait library and its dependency bluebird:
npm install asyncawait --save
npm install bluebird --save
Your edited code should then look like this:
const async = require('asyncawait/async');
const await = require('asyncawait/await');
const Promise = require('bluebird');
const request = require('request');
const config = require('../../config.js');

function _getLogFileUrls(logFileId) {
    return new Promise((resolve, reject) => {
        var fileParams = {
            fileName: 'xyzdirectory/' + logFileId
        };
        request.post({
            url: config.filesServiceUrl + 'get-logfile-urls',
            json: fileParams
        }, function (error, response, body) {
            if (!error && response.statusCode === 200) {
                resolve(body);
            } else {
                reject('Error requesting file service for logs:');
            }
        }).on('error', function (err) {
            console.log('File service error for Logs: ' + err);
        });
    });
}
function getLogs(req, res) {
    if (!req.body.id) {
        return res.status(400).send('Please check the params!');
    }

    var date;
    if (req.body.date) {
        date = req.body.date;
    } else {
        date = new Date().toISOString().slice(0, 10);
    }

    var sqlQuery = "SELECT `LogFileID` FROM `logs_data` WHERE `EmpID` = '" + req.body.id + "' AND DATE(`Timestamp`) = '" + date + "'",
        resArray = [];

    hitThisQueryForMe(sqlQuery, res, async(function (rows) {
        if (!rows.length) return res.json(rows);

        // await() suspends here until each signed URL resolves,
        // so res.send() only runs after every row has been processed
        _.each(rows, function (item) {
            console.log('item: ' + item.LogFileID);
            var logFileUrlResponse = await(_getLogFileUrls(item.LogFileID));
            resArray.push(logFileUrlResponse);
        });

        res.send(resArray);
    }));
}
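On Node 7.6+ you can get the same behaviour with native async/await and no extra package; a rough sketch (it reuses the promise-returning _getLogFileUrls above and assumes hitThisQueryForMe keeps its callback shape):
async function collectLogFileUrls(rows) {
    const resArray = [];
    for (const item of rows) {
        // each iteration waits for the previous signed URL before continuing
        resArray.push(await _getLogFileUrls(item.LogFileID));
    }
    return resArray;
}

// inside getLogs:
hitThisQueryForMe(sqlQuery, res, function (rows) {
    if (!rows.length) return res.json(rows);
    collectLogFileUrls(rows)
        .then(resArray => res.send(resArray))
        .catch(() => res.status(400).send('Error requesting file service for logs'));
});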

Related

How do you get javascript promises to work with node + express and display on an EJS file?

Before you tell me to look at the docs: I have, and they were not helpful in the slightest.
I have a web page with Node as the backbone. On one page I need to request the past 10 images from NASA's Astronomy Picture of the Day (APOD) API, and after that I need to request the next 5 upcoming launches from the Launch Library API (https://launchlibrary.net/docs/1.3/api.html). My problem is that not all the APODs will load (which I understand is because of the nature of asynchronous requests).
Here is my concise app.js file for the Node backend:
app.get("/index", function(req, res) {
/**Requesting NASA's Astronomy Picture of the Day**/
var apod_url = "https://api.nasa.gov/planetary/apod?api_key=[My Key]"
var apod_img_urls = [];
var curr_moment = moment();
for(var i = 0; i < 10; i++) {
var appended_url = apod_url + "&date=" + curr_moment.subtract(i, "days").format("YYYY-MM-DD");
request(appended_url, function(error, reponse, body) {
if(!error && reponse.statusCode == 200) {
var img_json = JSON.parse(body);
if(img_json.media_type == "image") {
var apod_promise = new Promise(function(resolve, reject){
resolve(img_json.hdurl);
});
apod_img_urls.push(apod_promise);
}
} else {
console.log(error);
}
});
}
/**************************************************/
var url = "https://launchlibrary.net/1.3/launch?next=20&mode=verbose";
request(url, function(error, response, body) {
if(!error && response.statusCode == 200) {
var data = JSON.parse(body);
res.render("index", {data: data, apod_img_urls: apod_img_urls});
} else {
console.log(error);
}
});
});
Here is an EJS snippet
<% apod_img_urls.forEach(function(promise, index) { %>
    <div class="carousel-item <%= (index == 0 ? 'active' : '') %>">
        <div class="w-100 home-image" style="background-image: url('<%= promise.then(function(url) {return url}); %>')"></div>
    </div>
<% }); %>
When I check the page source, it shows that the background-image URLs for the divs are [object Promise]. The way I have it, no images show up. Also, the number of divs displayed (i.e. the number of images I should have) varies; sometimes it's 5, sometimes it's 3, and other times it's none. Could my problem be that I'm rendering the page inside another request? Also, how can I get the actual image URL to show up in the EJS file?
You are creating the promise too late, inside the asynchronous callback to request - a fairly simple reorganisation of the code is required.
Once all the promises are in an array, you then need to wait for them to complete, using Promise.all.
app.get("/index", function(req, res) {
/**Requesting NASA's Astronomy Picture of the Day**/
var apod_url = "https://api.nasa.gov/planetary/apod?api_key=[My Key]"
var promises = [];
var curr_moment = moment();
for(var i = 0; i < 10; i++) {
var appended_url = apod_url + "&date=" + curr_moment.subtract(i, "days").format("YYYY-MM-DD");
promises.push(new Promise((resolve, reject) => {
request(appended_url, function(error, reponse, body) {
if(!error && reponse.statusCode == 200) {
var img_json = JSON.parse(body);
resolve(img_json.hdurl);
} else {
reject(error);
console.log(error);
}
});
}));
}
Promise.all(promises).then(apod_img_urls => {
var url = "https://launchlibrary.net/1.3/launch?next=20&mode=verbose";
request(url, function(error, response, body) {
if(!error && response.statusCode == 200) {
var data = JSON.parse(body);
res.render("index", {data, apod_img_urls});
} else {
console.log(error);
}
});
});
});
Rather than using request-promise and all the cruft it requires, you can always make your own promisified request function; in this case, it rejects if the status is anything other than 200:
const requestP = url => new Promise((resolve, reject) => request(url, (error, response, body) => {
    if (error) {
        reject({error, response, body});
    } else if (response.statusCode != 200) {
        reject(response.statusCode);
    } else {
        resolve({response, body});
    }
}));
Now your code can be written as:
app.get("/index", function(req, res) {
/**Requesting NASA's Astronomy Picture of the Day**/
const apod_url = "https://api.nasa.gov/planetary/apod?api_key=[My Key]"
const curr_moment = moment();
Promise.all(Array.from({length:10}, (_, i) => {
const appended_url = apod_url + "&date=" + curr_moment.subtract(i, "days").format("YYYY-MM-DD");
return requestP(appended_url).then(({response, body}) => JSON.parse(body).hdurl);
})).then(apod_img_urls => {
const url = "https://launchlibrary.net/1.3/launch?next=20&mode=verbose";
return requestP(url).then(({response, body}) => {
const data = JSON.parse(body);
return res.render("index", {data, apod_img_urls});
});
});
});
Note: there's a lot of ES2015+ going on in there
If you use a promise-returning request library like request-promise then you can do something like:
app.get("/index", function(req, res) {
var apod_url = "https://api.nasa.gov/planetary/apod?api_key=[My Key]"
var curr_moment = moment();
var urls = [];
for(var i = 0; i < 10; i++) {
urls[i] = apod_url + "&date=" + curr_moment.subtract(i, "days").format("YYYY-MM-DD");
}
Promise.all(urls.map(url => rp({ url, json: true})).then((results) => {
// here you have all results with JSON already parsed for you
// ...
}).catch((err) => {
// handle error
// make sure to return response to client
// ...
});
// ...
});
If you want to work with promises then use a promise-returning module like request-promise or axios instead of the standard request, see:
https://www.npmjs.com/package/request-promise
https://www.npmjs.com/package/axios
Or, alternatively, use bluebird's promisify or the built-in util.promisify (available since Node 8.0) to promisify the request module (a small sketch follows the links below), see:
http://bluebirdjs.com/docs/api/promise.promisify.html
https://nodejs.org/dist/latest-v8.x/docs/api/util.html
For more options of promisifying request see:
https://www.npmjs.com/package/request#promises--asyncawait
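For example, promisifying request with the built-in util.promisify might look roughly like this (a sketch; since request's callback receives (error, response, body), the promisified call resolves with the response object and you read body from it):
const util = require('util');
const request = require('request');

const requestAsync = util.promisify(request);

requestAsync('https://example.com/api')
    .then(response => {
        const data = JSON.parse(response.body); // request also exposes the body on the response object
        console.log(data);
    })
    .catch(err => console.error(err));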
Then, when you have a promise-returning request library, make an array of URLs however you want, then map that array with the request functions, like this:
let rp = require('request-promise');
let urls = ['http://...', 'http://...'];
let promises = urls.map(rp);
and then use Promise.all to wait for all of them to finish, while they run concurrently:
Promise.all(promises)
.then(...)
.catch(...);
or, if you're using async/await, then:
try {
    let x = await Promise.all(promises);
    // ...
} catch (err) {
    // ...
}
Many modules, like axios, request-promise-json or request-promise, will parse JSON for you if you use them correctly, see:
https://github.com/request/request-promise#get-something-from-a-json-rest-api
Avoid parsing JSON yourself, but if you do, always put JSON.parse() inside a try/catch (or use my little tryjson module) - see these answers to know why, and the small sketch after the links:
Calling a JSON API with Node.js
Reading requests body Nodejs
Node JS ignores undefined check
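A minimal illustration of the safe pattern (a sketch; what you do in the catch branch is up to you):
function safeJsonParse(body) {
    // JSON.parse throws on malformed input, so guard it
    try {
        return JSON.parse(body);
    } catch (err) {
        console.error('Could not parse JSON response:', err.message);
        return null; // or reject a promise / call back with the error
    }
}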

Node.js web scraping with loop on array of URLs

I'm trying to build a little script to scrape some data. I have some basic knowledge of JavaScript, however I'm kind of lost with all the async callback and promises stuff. Here is what I have now:
url = "http://Blablablabla.com";
var shares = function(req, res) {
request(url, function (error, response, body) {
if (!error) {
var $ = cheerio.load(body),
share = $(".theitemIwant").html();
return res.send(url + ":" + share);
} else {
console.log("We've encountered an error: " + error);
}
})
}
So everything is fine with this piece of code. What I would like to do is:
Use an array of URLs: var urls = [url1, url2, url3, etc...]
Store my scraped data in another array, something like this: data = [{url: url1, shares: share}, {url: url2, shares: share}, etc...]
I know I need to use something like data.push({ url: url, shares: share }),
and I understand that I need to loop over my first URL array to push data into my second data array.
However, I'm kind of lost with the request method and the way I should deal with the async issue in my situation.
Thanks!
Edit #1:
I tried this to use promises:
var url = "www.blablabla.com"
var geturl = request(url, function (error, response, body) {
if (!error) { return $ = cheerio.load(body) } else
{ console.log("We've encountered an error: " + error); }
});
var shares = geturl.then( function() {
return $(".nb-shares").html();
})
but got the following error: geturl.then is not a function
I think you should use async:
var async = require('async');

var urls = ["http://example.com", "http://example.com", "http://example.com"];
var data = [];

var calls = urls.map((url) => (cb) => {
    request(url, (error, response, body) => {
        if (error) {
            console.error("We've encountered an error:", error);
            return cb();
        }
        var $ = cheerio.load(body),
            share = $(".theitemIwant").html();
        data.push({ url, share });
        cb(); // tell async this call is finished
    });
});

async.parallel(calls, () => { /* YOUR CODE HERE */ });
You could do the same with promises, but I don't see why.
I took a stab at it. You need to install the q library and require it:
var Q = require('q');

// ... wherever your function is

// start with an array of string urls
var urls = ["http://Blablablabla.com", '...', '...'];

// store results in this array in the form:
// {
//   url: url,
//   promise: <will be resolved when it's done>,
//   share: 'code that you wanted'
// }
var results = [];

// loop over each url and perform the request
urls.forEach(processUrl);

function processUrl(url) {
    // we use a deferred object so we can know when the request is done
    var deferred = Q.defer();

    // create a new result object and add it to results
    var result = {
        url: url,
        promise: deferred.promise
    };
    results.push(result);

    // perform the request
    request(url, function (error, response, body) {
        if (!error) {
            var $ = cheerio.load(body),
                share = $(".theitemIwant").html();

            // resolve the promise so we know this request is done.
            // no one is using the resolved value, but if they were they would get share
            deferred.resolve(share);

            // set the value we extracted on the result object
            result.share = share;
        } else {
            // request failed, reject the promise to abort the chain and fall into the "catch" block
            deferred.reject(error);
            console.log("We've encountered an error: " + error);
        }
    });
}

// results.map converts the array of result objects to just their promises
// Q.all takes in an array of promises
// when they are all done it will call your then/catch block.
Q.all(results.map(function(i){ return i.promise; }))
    .then(sendResponse) // when all promises are done it calls this
    .catch(sendError);  // if any promise fails it calls this

function sendError(error){
    res.status(500).json({failed: error});
}

function sendResponse(data){ // data = response from every resolve call
    // process results and convert to your response
    return res.send(results);
}
Here is another solution I like a lot:
const requestPromise = require('request-promise');
const Promise = require('bluebird');
const cheerio = require('cheerio');

const urls = ['http://google.be', 'http://biiinge.konbini.com/series/au-dela-des-murs-serie-herve-hadmar-marc-herpoux-critique/?src=konbini_home'];

Promise.map(urls, requestPromise)
    .map((htmlOnePage, index) => {
        const $ = cheerio.load(htmlOnePage);
        const share = $('.nb-shares').html();
        let shareTuple = {};
        shareTuple[urls[index]] = share;
        return shareTuple;
    })
    .then(console.log)
    .catch((e) => console.log('We encountered an error' + e));

How to persist data through promise chain in NodeJS (Bluebird)

Follow-up to Swap order of arguments to "then" with Bluebird / NodeJS Promises (the posted answer worked, but immediately revealed a new issue)
This is the first time I've ever used promises in NodeJS so I apologize if some conventions are poorly adhered to or the code is sloppy. I'm trying to aggregate data from multiple APIs, put it in a database, then compute some statistics based on similarities and differences in the data. As a starting point I'm trying to get an API token for a single one of the APIs.
Here is my full code:
var Promise = require('bluebird');
var fs = require('fs');
var request = require('request');

Promise.promisifyAll(fs);
Promise.promisifyAll(request);

// tilde-expansion doesn't follow the callback(err, data) convention
var tilde = function(str) {
    var _tilde = require('tilde-expansion');
    return new Promise(function(resolve, reject) {
        try {
            _tilde(str, resolve);
        } catch(e) {
            reject(e);
        }
    });
};

var getToken = function() {
    return request.getAsync(process.env.token_url, {
        headers: {
            "Content-Type": "applications/x-www-form-urlencoded"
        },
        form: {
            client_id: process.env.client_id,
            client_secret: process.env.client_secret,
            grant_type: "client_credentials"
        }
    })
    .then(function(resp) { return resp.body; });
};

var tokenFile = tilde(process.env.token_file)
    .catch(function(err) {
        console.log("Error parsing path to file... can not recover");
    });

var token = tokenFile
    .then(fs.readFileAsync) //, "utf8")
    .then(function(data) {
        console.log("Token (from file): " + data);
        return data;
    })
    .then(JSON.parse)
    .catch(function(err) {
        console.log("Error reading token from file... getting a new one");
        return getToken()
            .then(function(data) {
                console.log("Token (from API): " + data);
                return data;
            })
            .then(JSON.stringify)
            .then(fs.writeFileAsync.bind(null, tokenFile.value()));
    });

token.then(function(data) {
    console.log("Token (from anywhere): " + token.value);
});
This code is currently logging:
Token: undefined
if I fall back to the API. Assuming I did my promise stuff correctly (.catch() can return a promise, right?) then I would assume the issue is occurring because fs.writeFileAsync returns void.
I would like to append a .return() on the end of this promise, but how would I gain access to the return value of getToken()? I tried the following:
.catch(function(err) {
    console.log("Error reading token from file... getting a new one");
    var token = "nope";
    return getToken()
        .then(function(data) {
            console.log("Token (from API): " + data);
            token = data;
            return data;
        })
        .then(JSON.stringify)
        .then(fs.writeFileAsync.bind(null, tokenFile.value()))
        .return(token);
});
However this logs "nope".
Over the weekend I continued my research on promises and upon making a pivotal realization I was able to develop the solution to this. Posting here both the realization and the solution:
The Realization
Promises were invented so that asynchronous code could be used in a synchronous manner. Consider the following:
var data = processData(JSON.parse(readFile(getFileName())));
This is the equivalent of:
var filename = getFileName();
var fileData = readFile(filename);
var parsedData = JSON.parse(fileData);
var data = processData(parsedData);
If any one of these functions is asynchronous then it breaks, because the value isn't ready on time. So for those asynchronous bits we used to use callbacks:
var filename = getFileName();
var data = null;
readFile(filename, function(fileData){
    data = processData(JSON.parse(fileData));
});
This is not only ugly, but breaks a lot of things like stack traces, try/catch blocks, etc.
The Promise pattern fixed this, letting you say:
var filename = getFileName();
var fileData = filename.then(readFile);
var parsedData = fileData.then(JSON.parse);
var data = parsedData.then(processData);
This code works regardless of whether these functions are synchronous or asynchronous, and there are zero callbacks. It's actually all synchronous code, but instead of passing values around, we pass promises around.
This led me to the realization that for every bit of code that can be written with promises, there is a synchronous corollary.
The Solution
Realizing this, I tried to consider what my code would look like if all of the functions were synchronous:
try {
    var tokenFile = tilde(process.env.token_file);
} catch(err) {
    throw new Error("Error parsing path to file... can not recover");
}

var token = null;
try {
    token = JSON.parse(readFile(tokenFile));
} catch(err) {
    token = getToken();
    writeFile(tokenFile, JSON.stringify(token));
}

console.log("Token: " + token.value);
After framing it like this, the promise version follows logically:
var tokenFile = tilde(process.env.token_file)
    .catch(function(err) {
        throw new Error("Error parsing path to file... can not recover");
    });

var token = tokenFile
    .then(readFile)
    .then(JSON.parse)
    .catch(function(err) {
        var _token = getToken();
        _token
            .then(JSON.stringify)
            .then(writeFile.bind(null, tokenFile.value()));
        return _token;
    });

Calling done on an array of http.get requests in Node.js

I have an array of URLs over which I'm using a for loop to make http.get requests. Since this is an async process, I'd like to call done after ALL requests have returned.
Here is my current attempt:
grunt.registerTask('verify', function() {
    var done = this.async();
    var promises = [];
    var urlPrefix = 'http://example.com/';

    for(var i = 0; i < deployableFiles.length; i++) {
        (function(i) {
            var deferred = Q.defer();
            promises.push(deferred);

            var file = deployableFiles[i];
            var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);

            http.get(urlPrefix + path, function(res) {
                deferred.resolve();
                if(res.statusCode === 200) {
                    grunt.log.oklns(path + ' was found on production server.');
                } else {
                    grunt.log.error('Error! ' + path + ' was not found on production server!');
                }
            }).on('error', function(e) {
                grunt.log.error("Got error: " + e.message);
                done();
            });
        })(i);
    }

    Q.all(promises)
        .done(function() {
            // Everything executed correctly
            return done();
        }, function(reason) {
            // There was an error somewhere
            return done(false);
        });
});
I'm sure it's just me not wrapping my head around the whole async nature of node correctly, but is there anything glaringly obvious to anyone else?
I've searched about using http with the Q library, and it appears it might be required to use Q.nfcall to get this to work. I'm just having trouble seeing WHY I'd have to do that. (I'm not averse to actually doing that, I'm more curious than anything else.)
Thanks!
If this is not a typo, promises.push(deferred) should push the promise instead: promises.push(deferred.promise).
function foo() {
    // ...
    return defer.promise;
}
// => foo().then(function() ...);

Q.all([
    foo(),
    foo(),
    // ...
]).done(function() ...);
Q.all expects an array of promises. https://github.com/kriskowal/q#combination
Q.nfcall is just sugar for the case where you are
working with functions that make use of the Node.js callback pattern, where callbacks are in the form of function(err, result)
https://github.com/kriskowal/q#adapting-node
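For illustration, wrapping a Node-style callback API with Q.nfcall looks like this (a small sketch using fs.readFile, which follows the (err, result) convention):
var Q = require('q');
var fs = require('fs');

// Q.nfcall turns a callback-style call into a promise
Q.nfcall(fs.readFile, 'config.json', 'utf-8')
    .then(function (contents) {
        console.log('Read ' + contents.length + ' characters');
    })
    .catch(function (err) {
        console.error('Read failed:', err);
    });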
You should always perform promisification at the lowest level possible. That makes reasoning about concurrency a lot easier.
function getPing(url){
    return new Q.Promise(function(resolve, reject){
        http.get(url, function(res){
            // note this will _not_ wait for the whole request,
            // just the headers.
            if(res.statusCode === 200) resolve();
            else reject();
        });
    });
}
This would let you do:
grunt.registerTask('verify', function() {
    var done = this.async();
    var urlPrefix = 'http://example.com/';

    var pings = deployableFiles.map(function(file){
        var path = file.filetype + '/' +
            getVersionedFileName(file.basename, file.filetype);
        return getPing(urlPrefix + path);
    });

    Q.all(pings).then(done).catch(function(reason) {
        // There was an error somewhere
        // this will happen as soon as _one_ promise rejects
        return done(false);
    });
});
This can be further shortened by using a better promise library like Bluebird.
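For instance, with Bluebird the task body could be condensed to something like this (a sketch; Promise.map runs the pings concurrently and reuses the getPing helper from above):
var Promise = require('bluebird');

grunt.registerTask('verify', function () {
    var done = this.async();
    var urlPrefix = 'http://example.com/';

    Promise.map(deployableFiles, function (file) {
        var path = file.filetype + '/' + getVersionedFileName(file.basename, file.filetype);
        return getPing(urlPrefix + path);
    })
    .then(function () { done(); })
    .catch(function () { done(false); });
});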
You can also do this with async:
var urlPrefix = 'http://example.com/';

async.each(deployableFiles, function(file, cb) {
    var path = file.filetype
        + '/'
        + getVersionedFileName(file.basename, file.filetype);

    http.get(urlPrefix + path, function(res) {
        if (res.statusCode === 200)
            grunt.log.oklns(path + ' was found on production server.');
        else
            grunt.log.error('Error! ' + path + ' was not found on production server!');
        cb();
    }).on('error', function(e) {
        grunt.log.error("Got error: " + e.message);
        cb(e);
    });
}, function(err) {
    // all done
    if (err) throw err;
    // all successful
});

Having trouble with promises in nodejs

I'm trying to use promises with Node.js (using the node-promise package), however without any success. See the code below:
var express = require('express'),
    request = require('request'),
    promise = require('node-promise');

app.get('/promise', function(req, res) {
    var length = -1;
    new promise.Promise(request(
        {uri: "http://www.bing.com"},
        function (error, response, body) {
            if (error && response.statusCode !== 200) {
                console.log("An error occurred when connected to the web site");
                return;
            }
            console.log("I'll return: " + body.length);
            length = body.length;
        }
    )).then(function(result) {
        console.log("This is what I got: " + length);
        console.log("Done!");
    });
    res.end();
});
The output of the above code is only I'll return: 35857, and it never reaches the then part.
I then changed the code to:
app.get('/promise', function(req, res) {
    var length = -1;
    promise.when(
        request(
            {uri: "http://www.bing.com"},
            function (error, response, body) {
                if (error && response.statusCode !== 200) {
                    console.log("An error occurred when connected to the web site");
                    return;
                }
                console.log("I'll return: " + body.length);
                length = body.length;
            }
        ),
        function(result) {
            console.log("This is what I got: " + length);
            console.log("Done!");
        },
        function(error) {
            console.log(error);
        }
    );
    res.end();
});
This time the output is This is what I got: -1 followed by Done!... it looks like the "promise" was not called this time.
So:
What needs to be done to fix the code above? Obviously I'm not doing it right :)
Is node-promise "the way to go" when I'm doing promises, or is there a better way/package? i.e. simpler and more production-ready.
Thanks.
Try jquery-deferred-for-node.
I'm not an expert but understand that this lib tends to be favoured by programmers who work both server-side and client-side.
Even if you don't already know jQuery's Deferreds, the advantages of going this route are that:
the documentation is excellent (it comprises links to the jQuery docs), though you may struggle to find examples specific to Node.
methods are chainable.
jQuery Callbacks are also included.
when one day you need to do asynchronous stuff client-side, then there's virtually nothing to relearn - the concepts are identical and the syntax very nearly so. See the "Correspondances" section in the github page hyperlinked above.
EDIT
I'm not a node.js person, so I'm guessing here, but based on your code above you might want to consider something along the following lines with jquery-deferred-for-node:
var express = require('express'),
    request = require('request'),
    Deferred = require('JQDeferred');

function fetch(uri, goodCodes) {
    goodCodes = (!goodCodes) ? [200] : goodCodes;
    var dfrd = Deferred(); // A Deferred to be resolved/rejected in response to the `request()`.
    request(uri, function(error, response, body) {
        if (!error) {
            var isGood = false;
            // Loop to test response.statusCode against `goodCodes`.
            for (var i = 0; i < goodCodes.length; i++) {
                if (response.statusCode == goodCodes[i]) {
                    isGood = true;
                    break;
                }
            }
            if (isGood) {
                dfrd.resolve(response.statusCode, body);
            } else {
                dfrd.reject(response.statusCode, "An invalid response was received from " + uri);
            }
        } else {
            dfrd.reject(response.statusCode, "An error occurred attempting to connect to " + uri);
        }
    });
    // Make the promise derived from dfrd available to the "consumer".
    return dfrd.promise();
}

// ...

app.get('/promise', function(req, resp) {
    fetch("http://www.bing.com").done(function(statusCode, result) {
        console.log("Done! This is what I got: " + result.length);
    }).fail(function(statusCode, message) {
        console.log("Error (" + statusCode + "): " + message);
    });
    resp.end();
});
Here, I have tried to write a generalized utility for fetching a resource in such a way that the asynchronous response (or error) can be handled externally. I think this is broadly along the lines of what you were trying to achieve.
Out of interest, where do console.log() messages end up with node.js?
EDIT 2
Above, I have given Deferred an initial capital, as is conventional for constructors.
With jQuery Deferreds, there must be any number of ways to fetch() consecutively. The approach below leaves fetch() as it was, and introduces fetch_() to act as its front-end. There may be simpler ways but this allows fetch() to remain a general utility, functionally equivalent to the client-side jQuery.ajax().
function fetch_(uri){
    return function(){
        return fetch(uri, [200]).then(function(statusCode, result){
            console.log("Done! This is what I got: " + result.length);
        }, function(statusCode, message){
            console.log("Error (" + statusCode + "): " + message);
        });
    };
}
Note that fetch_() returns a function. It has to be like this because where fetch_() is called, we want an unexecuted function, not (yet) the result of that function.
Now let's assume an array of uris is available. This can be hard-coded or built dynamically - whatever the application demands.
var uris = [
    'http://xxx.example.com',
    'http://yyy.example.com',
    'http://zzz.example.com'
];
And now, a variety of ways in which fetch_() might be called:
// v1. To call `resp.end()` when the fetching process starts.
app.get('/promise', function(req, resp) {
    fetch_(uris[0])().then(fetch_(uris[1])).then(fetch_(uris[2]));
    resp.end();
});

// v2. To call `resp.end()` when the fetching process has finished.
app.get('/promise', function(req, resp){
    fetch_(uris[0])().then(fetch_(uris[1])).then(fetch_(uris[2])).always(resp.end);
});

// v3. As v2 but building a `.then()` chain of any (unknown) length.
app.get('/promise', function(req, resp){
    var dfrd = Deferred().resolve();
    $.each(uris, function(i, uri){
        dfrd = dfrd.then(fetch_(uri));
    });
    dfrd = dfrd.always(resp.end);
});
untested
I have more confidence in v1 and v2. v3 may work.
v2 and v3 should both give exactly the same behaviour but v3 is generalized for any number of uris.
Everything may need debugging.
I would recommend using Q: https://github.com/kriskowal/q. I believe it's used internally by other frameworks (like the jQuery deferred implementation).
I believe that the documentation is "fine"; the syntax is consistent with other promise implementations... and it has a node adapter.
So your deferred-style approach:
var deferred = Q.defer();
FS.readFile("foo.txt", "utf-8", function (err, res) {
    if (!err) {
        deferred.resolve(res);
    } else {
        deferred.reject(err);
    }
});
return deferred.promise;
Can be written more concisely as:
var deferred = Q.defer();
FS.readFile("foo.txt", "utf-8", deferred.makeNodeResolver());
return deferred.promise;
