Express.js retrieving info from external API and rendering to site - javascript

I am kind of stuck on how to handle this. The frameworks I'm currently working with are Node.js and Express.js, with Pug for views/rendering HTML. What I am trying to do is render a single page using values from separate HTTP GET requests to different sites. I want Express/Node to query the external APIs and render a view that I will retrieve via an AJAX call from the client-side JavaScript. The AJAX call using jQuery on the client side I have figured out. My issue is how to handle multiple external API calls and place the results into an object to render in my Pug view. I'm not sure what the best practice is here. Do I create a model? And how do I handle the asynchronous HTTP GETs? Use promises? I'm fairly new to Node and JavaScript, so I'm trying to learn the right conventions.
I hope this makes sense.
Thanks!!!!
apiInfoModel.js
var apiCallInfo = {
  apiInfo1: APIDATA,
  apiInfo2: APIDATA2,
  apiInfo3: APIDATA3
}
Should I have a function that I call that would return the APIDATA?
apiCalls.js
function getApiInfo1() {
  return http.get // pseudocode: unsure how to return the response data
}
function getApiInfo2() {
  return http.get
}
function getApiInfo3() {
  return http.get
}
apiInfoController.js
var apiInfo = require('./apiInfoModel')
var apiCalls = require('./apiCalls')

exports.apiInfo = function(req, res, next) {
  // note: this assigns the functions themselves, not their results;
  // this is the part I'm unsure about
  apiInfo.apiInfo1 = apiCalls.getApiInfo1
  apiInfo.apiInfo2 = apiCalls.getApiInfo2
  apiInfo.apiInfo3 = apiCalls.getApiInfo3
  res.render('apiInfo', { apiInfo: apiInfo })
}

To expand on the comment by @Mauricio Noris Freire:
apiCalls.js - Add callback parameters
var http = require('http')

// API_URL_1..3 are placeholders for your endpoints.
// http.get is asynchronous: buffer the response body and hand it to the
// node-style callback (error first) once the response ends.
function getApiInfo1(cb) {
  http.get(API_URL_1, function(res) {
    var body = ''
    res.on('data', function(chunk) { body += chunk })
    res.on('end', function() { cb(null, body) })
  }).on('error', cb)
}

// getApiInfo2 and getApiInfo3 follow the same pattern with their own URLs
function getApiInfo2(cb) { /* same as above with API_URL_2 */ }
function getApiInfo3(cb) { /* same as above with API_URL_3 */ }
apiInfoController.js - nest the callbacks to have access to all the results
var apiInfo = require('./apiInfoModel')
var apiCalls = require('./apiCalls')

exports.apiInfo = function(req, res, next) {
  apiCalls.getApiInfo1(function(err, info1Result) {
    if (err) return next(err)
    apiCalls.getApiInfo2(function(err, info2Result) {
      if (err) return next(err)
      apiCalls.getApiInfo3(function(err, info3Result) {
        if (err) return next(err)
        // now you have all 3 results
        var apiInfoResult = {
          apiInfo1: info1Result,
          apiInfo2: info2Result,
          apiInfo3: info3Result
        }
        res.render('apiInfo', { apiInfo: apiInfoResult })
      })
    })
  })
}
This nested structure is referred to as the pyramid of doom because it keeps growing with every asynchronous action you need to do. It can be improved by using a utility library like async https://www.npmjs.com/package/async:
async.parallel([
  apiCalls.getApiInfo1,
  apiCalls.getApiInfo2,
  apiCalls.getApiInfo3
], function(error, results) {
  if (error) return next(error)
  // results is an array holding the three answers, in task order
  var apiInfoResult = {
    apiInfo1: results[0],
    apiInfo2: results[1],
    apiInfo3: results[2]
  }
  res.render('apiInfo', { apiInfo: apiInfoResult })
})
But the currently recommended way is to use Promises. This is a newer API introduced in JavaScript to handle exactly these kinds of situations, and it's available natively in recent Node.js versions. It removes the need for callbacks:
apiCalls.js - Return promises instead of using callbacks (the fetch library does this)
// whatwg-fetch is a browser polyfill; in Node use node-fetch instead
const fetch = require('node-fetch')

function getApiInfo1() {
  return fetch(API_URL_1).then(function(res) { return res.json() })
}
function getApiInfo2() {
  return fetch(API_URL_2).then(function(res) { return res.json() })
}
function getApiInfo3() {
  return fetch(API_URL_3).then(function(res) { return res.json() })
}
apiInfoController.js - Use Promise.all.
exports.apiInfo = function(req, res, next) {
  // note: the functions are called here so Promise.all receives promises
  Promise.all([
    apiCalls.getApiInfo1(),
    apiCalls.getApiInfo2(),
    apiCalls.getApiInfo3()
  ]).then(function([apiInfo1, apiInfo2, apiInfo3]) {
    res.render('apiInfo', { apiInfo: { apiInfo1, apiInfo2, apiInfo3 } })
  }).catch(next)
}
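On Node 7.6+ you can go one step further with async/await; this is just the same controller sketched with await, under the same assumed apiCalls module as above:

exports.apiInfo = async function(req, res, next) {
  try {
    // execution pauses here until all three requests have resolved
    const [apiInfo1, apiInfo2, apiInfo3] = await Promise.all([
      apiCalls.getApiInfo1(),
      apiCalls.getApiInfo2(),
      apiCalls.getApiInfo3()
    ])
    res.render('apiInfo', { apiInfo: { apiInfo1, apiInfo2, apiInfo3 } })
  } catch (err) {
    next(err)
  }
}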

Related

AngularJS $q.all - wait between http calls

So I have a situation where I need to perform a bunch of HTTP calls and then, once they are all complete, continue on to the next step in the process.
Below is the code which does this and works fine.
However, I now need to wait a few seconds between each of the HTTP calls. Is there a way to pass in a timeout with my current setup, or will it involve a good bit of refactoring?
I can post more code if need be. I have tried passing a timeout config variable into the $http call; however, the calls still get fired at the same time.
Any advice would be great.
Code
var allThings = array.map(function(object) {
  var singleThingPromise = getFile(object.id);
  return singleThingPromise;
});

$q.all(allThings).then(function() {
  deferred.resolve('Finished');
}, function(error) {
  deferred.reject(error);
});
Instead of using $q.all, you might want to perform the calls sequentially, each one after the success of the previous, with a $timeout in between. You could build a recursive function.
Something like this:
function performSequentialCalls(index) {
  if (angular.isUndefined(array[index])) {
    return;
  }
  getFile(array[index].id).then(function() {
    $timeout(function() {
      performSequentialCalls(index + 1)
    }, 1000) // wait 1 sec after each call
  })
}
Inject the required services properly. This assumes array contains objects with ids, which you use to perform the API calls. It also assumes you are using $http; if you are using $resource, add $promise accordingly.
Hope that helps a bit!
function getItemsWithDelay(index) {
  if (index >= object.length) { return } // stop once past the last item
  getFile(object[index].id).then(() => {
    setTimeout(() => {
      getItemsWithDelay(index + 1)
    }, 5000) // wait 5 sec between calls
  })
}

// kick off with the first item
getItemsWithDelay(0)
This way the calls are made sequentially, with a delay between each.
This is an awesome trick question to be asked in an interview. Anyway, I had a similar requirement and did some research on the internet; thanks to the reference https://codehandbook.org/understanding-settimeout-inside-for-loop-in-javascript
I was able to delay all the promise calls in AngularJS, and the same approach can be applied in plain JS syntax as well.
I needed to send tasks to a TTP API, and they requested that a delay be added between calls:
_sendTasks: function(taskMeta) {
  var defer = $q.defer();
  var promiseArray = [];
  const delayIncrement = 1000 * 5;
  let delay = 0;
  for (let i = 0; i < taskMeta.length; i++) {
    // using 'let' is VERY IMPORTANT: with 'var' every closure would share
    // one variable and the same task would be sent in all http calls
    let requestTask = {
      "action": "SOME_ACTION",
      "userId": '',
      "sessionId": '',
    };
    // new Promise could be replaced with $q here; I haven't tested that.
    // The executor must call resolve/reject, otherwise $q.all never settles.
    promiseArray.push(new Promise(function(resolve, reject) {
      setTimeout(function() {
        $http.post(config.API_ROOT_URL + '/' + requestTask.action, requestTask)
          .then(resolve, reject);
      }, delay);
    }));
    delay += delayIncrement;
  }
  $q.all(promiseArray)
    .then(function(results) {
      // handle the results and resolve at the end
      defer.resolve(results);
    })
    .catch(error => {
      console.log(error);
      defer.reject("failed to execute");
    });
  return defer.promise;
}
Note: using the 'let' keyword in the for loop is VERY IMPORTANT; with 'var' the same task would be sent in all HTTP calls, because every closure would capture the same loop variable.
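A quick way to see that closure difference in isolation (a standalone sketch, unrelated to the API above):

// with 'var i' this logs 3, 3, 3 because all three callbacks share one 'i';
// with 'let i' each iteration gets its own binding and it logs 0, 1, 2
for (let i = 0; i < 3; i++) {
  setTimeout(function() { console.log(i); }, 0);
}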

the complications in javascript node.js?

I've been learning Node.js and JavaScript lately. I love Node.js a lot, and I am working on a project coded in Node.js, MongoDB, Cordova, etc. I notice that I need to use the Promise object in the code a lot.
I created a module in the project to query the db and bring back results. In every exported function I need to declare a local function, then use a promise. For example:
I have the following local functions in the Module:
var Initialize = function() {
  return new Promise(function(resolve, reject) {
    MongoClient.connect("db_url_conn", function(err, database) {
      // reject instead of just logging, so callers can handle the failure;
      // a try/catch around this would never see the async error anyway
      if (err) return reject(err);
      db = database;
      return resolve(db);
    })
  });
};
Then in every exported function in the module I need to do:
mongoOperation.prototype.getLength = function() {
  Initialize().then(function(db) {
    getSize(db).then(function(length) {
      console.log(length);
    });
  });
}
The question is:
Is it normal, given the nature of Node.js and JavaScript, to use promises this much?
Do I have any other options?
Since MongoClient.connect() already returns a promise, you can simplify your code:
var Initialize = function() {
  return MongoClient.connect("db_url_conn");
};
...
Initialize().then(function(db) { ... });
However, this will create a new client each time you call Initialize. Instead, you should reuse the client, for better performance and to leverage the built-in connection pool:
// connect once and cache the promise; every caller shares the connection
var client = MongoClient.connect("db_url_conn");
var Initialize = function() { return client };
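A short usage sketch under the same assumptions (the items collection name is hypothetical):

Initialize().then(function(db) {
  // every caller reuses the same cached connection promise
  return db.collection('items').count();
}).then(function(length) {
  console.log(length);
});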

Which is the "right" way to handle a response that came late

Let's say we have two buttons, each of which calls the following method:
var NUMBER_OF_IMAGE_REQUEST_RETRIES = 3;
var IMAGE_REQUEST_TIMEOUT = 3000;

processImage: function(image_data) {
  var main_response = $q.defer();
  var hash = getImageHash(image_data);
  var requestsCounter = -1;
  var requestImage = function() {
    $http.post(apiUrl, { params: { data: hash }, timeout: IMAGE_REQUEST_TIMEOUT })
      .then(function(response) {
        return main_response.resolve(response.data);
      }, function(error) {
        if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
          requestImage(); // retry the same request
        } else {
          return main_response.reject();
        }
      });
  };
  requestImage();
  return main_response.promise;
}
The method passes image-related data to the server; the server processes the data and then responds. Every time a user presses a different button, different image_data is sent to the server.
The problem:
The user presses button 1 and the method is called with image_data_1; then he/she immediately presses button 2 and the method is called with image_data_2. The processImage function is called by another method, say doSomethingWithTheResponse, which only cares about the user's latest action. But image_data_2 is processed faster by the server, so the client receives the response for image_data_2 before the one for image_data_1, and then wrongly treats the late image_data_1 response as belonging to the user's latest action. How can we ensure that the client always acts on the response that corresponds to the user's latest action?
Note: The hash is different for the different image_data requests.
I was thinking something like:
var oldhash = null;

processImage: function(image_data) {
  var main_response = $q.defer();
  var hash = getImageHash(image_data);
  oldhash = hash;
  var requestsCounter = -1;
  var requestImage = function(hash) {
    if (hash === oldhash) {
      $http.post(apiUrl, { params: { data: hash }, timeout: IMAGE_REQUEST_TIMEOUT })
        .then(function(response) {
          return main_response.resolve(response.data);
        }, function(error) {
          if (++requestsCounter < NUMBER_OF_IMAGE_REQUEST_RETRIES) {
            requestImage(hash);
          } else {
            return main_response.reject();
          }
        });
    } else {
      main_response.reject(); // a newer request has superseded this one
    }
  }
  requestImage(hash);
  return main_response.promise;
}
But I am not 100% sure that this is the right approach.
Simply disregard the previous requests.
You can create a repository of requests (an array or dictionary implementation is fine). Call .abort() on the previous ones once another request is made, i.e. when you add the new one to your storage.
If you want a dictionary, there is a good example here (it tackles a different topic, though), but here is a modified snippet of that code adapted to your case:
var _pendingRequests = {};

function abortPendingRequests(key) {
  if (_pendingRequests[key]) {
    _pendingRequests[key].abort();
  }
}
Here the key can be, say, a category of your action. You can define constants for it, or it can simply be the name of the button pressed. It can even be the URL of your request; it's completely up to you.
There is an excellent explanation of the whole concept here:
jquery abort() ajax request before sending another
https://stackoverflow.com/a/3313022/594992
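For completeness, a hypothetical registration helper (key, url, and data are placeholder parameters) showing how a jqXHR would get into _pendingRequests in the first place:

function makeRequest(key, url, data) {
  abortPendingRequests(key); // drop whatever was still pending under this key
  _pendingRequests[key] = $.ajax({ url: url, data: data });
  return _pendingRequests[key];
}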
If your UI allows initiating multiple actions while the processing of those actions is mutually exclusive, then you should probably use promises and track the active ones.
button1.addEventListener("click", function(evt) {
  startRunning( task1.start() );
});

button2.addEventListener("click", function(evt) {
  startRunning( task2.start() );
});
With a task runner like:
var runningTasks = [];

function startRunning( promise ) {
  // cancel anything still in flight before tracking the new task
  while (runningTasks.length > 0) {
    cancel( runningTasks.shift() ); // shift() removes from the front
  }
  runningTasks.push( promise );
}
Your cancel function can come from anything that can deal with promises, like Angular's service.cancelRequest, or you can write your own code that takes the promise and smartly breaks off its operation.
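A minimal sketch of such a home-grown cancel(), assuming the tasks are plain promises: each one is wrapped so that a result arriving after cancellation is simply ignored.

// wrap a promise so its eventual result can be ignored after cancellation
function makeCancelable(promise) {
  var canceled = false;
  var wrapped = promise.then(function(result) {
    if (!canceled) return result; // late results are silently dropped
  });
  wrapped.cancel = function() { canceled = true; };
  return wrapped;
}

function cancel(task) {
  if (task && typeof task.cancel === 'function') task.cancel();
}

Tasks would then be registered as startRunning(makeCancelable(task1.start())).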
Of course, if you're not using promises, you probably want to start doing so; but if you absolutely can't, you can use a manager object like:
button1.addEventListener("click", function(evt) { task1(); });
button2.addEventListener("click", function(evt) { task2(); });
with
var manager = [];

function cancelAll() {
  while (manager.length > 0) {
    var cancelfn = manager.shift(); // shift() removes from the front
    cancelfn();
  }
  return true;
}
function task1() {
  var running = cancelAll();
  manager.push(function() { running = false; });
  asyncDo(something1, function(result) {
    if (!running) return;
    // do your real thing
  });
}

function task2() {
  var running = cancelAll();
  manager.push(function() { running = false; });
  asyncDo(something2, function(result) {
    if (!running) return;
    // do your real thing
  });
}
And you can put cancels on as many aspects as you need. If you need to cancel running XHRs, you might be able to do so; if you have multiple steps in your result handling, cut off at the start of each step, etc.
This sounds like an ideal use case for promises. Basically, whenever a new request is made, you want to cancel any existing promises. I am not versed in AngularJS, but the following ng-specific links might prove useful:
Angularjs how to cancel resource promise when switching routes
Canceling A Promise In AngularJS
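A minimal sketch of that idea with Angular's $http and $q (apiUrl and getImageHash are carried over from the question): $http accepts a promise as its timeout config, and resolving that promise aborts the in-flight request.

var canceler = null;

function processImage(image_data) {
  if (canceler) canceler.resolve('superseded'); // aborts the previous request
  canceler = $q.defer();
  return $http.post(apiUrl, { data: getImageHash(image_data) }, { timeout: canceler.promise })
    .then(function(response) { return response.data; });
}

This way only the newest request can ever resolve; an aborted request rejects its promise, so doSomethingWithTheResponse never sees a stale result.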

Node module: Don't return until all async requests have finished

I'm new to Node and am having trouble understanding its async behavior. I know this is a very frequently addressed question on SO, but I simply can't get any of the solutions I've read to work in my context.
I'm writing a module which I want to return an object containing various data.
var myModule = (function () {
  var file,
    fileArray,
    items = [],
    getBlock = function (fileArray) {
      // get the data from the file that I want, return object
      return block;
    },
    parseBlock = function (block) {
      // [find various items in the block, put them into a "myItems" object,
      // then look the items up against a web api as below]...
      for (var i = 0, l = myItems.length; i < l; i++) {
        (function (i) {
          needle.post(MY_URL, qstring, function(err, resp, body) {
            if (!err && resp.statusCode === 200) {
              myItems[i].info = body;
              if (i === (myItems.length - 1)) {
                return myItems;
              }
            }
          });
        })(i);
      }
    },
    getSomeOtherData = function (fileArray) {
      // parse some other data from the file
    };
  return {
    setFile: function (file) {
      fileArray = fs.readFileSync(file).toString().split('\n');
    },
    render: function () {
      var results = [];
      results.someOtherData = getsomeOtherData();
      var d = getBlock();
      results.items = parseBlock(d);
      return results;
    }
  }
})();
When I call this module using:
myModule.setFile('myFile.txt');
var res = myModule.render();
the variable res has the value of the someOtherData property, but not the items property. I understand that my long-running HTTP requests have not completed and that Node just zooms ahead and finishes executing, but that's not what I want. I looked at a bunch of SO questions on this, and at using Q or queue-async, but with no success.
How do I get this module to return no data until all the requests have completed? Or is that even possible in Node? Is there a better way to design this to achieve my goal?
The first problem in your example is a naming mismatch: render calls getsomeOtherData(), but the function is declared as getSomeOtherData (note the capital S), so that call will fail. Make the two names match.
Presuming you have made the names match, your next problem is that you're processing data from a file, so I presume you're reading the contents of the file and then parsing it.
If that is the case, there are sync reads you can use to force synchronous behavior; however, I wouldn't recommend them.
You really want to structure your program around events. You're thinking in the paradigm of 'call a function; when it returns, continue'. You need to be thinking more along the lines of 'start a process and add a listener; the listener then handles the reply'.
This works very well for comms. You receive a request. You need to reply based on the contents of a file. So you start the read process with two possible outcomes: it calls the completion function or the error function. Either one then calls the reply function to decide how to reply to the request.
It's important not to block as you will be blocking the thread via which all processes are handled.
Hope that helps; if not, add some comments and I will try to elaborate.
Have a look at this answer to another question for a good example of processing a file using the standard listeners. All async calls have a listener concept for what can happen; all you need to do is pass a function name (or an anonymous function if you prefer) when you call them.
A quick example (based on the node.js stream.Readable API):
fs.createReadStream(filename, {
  'flags': 'r'
}).addListener("data", function(chunk) {
  // do your processing logic
}).addListener("end", function(chunk) {
  // do your end logic
  response(...);
}).addListener("error", function(chunk) {
  // do your error logic
  response(...);
}).addListener("close", function() {
  // do your close logic
});

function response(info) {
}
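To tie this back to the original module: a minimal sketch of an asynchronous render(), assuming the naming is fixed, the same needle.post call as in the question, and a hypothetical parseBlockItems() that only extracts the items from the block. Each lookup is wrapped in a Promise so Promise.all can wait for all of them before handing back the results:

function lookupItem(item) {
  return new Promise(function(resolve, reject) {
    needle.post(MY_URL, qstring, function(err, resp, body) {
      if (err || resp.statusCode !== 200) return reject(err);
      item.info = body;
      resolve(item);
    });
  });
}

// render now hands back a promise instead of a half-filled object
render: function () {
  var results = { someOtherData: getSomeOtherData() };
  var myItems = parseBlockItems(getBlock()); // hypothetical: extracts items only
  return Promise.all(myItems.map(lookupItem)).then(function (items) {
    results.items = items;
    return results;
  });
}

// usage: the data is only available once the promise resolves
myModule.render().then(function (res) {
  // res.items is complete here; all lookups have finished
});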

angularjs - $http reading json and wait for callback

I am trying to read data from a JSON file and wait until the data has been fetched into $scope.urls.content. So I wrote this code:
$scope.urls = { content: null };

$http.get('mock/plane_urls.json').success(function(thisData) {
  $scope.urls.content = thisData;
});
Now I am trying to write something like a callback, but it doesn't work. How can I do that? Is there a function for this? I am running out of ideas.
Do you mean this?
$http.get('mock/plane_urls.json').success(function(thisData) {
  $scope.urls.content = thisData;
  $scope.yourCallback();
});

$scope.yourCallback = function() {
  // your code
};
You want to work with promises and $resource.
As $http itself returns a promise, all you have to do is chain off its return value. Simple as that:
var promise = $http.get('mock/plane_urls.json').then(function(response) {
  // with .then (unlike .success) the handler receives the full response
  $scope.urls.content = response.data;
  return 'something';
});

// somewhere else in the code
promise.then(function(data) {
  // receives the value returned from the http handler
  console.log(data === "something");
});
I made a pretty simple fiddle here.
But if you need this info in several places, you should expose it through a service, so anyone can grab the result and process it, i.e.:
service('dataService', function($http) {
  var requestPromise = $http.get('mock/plane_urls.json').then(function(d) {
    return d.data;
  });

  this.getPlanesURL = function() {
    return requestPromise;
  };
});
// and anywhere in code where you need this info
dataService.getPlanesURL().then(function(planes) {
  // do something with the planes URL
  $scope.urls.content = planes;
});
Just an important note: the service I mocked up will cache and always return the same data. If you need to fetch this JSON fresh many times, you should go with $resource.
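For reference, a minimal sketch of the $resource variant (this assumes the ngResource module is included and $resource is injected):

// each call to get() issues a fresh request, unlike the cached service above
var PlaneUrls = $resource('mock/plane_urls.json');

PlaneUrls.get(function(urls) {
  $scope.urls.content = urls;
});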
