Building an object from multiple asynchronous sources in node.js/express.js - javascript

I am having a tough time finding a solution to my problem online and was hoping someone here might be able to help me. I have an express route that makes a few API requests for different JSON objects. I would like to build a JSON response for my client-side view, but all of my attempts so far either yield stale data from a previous request or no data at all.
So my question to you JavaScript pros using Node/Express: how do you combine multiple sources of JSON objects into one single object to be returned to the client in one response? Is there a library or some callback magic that you use?
Thanks in advance for any and all help!

Async is one of the more popular libraries for this purpose, and there are many other async and promise libraries that can help as well. It offers several methods with different behaviors depending on what you need; I think the series method is what you want here, but check the documentation carefully.
var async = require('async');
var request = require('request');

app.get('/endpoint', function(req, res){
  async.series([
    function(callback){ request.get('url1', callback); },
    function(callback){ request.get('url2', callback); },
    function(callback){ request.get('url3', callback); }
  ],
  function(err, results){
    // handle any error from the requests
    if (err) { return res.send(500); }
    // results is an array holding the value(s) returned by each request
    var processedData = {
      a: results[0],
      b: results[1],
      c: results[2]
    };
    res.send(processedData);
  });
});
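The same pattern also works with plain promises instead of the async library. A rough, untested sketch, where requestJson is a hypothetical helper that wraps request.get in a promise and the URLs are placeholders:

function requestJson(url) {
  return new Promise(function(resolve, reject) {
    request.get({ url: url, json: true }, function(err, response, body) {
      if (err) return reject(err);
      resolve(body);
    });
  });
}

app.get('/endpoint', function(req, res) {
  // Promise.all resolves once every request has finished (or rejects on the first error)
  Promise.all([
    requestJson('url1'),
    requestJson('url2'),
    requestJson('url3')
  ]).then(function(results) {
    res.send({ a: results[0], b: results[1], c: results[2] });
  }).catch(function(err) {
    res.send(500);
  });
});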
You could also do this yourself (and it is good practice for learning how to organize your node.js code). Callback Hell (callbackhell.com) is a good write-up on using named functions and modules to organize callbacks.
Here is one possible way to do it (not tested):
app.get('/endpoint', function(req, res){
  var dataSources = ['url1', 'url2', 'url3'];
  var requestData = [];
  var processRequestData = function(){
    // do your stuff here: build processedData from requestData
    res.send(processedData);
  };
  dataSources.forEach(function(dataSource){
    request.get(dataSource, function(err, result){
      // handle error
      // note: responses can arrive in any order
      requestData.push(result);
      // once every request has come back, process and send the response
      if(requestData.length === dataSources.length){
        processRequestData();
      }
    });
  });
});

Async.js (https://github.com/caolan/async) can help with tasks like this. A quick example:
var async = require('async');
var request = require('request');

app.get('/resource', function(req, res){
  // object to send back in the response
  var results = {};
  // helper function to make a request and add the data to the results object
  var getData = function(url, propertyName, next){
    request.get({ url: url, json: true }, function(err, response, body){
      // let async.js know if there is an error
      if(err) return next(err);
      // save the data to the results object
      results[propertyName] = body;
      // async.js needs to be notified when each operation is complete,
      // so call the callback without passing any data back to it
      next();
    });
  };
  // array of operations to execute in series or parallel
  var operations = [];
  operations.push(function(next){
    getData('http://url-one', 'users', next);
  });
  operations.push(function(next){
    getData('http://url-two', 'posts', next);
  });
  // async.js has a few options on how to execute your operations - here we use series
  async.series(operations, function(err){
    if(err){
      throw err;
    }
    // if we get to this point, all of the operations have executed and called
    // next() without any errors - send the response with the populated results object
    res.send(results);
  });
});
I haven't actually tried this code yet, but it should give you a good idea of how to do it.

Related

How to prevent race conditions in node.js?

Can someone explain to me how to prevent race conditions in node.js with Express?
For example, I have these two routes:
router.get('/addUser/:department', function(req, res) { ...})
router.get('/deleteUser/:department', function(req, res) { ...})
Both functions use a non-blocking I/O operation (like writing to a file or a database).
Now suppose someone calls 'addUser' with department 'A' while someone else tries to delete all users of department 'A'. How can I solve this (or similar) race conditions?
How can I solve the problem if every user has its own file/database record?
How can I solve the problem if I have a single user file (on the filesystem) that I have to read, alter, and write back?
Note: this is just an example for understanding. No optimization tips needed here.
To achieve this, you need to implement some coordination between the two operations.
This can be done with a simple queue of operations, so that each request is processed in order.
The downside is that a request waiting in the queue gets a delayed response (and may even time out).
A simple "meta" implementation is:
const events = require('events');

const operationQueue = new Map();
const eventEmitter = new events.EventEmitter();

router.get('/addUser/:department', function(req, res) {
  const customEvent = `addUser-${new Date().getTime()}`;
  const done = () => {
    res.send('done');
  };
  eventEmitter.once(customEvent, done);
  operationQueue.set(customEvent, () => addUser(customEvent, req));
});

router.get('/deleteUser/:department', function(req, res) {
  const customEvent = `deleteUser-${new Date().getTime()}`;
  const done = () => {
    res.send('done');
  };
  eventEmitter.once(customEvent, done);
  operationQueue.set(customEvent, () => deleteUser(customEvent, req));
});

function addUser(customEvent, req){
  // do the logic
  eventEmitter.emit(customEvent, {done: true});
}

function deleteUser(customEvent, req){
  // do the logic
  eventEmitter.emit(customEvent, {done: true});
}

// not the best performance: poll the queue and run the oldest pending operation
// (a Map has no shift(), so take the first inserted entry and remove it before running it)
setInterval(() => {
  const next = operationQueue.entries().next().value;
  if (next) {
    const [event, operation] = next;
    operationQueue.delete(event);
    operation();
  }
}, 1);
Of course, tools like a database or a Redis-backed queue would fit better than this solution in terms of robustness and failover.
(This is a very broad question.)
Typically, one would use a database (instead of regular text files) and make use of its built-in locking mechanisms.
Example of locking mechanisms in the Postgres database management system: https://www.postgresql.org/docs/10/static/explicit-locking.html
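For instance, with Postgres you can take row-level locks inside a transaction so that concurrent operations on the same department have to wait for each other. A rough, untested sketch using the node-postgres (pg) client; the users table and its columns are assumptions for illustration:

const { Pool } = require('pg');
const pool = new Pool(); // connection settings come from the usual PG* environment variables

// Delete all users of a department while holding row locks, so that a
// concurrent update on those rows must wait until this transaction finishes.
async function deleteUsersInDepartment(department) {
  const client = await pool.connect();
  try {
    await client.query('BEGIN');
    // lock the matching rows for the duration of the transaction
    await client.query('SELECT id FROM users WHERE department = $1 FOR UPDATE', [department]);
    await client.query('DELETE FROM users WHERE department = $1', [department]);
    await client.query('COMMIT');
  } catch (err) {
    await client.query('ROLLBACK');
    throw err;
  } finally {
    client.release();
  }
}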

View requires multiple API endpoints -- how can I speed up requests in non-blocking fashion?

I need to render a view that depends on multiple API endpoints. What is the best practice? A waterfall of callbacks seems slow. My only thought is that I could have each request reference a callback that waits until all three are complete before rendering; that way I can initiate all the GET requests in a non-blocking fashion.
api is just a module that makes 'GET' calls to the API and returns the result.
Thank you in advance!
app.get('/:name/:id', function(req, res){
  var name = req.param('name'),
      id = req.param('id');
  api.getName(name, id, function(err, project){
    if (err)
      return res.render('404', { error : "Non-existent"});
    api.getNameComments(name, id, function(err, comments){
      api.getNameLikes(name, id, function(err, likes){
        res.render('project', {
          project: project.response,
          comments: comments.response,
          likes: likes.response
        });
      });
    });
  });
});
As long as the calls don't depend on each other, you can run them all in parallel using the async library, https://github.com/caolan/async .
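A rough, untested sketch of how that could look with async.parallel, reusing the api module from the question (the exact callback shapes are assumptions):

var async = require('async');

app.get('/:name/:id', function(req, res){
  var name = req.param('name'),
      id = req.param('id');

  // run the three API calls concurrently and collect their results by key
  async.parallel({
    project: function(cb){ api.getName(name, id, cb); },
    comments: function(cb){ api.getNameComments(name, id, cb); },
    likes: function(cb){ api.getNameLikes(name, id, cb); }
  }, function(err, results){
    if (err) return res.render('404', { error: "Non-existent" });
    res.render('project', {
      project: results.project.response,
      comments: results.comments.response,
      likes: results.likes.response
    });
  });
});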

Issuing internal express request

I'm curious if there is any way to issue an internal request in express without going through all the actual overhead of a real request. An example probably shows the motivation better:
app.get("/pages/:page", funciton(req, res)
{
database_get(req.params.page, function(result)
{
// "Page" has an internal data reference, which we want to inline with the actual data:
request(result.user_href, function(user_response)
{
result.user = user.response.json;
res.send(result);
});
});
});
/// ....
app.get("/user/:name", function() ... );
So what we have here is a route whose data requires making another request to get further data. I'd like to access it by just doing something like app.go_get(user_href) instead of a heavyweight real request. Now, I've asked around and the going strategy seems to be "split out your logic". However, that actually requires me to duplicate the logic, since the recursive data is properly referenced through URLs (as in the example above). So I end up having to do my own routing and duplicate routes everywhere.
Can you avoid the overhead of a real request? No. If you need the href from the first request in order to get a user object, you absolutely need to follow that link by making a second "real request."
If you have a database of users, you CAN avoid the request by including the user's ID on the page, and making a regular database call instead of following your own href.
Demo refactor on splitting out logic:
// Keep as little logic as possible in your routes:
app.get('/page/:page', function(req, res){
  var pageId = req.params.page;
  makePage(pageId, function(err, result){
    if(err){ return res.send(500); }
    res.send(result);
  });
});

// Abstract anything with a bunch of callback hell:
function makePage(pageId, callback){
  database_get(pageId, function(result) {
    // Since it's only now you know where to get the user info, the second request is acceptable
    // But abstract it:
    getUserByHref(result.user_href, function(err, data){
      if(err){ return callback(err); }
      result.user = data.json;
      callback(null, result);
    });
  });
}
// Also abstract anything used more than once:
function getUserByHref(href, callback){
  request(href, function(err, response, body){
    // check err first: if the request itself failed, response may be undefined
    if(err || response.statusCode != 200){
      return callback(err || new Error('Request failed with status ' + response.statusCode));
    }
    var user = JSON.parse(body);
    return callback(null, user);
  });
}
// It sounds like you don't have local users.
// If you did, you would abstract the database call, and use getUserById:
function getUserById(id, callback){
  db.fetch(id, function(err, data){
    return callback(err, data);
  });
}
I've made a dedicated middleware for this; see my detailed answer here: https://stackoverflow.com/a/59514893/133327

NodeJs, pattern for sync development

I am new to the NodeJs and Express frameworks. I understand that Node works with only one thread on the server side, and I have noticed this causes me some problems in developing my application correctly.
In my routes folder, I have a file index.js.
This file manages the navigation requested by the user from app.js.
So I decided to create a route function "test".
In this function, I had just this code:
exports.test = function(req, res){
  res.render('test', {});
};
So simple, so easy. That renders the template test.jade from my views folder. Great!
But I want to make the process more complex: in this test route function, I want to load some content from my MySQL database.
For that, I have created a Models folder inside the node_modules folder.
Inside, I have only 2 files. The first, mysqlConnection.js, exports the variable DB in order to make queries.
var mysql = require('mysql');

var DB = mysql.createConnection({
  host     : 'localhost',
  user     : 'root',
  password : '',
  database : 'test'
});

DB.connect();

module.exports = DB;
In the second file, articles_class.js, I just have
var DB = require('models/mysqlConnection');

var Article = function() {
  this.getArticles = function() {
    DB.query('SELECT * FROM articles;', function(err, rows, fields) {
      if (err)
        throw err;
      else {
        console.log(rows);
        return (rows);
      }
    });
  };
};

module.exports = Article;
Back in my test route function:
I just want to load all of the articles. Very basic. But not easy.
Why?
Because before the query is finished, NodeJs responds to the client with the rendered template, but, unfortunately, without the rows loaded. An asynchronous problem... MySQL doesn't block the NodeJs JavaScript instructions.
The code of the function:
exports.test = function(req, res){
  var Article = require('models/articles_class');
  var a = new Article();
  var articles = a.getArticles();
  console.log(articles); // undefined
  res.render('test', {});
};
I found other questions on Stack Overflow which discuss this problem: make synchronous queries, work with callbacks, etc.
But here, even if I try to manage this problem with callbacks, it doesn't seem to work, because I need to send the client the template together with the articles, yet I can't block the process with a synchronous method.
I am very lost... I don't understand how I should structure my application, and I can't come up with a good procedure for managing the SQL queries. Is there a pattern or a specific method for this?
Or perhaps I should only make Ajax requests from the client: I load the "test" template, and in a JavaScript file in the public folder I ask the server to load the articles content and wait for the success callback? That doesn't feel very clean...
Thanks for your answers. The other answers I have found didn't help me understand how to manage this with NodeJs.
Pass a callback to getArticles:
exports.test = function(req, res){
  var Article = require('models/articles_class');
  var a = new Article();
  a.getArticles(function(articles) {
    console.log(articles); // the rows from the database
    res.render('test', { articles: articles });
  });
};
Changes to your getArticles function:
var DB = require('models/mysqlConnection');

var Article = function() {
  this.getArticles = function(callback) {
    DB.query('SELECT * FROM articles;', function(err, rows, fields) {
      if (err)
        throw err;
      else {
        console.log(rows);
        callback && callback(rows);
      }
    });
  };
};

module.exports = Article;
Express only returns the template through the open HTTP connection once res.render() is called, so it's just a matter of passing the render step as a callback through your call stack, ensuring it is only invoked after you have your database rows.
Since we are working with callbacks, nothing blocks your application.

Meteor: Compensate for Asynchronicity

I'm trying to make a method in Meteor which reads a file and outputs the result. Below is my code.
Server:
Meteor.methods({
  retTemplate: function(templateName){
    var fs = Npm.require('fs');
    var ret;
    fs.readFile("./../../../../../client/" + templateName + ".html", {encoding: 'utf8'}, function(err, data) {
      if(err)
        return "ERROR";
      console.log(data);
      return data;
    });
  }
});
});
Client:
Meteor.call('retTemplate', submitName, Meteor.user(), function(e, r){
console.log(r);
$('#editTempData').val(r);
});
The code on the server side works (the server-side console is updated with the content of the html file), but the method is returning undefined to the client. I believe this is due to Meteor's asynchronous functions. Is there a way around this? I've been fiddling around for a bit to no avail.
Thanks.
EDIT: I think it would work if there weren't a readFile call involved, because that sets up a sort of double-asynchronous call, which might be the problem, but I don't know how to fix it.
You can't return a value from inside an asynchronous callback and have the method return it to the client like that. Here is an alternative (but not ideal) solution.
Create a new Meteor.Collection on both the client and the server and simply publish/subscribe to this collection. Save the data returned from fs.readFile() into your collection, and your client will be automagically notified when this occurs.
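A rough, untested sketch of what that could look like; the collection and method names are made up for illustration, and Meteor.bindEnvironment is used so the plain fs.readFile callback is allowed to touch the collection on the server:

// shared (client and server)
Templates = new Meteor.Collection('templates');

// server
Meteor.publish('templates', function () {
  return Templates.find();
});

Meteor.methods({
  loadTemplate: function (templateName) {
    var fs = Npm.require('fs');
    fs.readFile(
      "./../../../../../client/" + templateName + ".html",
      {encoding: 'utf8'},
      Meteor.bindEnvironment(function (err, data) {
        if (err) return;
        // store the file contents; the change is pushed to subscribed clients
        Templates.upsert({ name: templateName }, { $set: { content: data } });
      }, function (e) { console.log(e); })
    );
  }
});

// client
Meteor.subscribe('templates');
Meteor.call('loadTemplate', submitName);
// later, reactively (e.g. in a template helper or autorun):
var doc = Templates.findOne({ name: submitName });
if (doc) $('#editTempData').val(doc.content);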
I found the answer. I needed to use Futures.
Working server:
Meteor.methods({
  retTemplate: function(templateName){
    var Future = Npm.require('fibers/future');
    var fut = new Future();
    var fs = Npm.require('fs');
    fs.readFile("./../../../../../client/" + templateName + ".html", {encoding: 'utf8'}, function(err, data) {
      if(err)
        return fut['return']("ERROR"); // resolve the future even on error, otherwise fut.wait() never returns
      console.log(data);
      fut['return'](data);
    });
    return fut.wait();
  }
});
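For what it's worth, newer Meteor releases also provide Meteor.wrapAsync, which wraps a Node-style callback function so it can be called in a blocking style inside a method. A short, untested sketch of the same method using it:

Meteor.methods({
  retTemplate: function(templateName){
    var fs = Npm.require('fs');
    // wrapAsync turns fs.readFile(path, options, callback) into a call that
    // blocks the current fiber and returns the data (or throws the error)
    var readFile = Meteor.wrapAsync(fs.readFile, fs);
    return readFile("./../../../../../client/" + templateName + ".html", {encoding: 'utf8'});
  }
});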
