How to actually intercept res.render - javascript

I want to intercept every call to render, do my own stuff, and then proceed with the original render method. I know this can easily be done through a middleware like this:
function (req, res, next) {
    var _render = res.render;
    res.render = function () {
        // custom stuff
        _render.apply(this, arguments);
    };
    next();
}
However, it seems more efficient to change the prototype of the response object instead of replacing res.render on every request. I tried that with no success: when logging http.ServerResponse.prototype there's no trace of any render method.
Finally, I've tried to intercept app.render instead, like this:
var _render = app.render;
app.render = function () {
    // `this` refers to app instead of res...
    _render.apply(this, arguments);
};
That does fulfill my criterion of only being done once, but it is called on the app object rather than the res object, which means I can't access the res or req objects.
Basically what I think I would like to do is something like:
var _render = something.response.render;
something.response.render = function (view, data, callback) {
    // Access res.*, as this.*
    _render.call(this, view, data, callback);
};
Any ideas how to achieve that?

I guess it depends on the Express version. Try this for 3.x:
var response = require("express").response;
var _render = response.render;
response.render = function () {
    // do your stuff
    _render.apply(this, arguments);
};
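This runs once at startup and applies to every response. As a minimal sketch (assuming Express 3.x, where require("express").response is the response prototype; the renderedAt local is just a made-up example):

var express = require("express");
var _render = express.response.render;

express.response.render = function (view, options, callback) {
    // Express also allows render(view, callback), so normalize the arguments first
    if (typeof options === "function") {
        callback = options;
        options = {};
    }
    options = options || {};

    // example only: log every render and inject an extra template variable
    console.log("rendering view:", view);
    options.renderedAt = new Date().toISOString();

    // hand off to the original render; `this` is still the res object
    return _render.call(this, view, options, callback);
};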

Related

sails.js node.js Parse JSON on controller

In my controller, MapController, I have a function that parses remote JSON files and, based on an if-else structure, pushes some values into an array called "parsewebservice". Apparently everything works, but console.log(parsewebservice) prints an empty array at the point where I call it. When I put the console.log inside the forEach it does print the values, but cluttered and repeated, so that is not the right way.
I'd like to know why the values pushed into "parsewebservice" aren't in the array after it is populated, and what the correct way to do this would be.
Here is my code below:
/**
 * MapController
 *
 * @description :: Server-side logic for managing Maps
 * @help        :: See http://sailsjs.org/#!/documentation/concepts/Controllers
 */
module.exports = {
    index: function(req, res, next) {
        Data.find(function foundData(err, datas) {
            if (err) return next(err);
            var parsewebservice = [];
            datas.forEach(function(data, index) {
                var req = require("request");
                var url = data.address + "?f=pjson";
                req(url, function(err, res, retorno) {
                    if (err) {
                        console.log(err);
                    } else {
                        var camadas = JSON.parse(retorno);
                        if (camadas.mapName) {
                            camadas.layers.forEach(function(campo, i) {
                                if (campo.subLayerIds != null) {
                                } else if (campo.subLayerIds == null) {
                                    parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
                                }
                            });
                        } else if (camadas.serviceDataType) {
                            parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
                        } else if (camadas.type) {
                            parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
                        }
                    }
                });
            });
            console.log(parsewebservice);
        });
    },
};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose access to the original req object!
So, you need to run a list of async tasks and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as written. Your console.log at the bottom runs before any of the callbacks you pass to your external requests.
The right way to do this is to use promises. It looks like you are using the request library, which only accepts callbacks and does not return promises. You can create your own promise wrapper for it, or use an alternative library (several are recommended on its page).
I don't want to write a whole intro-to-promises right here, so what I will do is give you a less pretty, but maybe more understandable way to run some code at the completion of all your requests.
Data.find(function foundData(err, datas) {
    if (err) return next(err);
    var parsewebservice = [];
    // here we will write some code that we will run once per returned data
    var processResponse = function(resp) {
        parsewebservice.push(resp);
        if (parsewebservice.length >= datas.length) {
            // we are done, that was the final request
            console.log(parsewebservice);
            return res.send({data: parsewebservice}); // or whatever
        }
    };
    datas.forEach(function(data, index) {
        var request = require("request");
        var url = data.address + "?f=pjson";
        request(url, function(err, res, retorno) {
            // do some processing of retorno...
            // call our function to handle the result
            processResponse(retorno);
        });
    });
    console.log(parsewebservice); // still an empty array here
});
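For reference, here is a rough sketch of the promise-based version mentioned above, using native Promises and a made-up fetchLayer helper (the per-layer if/else processing from the question would go where the comment indicates):

var request = require("request");

// hypothetical helper: wraps a single request call in a Promise
function fetchLayer(data) {
    return new Promise(function(resolve, reject) {
        request(data.address + "?f=pjson", function(err, response, retorno) {
            if (err) return reject(err);
            resolve(JSON.parse(retorno)); // or whatever processing you need
        });
    });
}

Data.find(function foundData(err, datas) {
    if (err) return next(err);
    Promise.all(datas.map(fetchLayer))
        .then(function(results) {
            // every request has answered at this point
            console.log(results);
            return res.send({data: results});
        })
        .catch(next);
});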
I solved the problem.
The "request" module is asynchronous, so we need to wait for it to respond and only then send the response to the view.
To do this we created a function called "foo" containing the forEach and the request calls, gave it a callback, and sent the response (res.view) from within that callback, so the controller only responds after "foo" has finished. That way we were able to parse the JSON from the "data" collection with forEach and the "request" module and send the objects to the view.
Many thanks to all who have helped me, my sincere thanks.
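A rough sketch of what that might look like (the name foo and the "map" view name are just illustrative; the pending counter mirrors the completion check shown above):

index: function(req, res, next) {
    Data.find(function foundData(err, datas) {
        if (err) return next(err);
        var request = require("request");

        // foo runs all the requests and calls back once every one has answered
        var foo = function(callback) {
            var parsewebservice = [];
            var pending = datas.length;
            datas.forEach(function(data) {
                request(data.address + "?f=pjson", function(err, response, retorno) {
                    if (!err) {
                        // push the parsed result, as in the original code
                        parsewebservice.push(JSON.parse(retorno));
                    }
                    pending -= 1;
                    if (pending === 0) callback(parsewebservice);
                });
            });
        };

        foo(function(parsewebservice) {
            // respond only after every request has come back
            res.view("map", {parsewebservice: parsewebservice});
        });
    });
}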

Return object from within Callback function

This question might sound like a duplicate of the one here, but my scenario is quite different. I have a getSystemIds.js file like this:
var system_ids_list = {};
var getSystemIDs = function(req, callback) {
    var client = //creating an object using internally developed node library
    client.request('sql_parameter', {'Query.ID' : query_number},
        function(err, req, res){
            //Do some stuff to calculate necessary values
            system_ids_list[key_value] = value_array;
            i += 1;
        }
        return(req, callback)
    }
    )
};
module.exports = getSystemIDs;
Now, as shown in the answer in the link above, I am doing this in app.js
appSSL.get('/sysids', function(req, res) {
    var sys_ids = system_ids_list(req, function(err, sys_ids) {
        res.render('sysids', sys_ids);
    });
});
I am not getting any specific error but the web page never loads as if something is stuck in the process or it does not know where to go next. Can someone help me figure out what would be the best way to do this?
Your getSystemIDs() function never calls the callback that was passed to it, so the caller of getSystemIDs() never gets a result - thus nothing ever happens on the request.
Change it to this:
var system_ids_list = {};
var getSystemIDs = function (req, callback) {
    var client = //creating an object using internally developed node library
    client.request('sql_parameter', {'Query.ID': query_number}, function (err, req, res) {
        //Do some stuff to calculate necessary values
        system_ids_list[key_value] = value_array;
        i += 1;
        // call the callback now to communicate back the async results
        callback(null, system_ids_list);
    });
};
module.exports = getSystemIDs;
The way you have your code structured, system_ids_list will accumulate more and more values each time getSystemIDs() is called. That seems a bit of an odd way to structure things so I'm pointing that out in case that is not really what you intend.
Also, your getSystemIDs() function does not return anything so you should change this:
appSSL.get('/sysids', function(req, res) {
    var sys_ids = system_ids_list(req, function(err, sys_ids) {
        res.render('sysids', sys_ids);
    });
});
to this, to make it less misleading about what is going on:
appSSL.get('/sysids', function(req, res) {
    system_ids_list(req, function(err, sys_ids) {
        res.render('sysids', sys_ids);
    });
});
And, if res.render() is from a system like ExpressJS, then you probably want to be passing an object and naming a template:
res.render('sometemplate.html', {sysids: sys_ids});
If you want system_ids_list to not accumulate values, but to return a fresh value each time, you can define it within your function like this:
var getSystemIDs = function (req, callback) {
    var system_ids_list = {};
    var client = //creating an object using internally developed node library
    client.request('sql_parameter', {'Query.ID': query_number}, function (err, req, res) {
        //Do some stuff to calculate necessary values
        system_ids_list[key_value] = value_array;
        i += 1;
        // call the callback now to communicate back the async results
        callback(null, system_ids_list);
    });
};
module.exports = getSystemIDs;

Node.js with mocha test

If I have a function like the following:
function foo(request, response) {
    var val = request.param('data');
    // code here
}
how can I create a Mocha test for it that passes the request and response parameters?
The function you wrote above can be seen as a controller - it handles the request and gives back a response.
There are a few things you can do:
You can test the route itself - make an HTTP request to the endpoint that uses this controller and check that it behaves correctly; you can use the request/supertest/superagent libraries, for example.
You can mock the request and response objects and test the code directly - it doesn't require a server to be started, but you need to spend some time mocking out the objects correctly.
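For the first option, a minimal sketch with supertest (this assumes your Express app is exported from app.js without calling .listen(), and that foo is mounted at a hypothetical /foo route):

var request = require("supertest");
var app = require("../app"); // your Express app

describe("GET /foo", function () {
    it("responds correctly", function (done) {
        request(app)
            .get("/foo")
            .query({ data: 42 })   // ends up as request.param('data') in the handler
            .expect(200)
            .end(function (err, res) {
                if (err) return done(err);
                // assert on res.body here
                done();
            });
    });
});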
It depends on what your "code here" does, and what you want to do:
Test the logic
If you can separate the code into a method that accepts "val" and returns a result, then just test that.
Usually, getting the params from a request and passing the results to a response is a no-brainer and not worth testing.
foo: function (req, res) {
    // Do you really need to test that?
    var data = req.param("data");
    // You probably want to test that
    var bar = doFooLogic(data);
    // Do you really need to test that?
    res.json(bar);
},
doFooLogic: function (data) {
    ...
}
And a test like:
describe("foo's logic", function () {
    it("does stuff", function () {
        // Just test the logic.
        // This assumes you exposed doFooLogic, which is probably acceptable
        var bar = doFooLogic(42);
        assert(bar.xxxx); // Whatever
    });
});
Test the req/response function:
If you really want to test that, and you're just using "param" on the request object, you can easily mock the request/response (this is JS, you just need to pass in something that has the same functions available):
describe("...", function () {
    it("does whatever", function () {
        var mockRequest = {
            param: function (key) {
                if (key === "data") {
                    return 42;
                } else {
                    throw new Error("Unexpected key: " + key);
                }
            }
        };
        var mockResponse = {
            // mock whatever function you need here
            json: function (whatever) {
                assert(whatever.xxxx); // compare what you put into the response, for example
            }
        };
        // Then do the call
        foo(mockRequest, mockResponse);
        // The hard part is then how to test the response was passed the right stuff.
        // That's why testing the logic is probably easier.
    });
});
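One way around that hard part, sketched here, is to have the mock capture what it was given and assert afterwards (this assumes foo calls res.json synchronously):

it("passes the right payload to res.json", function () {
    var captured;
    var mockRequest = { param: function () { return 42; } };
    var mockResponse = {
        json: function (payload) {
            captured = payload; // just record it; assert afterwards
        }
    };

    foo(mockRequest, mockResponse);

    // the assertion lives outside the mock, so a failure is reported in the test itself
    assert(captured !== undefined);
    assert(captured.xxxx); // whatever you expect foo to have produced
});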
I think you can simply mock it with something like Sinon.js. It should be something like this:
describe('...', function () {
    it('should test something', function (done) {
        var mock = sinon.stub(request, "param").withArgs("data").returns("Whatever");
        var val = request.param('data');
        // do your logic with that value
        assert.equal(/* whatever values you want to check */);
        mock.restore();
        done();
    });
});
And you don't have to worry about the request's content.

Nodejs delay return for "require"

My setup is as follows:
Nodejs Server
server.js requires utils.js
utils.js loads data from mongodb into memory and exports it
server.js uses a variable that utils.js exports
The issue that I am worried about is the fact that the mongodb call is asynchronous. utils.js returns before the mongodb call is finished, meaning that server.js will use an undefined variable when it continues execution after the require.
What is the best to address this issue? The only thing I could think of is wrapping my server.js code in a giant callback and pass that to the function that makes the mongodb call. It seems a bit messy to me, is there a better way to do it?
Code:
server.js
var utils = require("./modules/utils.js");
console.log(utils);
//Do whatever
utils.js
var mods = [];
var db = require("mongojs").connect("localhost", ["modules"]);
db.modules.find({}, function(err, modules){
    mods = modules;
});
module.exports = mods;
What you're referring to is called "callback hell". The easiest way to get out of that is to use a Promise library that simplifies it.
I used a node package called bluebird.
var mysql = require("mysql");
var hash = require("password-hash");
var Promise = require("bluebird");
var settings = require("../settings");

Promise.promisifyAll(require("mysql/lib/Connection").prototype);
Promise.promisifyAll(require("mysql/lib/Pool").prototype);

var db_config = {
    user: settings.db.user,
    password: settings.db.password,
    database: settings.db.database
};

var con = mysql.createPool(db_config);

function query(sql) {
    return con.getConnectionAsync().then(function(connection) {
        return connection.queryAsync(sql)
            .spread(function(rows, fields) {
                return rows;
            }).finally(function() {
                connection.release();
            });
    });
}
This is a very basic database module I wrote that uses bluebird to promisify the database object.
And here's how it's used. It returns a promise! The benefit here is that not only does it remove the clutter of callback hell, it also makes sure your code runs asynchronously and the promise does not resolve before things have finished processing - in this case, a database query.
function login(user) {
    // check for player existence
    var query = 'SELECT p.name,p.password,p.id, pd.x, pd.y FROM player p INNER JOIN player_data pd ON p.id = pd.id WHERE p.name=' + mysql.escape(user);
    return db.select(query).then(function(rows) {
        if (!rows.length) return;
        return [
            rows[0]
        ];
    });
}
Notice how it returns a promise, so you call the then or spread method to get the database values you just queried, without having to worry about whether rows will be undefined by the time you want to use them.
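A caller then consumes that promise, for example in a route handler (the route path and response shape here are just illustrative, assuming an Express app):

app.post("/login", function (req, res, next) {
    login(req.body.username).then(function (result) {
        if (!result) return res.send("unknown player");
        // result[0] is the row returned by the query inside login()
        res.json(result[0]);
    }).catch(next);
});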
As you say, you need to wrap the entire server in a callback. Node.js works this way; it's asynchronous by nature. A server needs to pass through three stages: init, serve and deinit. In your case, the database code goes inside the init stage. You could write something like this.
//server.js
var utils = require ("./modules/utils");

var init = function (cb){
    //init the utils module, the http server and whatever you need
    utils.init (function (error){
        if (error) return handleError (error);
        cb ();
    });
};

var serve = function (){
    //Start listening to the http requests
};

var deinit = function (cb){
    //This is typically executed when a SIGINT is received, see link1
};

init (serve);

//utils.js
//You could write a wrapper for the db instance, see link2
var mongodb = require ("mongojs");
var db;

module.exports.init = function (cb){
    db = mongodb.connect ("localhost", ["modules"]);
    db.modules.find ({}, function (err, modules){
        if (err) return cb (err);
        cb (null, modules);
    });
};
I don't recommend using promises, they are slower than raw callbacks. You don't need them at all.
link1 - SIGINT
link2 - db wrapper

NodeJS | Passing an object as function parameter

I've had a small problem I couldn't overcome this week: I'm trying to pass a JSON object as a parameter to a function, but it always tells me I can't do that, and I don't want to end up sending the 50 values from my JSON object separately.
Here is the setup of my app; this is working as intended with Express:
app.get('/', routes.index);
Here is the routing index from the previous line of code (note that I'm using Jade for rendering and the next function to pass parameters to it, like the name in this one):
exports.index = function(req, res){
    getprofile.profileFunc(function(result) {
        res.render('index', { name: result });
    });
};
Next, it calls the profileFunc function from getprofile:
var profileFunc = function(callback) {
    var sapi = require('sapi')('rest');
    sapi.userprofile('name_here', function(error, profile) {
        var result = [profile.data.name];
        callback.apply(null, result);
    });
};
exports.profileFunc = profileFunc;
Note that I was only able to pass a string result and have it displayed in the Jade render. What I want to do is pass the profile object and use it in the render to display name, age, and birthday, but I can't get it to work; it either passes an undefined object or doesn't pass at all.
Thanks for taking time to read this.
I'll suggest the following:
var profileFunc = function(callback) {
    var sapi = require('sapi')('rest');
    sapi.userprofile('name_here', function(error, profile) {
        callback.apply(null, profile);
    });
};
exports.profileFunc = profileFunc;
...
getprofile.profileFunc(function(result) {
    res.render('index', result);
});
If you put an Object into Jade's template context, then you'll have to reference it using the variable containing that Object. In your templates, you'd access that using name.foo, name.bar, etc.
Actually, the problem is with apply: apply takes an array as its second argument, so it doesn't work when you try to send an object. Just call the callback without apply, like this:
var profileFunc = function(callback) {
    var sapi = require('sapi')('rest');
    sapi.userprofile('name_here', function(error, profile) {
        callback(profile);
    });
};
exports.profileFunc = profileFunc;
...
getprofile.profileFunc(function(result) {
    res.render('index', result);
});
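If you'd rather keep the whole object under a single name in the template, a small variation (the profile key and the fields mentioned are just examples):

getprofile.profileFunc(function(result) {
    // expose the whole object as `profile` in the Jade template,
    // where you could then reference profile.name, profile.age, etc.
    res.render('index', { profile: result });
});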
