How to unit test express Router routes - javascript

I'm new to Node and Express and I'm trying to unit test my routes/controllers. I've separated my routes from my controllers. How do I go about testing my routes?
config/express.js
var app = express();
// middleware, etc
var router = require('../app/router')(app);
app/router/index.js
module.exports = function(app) {
app.use('/api/books', require('./routes/books'));
};
app/router/routes/books.js
var controller = require('../../api/controllers/books');
var express = require('express');
var router = express.Router();
router.get('/', controller.index);
module.exports = router;
app/api/controllers/books.js
// this is just an example controller
exports.index = function(req, res) {
return res.status(200).json('ok');
};
app/tests/api/routes/books.test.js
var chai = require('chai');
var should = chai.should();
var sinon = require('sinon');
describe('BookRoute', function() {
});

If you just want to unit test the route's presence and its method, you can do something like this:
auth.router.js
import { Router } from 'express';
const router = Router();
router.post('/signup', signupValidation, signupUser);
router.post('/login', loginValidation, loginUser);
router.post('/reset', resetValidation, setPasswordReset);
export default router;
auth.router.spec.js
import router from '../auth.router';

describe('has routes', () => {
  const routes = [
    { path: '/signup', method: 'post' },
    { path: '/login', method: 'post' },
    { path: '/reset', method: 'post' },
  ];

  it.each(routes)('`$method` exists on $path', (route) => {
    expect(
      router.stack.some(
        (s) => s.route.path === route.path && Object.keys(s.route.methods).includes(route.method)
      )
    ).toBe(true);
  });
});
Note: The use of $variables in the example test name will only work with Jest ^27.0.0
Edit: Thanks to Keith Yeh for his suggestion to put this into an each() statement. I have updated the code accordingly & the old code is below:
auth.router.spec.js (OLD)
import router from '../auth.router';
test('has routes', () => {
const routes = [
{ path: '/signup', method: 'post' },
{ path: '/login', method: 'post' },
{ path: '/reset', method: 'post' }
]
routes.forEach((route) => {
const match = router.stack.find(
(s) => s.route.path === route.path && s.route.methods[route.method]
);
expect(match).toBeTruthy();
});
});

Code:
config/express.js
var app = express();
// middleware, etc
var router = require('../app/router')(app);
module.exports = app;
app/tests/api/routes/books.test.js
var chai = require('chai');
var should = chai.should();
var sinon = require('sinon');
var request = require('supertest');
var app = require('../../../../config/express'); // adjust the relative path to wherever config/express.js lives

describe('BookRoute', function() {
  it('responds to GET /api/books', function(done) {
    request(app)
      .get('/api/books')
      .expect('Content-Type', /json/)
      .expect('Content-Length', '4')
      .expect(200, "ok")
      .end(function(err, res) {
        if (err) return done(err);
        done();
      });
  });
});
Considerations:
If your server requires an initial state at the beginning of a set of tests (because you're executing calls which mutate server state), you'll need to write a function that returns a freshly configured app at the beginning of each group of tests. There is an npm library, https://github.com/bahmutov/really-need, that will let you require a freshly instantiated version of your server.
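If you'd rather not pull in a library, a factory function gets you most of the way there. A minimal sketch (createApp and the require path are assumptions, not your exact files):
// config/express.js -- export a factory instead of a shared singleton
var express = require('express');

module.exports = function createApp() {
  var app = express();
  // middleware, etc
  require('../app/router')(app);
  return app;
};

// in a test file
var createApp = require('../../../../config/express'); // adjust to your layout

describe('BookRoute', function() {
  var app;

  beforeEach(function() {
    // each group of tests starts from a freshly configured, unmutated app
    app = createApp();
  });

  // tests then hit `app` with supertest as shown above
});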

This is interesting because you've separated out your controllers from your routers. The other StackOverflow article mentioned in the comments is a good way to test your controllers, I think. The thing to keep in mind with unit tests is what exactly you are testing. You shouldn't need to write tests for the express library itself, because presumably it has its own unit tests, so you just need to test your calls to the library. For the books route, that means testing this one line of code:
router.get('/', controller.index);
I looked around to see if there was an obvious way to get a list of routes from the express library, but I didn't see one. You can look at the library's internals and check whether you set the route correctly, as in the sketch below.
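One semi-internal option, which the first answer on this page also uses, is to look at router.stack. It leans on Express 4 internals rather than a documented API, so treat this as an assumption about your Express version; a minimal sketch for the books router:
var should = require('chai').should();
var router = require('../../../router/routes/books');

it('registers GET /', function() {
  var layer = router.stack.find(function(s) {
    return s.route && s.route.path === '/' && s.route.methods.get;
  });
  should.exist(layer);
});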
Another option, which the rest of this answer takes, is to mock express itself and check that you are calling it correctly. This is going to get pretty complicated, because you need to mock up some fundamental parts of JavaScript in order to test this one line of code. Here's how I did it:
describe('BookRoute', function() {
  it("should route / to books controller index", function() {
    var controller = require('../../../api/controllers/books');
    var orig_this = this;
    var orig_load = require('module')._load;
    var router = jasmine.createSpyObj('Router', ['get']);
    var express = jasmine.createSpyObj('express', ['Router']);
    express.Router.and.returnValues(router);
    spyOn(require('module'), '_load').and.callFake(function() {
      if (arguments[0] == 'express') {
        return express;
      } else {
        return orig_load.apply(orig_this, arguments);
      }
    });
    require("../../../router/routes/books");
    expect(router.get).toHaveBeenCalledWith('/', controller.index);
  });
});
What's going on here is that I used Jasmine's spyOn function to spy on the _load function in module.js, which handles all of the require calls. That way, when we require the books router and it calls require('express'), we can return the express spy object we created with jasmine.createSpyObj. Once we have replaced express with our spy object, we can have it return our Router spy object, which lets us spy on router.get. Then we can check that it was called with '/' and controller.index.
This could probably be made into some sort of utility if you wanted to use this a lot.
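For example, something along these lines (a rough sketch, untested; stubRequire is a made-up name):
function stubRequire(name, fake) {
  var Module = require('module');
  var origLoad = Module._load;
  // intercept require(name) and hand back the fake; everything else loads normally
  spyOn(Module, '_load').and.callFake(function(request) {
    return request === name ? fake : origLoad.apply(Module, arguments);
  });
}

// usage inside a spec:
// stubRequire('express', express);
// require('../../../router/routes/books');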
I usually avoid a lot of this by taking a more object-oriented approach: either I pass around an object everywhere that I can mock for tests, or I use some kind of dependency injection like Angular does.
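For instance, the route module could take its dependencies as arguments instead of requiring them itself, and the test simply passes in fakes. A minimal sketch (not the OP's actual code):
// app/router/routes/books.js
module.exports = function(router, controller) {
  router.get('/', controller.index);
  return router;
};

// in a spec
var books = require('../../../router/routes/books');
var router = jasmine.createSpyObj('Router', ['get']);
var controller = { index: function() {} };

books(router, controller);
expect(router.get).toHaveBeenCalledWith('/', controller.index);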

I found this blog incredibly insightful when testing my own server's endpoints.
In the blog he addresses:
How to use the endpoint testing library supertest.
How to programmatically spin up and tear down an Express server, with the routes you need, before and after each endpoint test (he also explains why you would want to do this).
How to avoid a common gotcha: Node's require cache handing your tests a previously loaded module, leading to unintended consequences. A rough sketch of these last two points follows this list.
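A rough mocha sketch of those two points, assuming supertest and the file layout from the question (the require path is an assumption):
var request = require('supertest');

describe('BookRoute', function() {
  var server;

  beforeEach(function() {
    // bust the require cache so each test gets a freshly configured app
    delete require.cache[require.resolve('../../../../config/express')];
    server = require('../../../../config/express').listen(0); // ephemeral port
  });

  afterEach(function(done) {
    server.close(done);
  });

  it('GET /api/books responds with json', function(done) {
    request(server)
      .get('/api/books')
      .expect('Content-Type', /json/)
      .expect(200, done);
  });
});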
Hope this helps. Good luck and if you have any further questions let me know.

Related

Access socket emitter with socket.io in other modules nodejs express

I'll preface this by saying I am new to nodejs in general. Coming from the world of C#, it's a completely different way of thinking for me.
I've gone through a few courses and I'm setting up a little website as sort of a test for myself. And I'm failing!
I'm using socket.io with node, and I'm trying to broadcast a message with the emitter once in a while. I don't care about specific socket points right now (although I will in the future), so the emitter for this should go out to everyone.
I am having trouble accessing the io object from other modules.
I'll post my server.js file, as well as app/index.js, socket/index.js, helpers/index.js and api/index.js. I hope that posting these will show how it's supposed to work.
Ideally, I'd like to keep all socket-related items in the socket module, for consistency. Right now, I'm trying to get a method to run in the helpers module, but ideally the socket module would be better.
Anyway, server.js:
'use strict';
const express = require('express');
const app = express();
const cryptometers = require('./app');
const api = require('./app/api');
const fs = require('fs');
const sources = require('./app/api/sources.json');
app.set('port', process.env.PORT || 3000);
app.set('view engine', 'ejs');
app.use(express.static('public'));
app.use(cryptometers.session);
app.use('/', cryptometers.router);
cryptometers.ioServer(app).listen(app.get('port'), () =>{
console.log('app listening on port ' + app.get('port'));
api.getData(sources[0].source, sources[0].url, app);
setInterval(function(){api.getData(sources[0].source, sources[0].url, app)}, 60000);
});
Standard fare here. I just added a data retriever that calls an API once every minute and updates the database.
app/index.js:
'use strict';
const config = require('./config');
// create an IO server instance
let ioServer = app => {
  app.locals.globalMarketCap = [];
  const server = require('http').Server(app);
  const io = require('socket.io')(server);
  io.set('transports', ['websocket']);
  io.use((socket, next) => {
    require('./session')(socket.request, {}, next);
  });
  require('./socket')(io, app);
  return server;
}

// turns router into a module
module.exports = {
  router: require('./routes')(),
  session: require('./session'),
  ioServer,
}
Here I'm initializing socket.io, binding it to the app. It's also where I initialize a local storage array of data. (Is this a good spot to do this??)
socket/index.js:
'use strict';
const h = require('../helpers');
module.exports = (io, app) => {
  io.of('/').on('connection', socket => {
    console.log('socket.io connected to client');
    if (app.locals.globalMarketCap) {
      socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap));
    }
  })
}
Here I'm responding to connection events, and pushing out the array of data that I defined in the last file above. Again, ideally I'd like all socket type stuff to stay in here.
helpers/index.js:
'use strict';
const router = require('express').Router();
const db = require('../db');
// iterate through the routes object and mount the routes
let _registerRoutes = (routes, method) => {
  for (let key in routes) {
    if (typeof routes[key] === 'object' && routes[key] !== null && !(routes[key] instanceof Array)) {
      _registerRoutes(routes[key], key);
    } else {
      // Register the routes
      if (method === 'get') {
        router.get(key, routes[key]);
      } else if (method === 'post') {
        router.post(key, routes[key]);
      } else {
        router.use(routes[key]);
      }
    }
  }
}

let route = routes => {
  _registerRoutes(routes);
  return router;
}

let updateGlobalMarketCap = (app) => {
  //app.io.socket.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap))
}

module.exports = {
  route,
  updateGlobalMarketCap
}
The commented out line for updateGlobalMarketCap is where my pain is. Trying to get access to the io object there.
api/index.js
'use strict';
const axios = require("axios");
const db = require("../db");
const h = require("../helpers");
let getData = (source, url, app, cryptoMeters) => {
  axios
    .get(url)
    .then(response => {
      //console.log(response.data);
      response.data["source"] = source;
      var data = new db.globalMarketCapModel(response.data);
      app.locals.globalMarketCap = response.data;
      var query = { source: source };
      db.globalMarketCapModel.findOne({
        "source": source
      }, 'source old_total_market_cap_usd total_market_cap_usd', function(err, market) {
        if (market) {
          if (market.old_total_market_cap_usd != response.data.total_market_cap_usd
              && market.total_market_cap_usd != response.data.total_market_cap_usd) {
            response.data["old_total_market_cap_usd"] = market.total_market_cap_usd;
            h.updateGlobalMarketCap(app);
          }
          db.globalMarketCapModel.update(query, response.data, function (err) {
            if (err) {
              console.log("uhoh")
            } else {
              return true;
            }
          });
        } else {
          data.save(function (err) {
            if (err) {
              console.log("uhoh")
            } else {
              return true;
            }
          })
        }
      })
      return true;
    })
    .catch(error => {
      console.log(error);
      return false;
    });
}

module.exports = {
  getData
}
The getData function here is where a call to the update emitter would take place.
I've considered using standard Node event emitters as a solution to my problem, but that might be gumming up the works when there's a simpler answer.
Anyway, thanks for reading, and I'm interested in any commentary on what I've written so far: pitfalls, mistakes, etc. Learning here! :)
There are many different ways to organize your code to accomplish sharing of the io object. Here's one such scheme. You break out your socket.io initialization code into its own module. You give that module two main features:
A constructor function (that you pass the server to) that allows socket.io to initialize itself on your server.
A method to get the io instance after it's been initialized.
This will allow any other code in your project that wants to get access to the io object to do something like this:
const io = require('./io.js').getIO();
Here's how that io module could be structured:
// io.js
// singleton instance of socket.io that is stored here after the
// constructor function is called
let ioInstance;
module.exports = function(server) {
  const io = require('socket.io')(server);
  io.set('transports', ['websocket']);
  io.use((socket, next) => {
    require('./session')(socket.request, {}, next);
  });
  // save in higher scope so it can be obtained later
  ioInstance = io;
  return io;
}

// this getIO method is designed for subsequent sharing of the io instance
// with other modules once the module has been initialized;
// other modules can do: let io = require("./io.js").getIO();
module.exports.getIO = function() {
  if (!ioInstance) {
    throw new Error("Must call module constructor function before you can get the IO instance");
  }
  return ioInstance;
}
And, this module would be initialized like this:
const io = require('./io.js')(server);
You pass it your web server so socket.io can hook into it, and it has to be initialized like this before anyone can call .getIO(). Storing ioInstance in the module takes advantage of Node's module caching: the module initialization code only runs once, and after that the same exports are returned on every require, with access to the saved ioInstance inside the module.
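With that in place, the commented-out line in the question's helpers/index.js could become something like this (the require path depends on where you put io.js, so treat it as an assumption):
// helpers/index.js
let updateGlobalMarketCap = (app) => {
  const io = require('./io.js').getIO();
  // emits to every connected client on the default namespace
  io.emit('globalMarketCap', JSON.stringify(app.locals.globalMarketCap));
};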

ExpressJS nested route with param validation

I want to perform some validation, and add data to the request, for a specific parameter on a route and all of its nested routes.
My REST structure is /room/:room/messages
In my main.js
const roomRoute = require('roomroute.js');
const messageRoute = require('messageroute.js');
app.use('/room',roomRoute);
app.use('/room/:room/messages',messageRoute);
in roomroute.js
const express = require('express');
const router = express.Router();
router.param('room', function (req, res, next, id) {
  // Just for demo
  var room = {
    id: id,
    title: 'Testroom'
  };
  req.room = room;
  next();
});

router.get('/:room', function (req, res) {
  // Display room to console
  console.log(req.room);
  res.sendStatus(200).end();
});

module.exports = router;
In messageroute.js
const express = require('express');
const router = express.Router({ mergeParams:true });
router.get('/', function (req, res) {
  console.log(req.room); // Not working
});
module.exports = router;
When I do a GET to a room, e.g. /room/1234, then req.room is displayed in the console, which is exactly what I want.
But when I do a GET to e.g. /room/1234/messages, the router.param('room', ...) of the parent is not executed; only the GET handler in messageroute.js runs.
Is there a way to have the param evaluated for the parent route and also for all nested routes?
Thank you,
Stefan
This looks like a misunderstanding of how nested routers work. In your example you seem to be looking to share a param across roomroute and messageroute; however, those two routers have no relation to each other.
Routers become nested when they're supplied as middleware to another router, and you have an example of this already: app is a router in itself, and you nest both roomroute and messageroute into it. So based on your current setup, if you want to share param('room') across both these routes you will need to configure it at app level, i.e.
main.js
const roomRoute = require('roomroute.js');
const messageRoute = require('messageroute.js');
app.param('room', function (req, res, next, id) {
  // Just for demo
  var room = {
    id: id,
    title: 'Testroom'
  };
  req.room = room;
  next();
});
app.use('/room', roomRoute);
app.use('/room/:room/messages', messageRoute);
roomroute.js
const router = express.Router({ mergeParams: true });
router.get('/:room', ...);
messageroute.js
const router = express.Router({ mergeParams: true });
router.get('/', ...);
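Alternatively, if you'd rather keep the param handling inside roomroute.js, you can nest the message router under the room router: a router.param callback runs for params captured by that router's own paths, including mount paths used with router.use. A sketch (untested):
// roomroute.js
const express = require('express');
const router = express.Router();
const messageRoute = require('./messageroute.js');

router.param('room', function (req, res, next, id) {
  req.room = { id: id, title: 'Testroom' };
  next();
});

router.get('/:room', function (req, res) {
  res.json(req.room);
});

// :room is captured here, so the param callback above also runs for messages
router.use('/:room/messages', messageRoute);

module.exports = router;

// main.js then only mounts the one router:
// app.use('/room', require('./roomroute.js'));
messageroute.js stays as in the answer above, created with mergeParams: true.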

How do I mock my config file for testing?

I have a Koa app I just started and I need to test something that grabs data from a config file.
I need to test with specific data, but I'm not sure how to modify what data the test receives from the config file.
Example:
app.js
var router = require('koa-router');
var config = require('./config.js');
var db = require('./db.js');
var auth = require('./auth');
var app = require('koa')();
router.get('/', function *() {
if(auth(this.req, config.credentials.secret)) { // Authenticates request based on a hash created using a shared secret
this.body = "Request has been authenticated";
}
});
app.use(router.routes());
app = module.exports = http.createServer(app.callback());
app.listen(3000);
appSpec.js
var request = require('supertest');
var app = require('../app.js');
describe('app', function() {
it('should authenticate all requests against config shared secret', function() {
var secret = 'some_secret';
var validHash = /* hash created from test secret and query */;
request(app)
.get('/')
.query({query: 'some_query'})
.query({hash: validHash})
.expect(403, done);
});
});
This spec will fail because the app will use the secret from the config file (an empty string) instead of my test secret.
Alright, I played around with some different ways to handle this.
The best option I found, for my particular use case, was proxyquire. It's an npm package that lets you override dependencies in any file that you require in your test suites.
So if I am testing this module:
./controllers/myController.js
var config = require('../config.js');
module.exports = function() {
// Do some stuff
};
I would do something like this:
./test/controllers/myControllerSpec.js
var proxyquire = require('proxyquire');
var config = {
credentials: {
secret: 'my_secret'
}
// other fake config stuff
};
var myController = proxyquire('../../controllers/myController.js', {'../config.js': config});
describe('myController', function() {
// TESTS
});
and this instance of myController will use my test config.
This won't work for end to end testing, unless the only place you import your config is the main app file.
I use node-config for my config files and configuration loading based on machine or env variable.
You can specify your config in a variety of formats (.json, .js, yaml, etc.). Using the default settings, you need to create a config folder in your app root and a default.<format> file with your default config.
To override that for testing, create a test.<format> file in your config directory. When you set NODE_ENV=test, node-config will load your default config file first and then your test config file; wherever values conflict, the test config file overrides the defaults.
Here are the full docs for setting up Configuration Files in node-config
Below is an example using node-config with a .js config file format.
./config/default.js
module.exports = {
credentials: {
secret: ''
}
}
./config/test.js
module.exports = {
credentials: {
secret: 'abcdef123456'
}
}
app.js
var router = require('koa-router');
var config = require('config');
var db = require('./db.js');
var auth = require('./auth');
var app = require('koa')();
var credentials = config.get('credentials');
router.get('/', function *() {
if(auth(this.req, credentials.secret)) { // Authenticates request based on a hash created using a shared secret
this.body = "Request has been authenticated";
}
});
app.use(router.routes());
app = module.exports = http.createServer(app.callback());
app.listen(3000);
Jest has nice support for this case. In your test file, add
jest.mock('../config.js', () => ({
  credentials: {
    secret: 'my_secret'
  }
  // other fake config stuff
}));

Mocha API Testing: getting 'TypeError: app.address is not a function'

My Issue
I've coded a very simple CRUD API and I've recently started writing some tests using chai and chai-http, but I'm having an issue when running my tests with $ mocha.
When I run the tests I get the following error on the shell:
TypeError: app.address is not a function
My Code
Here is a sample of one of my tests (/tests/server-test.js):
var chai = require('chai');
var mongoose = require('mongoose');
var chaiHttp = require('chai-http');
var server = require('../server/app'); // my express app
var should = chai.should();
var testUtils = require('./test-utils');
chai.use(chaiHttp);
describe('API Tests', function() {
before(function() {
mongoose.createConnection('mongodb://localhost/bot-test', myOptionsObj);
});
beforeEach(function(done) {
// I do stuff like populating db
});
afterEach(function(done) {
// I do stuff like deleting populated db
});
after(function() {
mongoose.connection.close();
});
describe('Boxes', function() {
it.only('should list ALL boxes on /boxes GET', function(done) {
chai.request(server)
.get('/api/boxes')
.end(function(err, res){
res.should.have.status(200);
done();
});
});
// the rest of the tests would continue here...
});
});
And my express app files (/server/app.js):
var mongoose = require('mongoose');
var express = require('express');
var api = require('./routes/api.js');
var app = express();
mongoose.connect('mongodb://localhost/db-dev', myOptionsObj);
// application configuration
require('./config/express')(app);
// routing set up
app.use('/api', api);
var server = app.listen(3000, function () {
var host = server.address().address;
var port = server.address().port;
console.log('App listening at http://%s:%s', host, port);
});
and (/server/routes/api.js):
var express = require('express');
var boxController = require('../modules/box/controller');
var thingController = require('../modules/thing/controller');
var router = express.Router();
// API routing
router.get('/boxes', boxController.getAll);
// etc.
module.exports = router;
Extra notes
I've tried logging out the server variable in the /tests/server-test.js file before running the tests:
...
var server = require('../server/app'); // my express app
...
console.log('server: ', server);
...
and the result of that is an empty object: server: {}.
You don't export anything in your app module. Try adding this to your app.js file:
module.exports = server
It's important to export the http.Server object returned by app.listen(3000) instead of just the function app, otherwise you will get TypeError: app.address is not a function.
Example:
index.js
const koa = require('koa');
const app = new koa();
module.exports = app.listen(3000);
index.spec.js
const request = require('supertest');
const app = require('./index.js');
describe('User Registration', () => {
const agent = request.agent(app);
it('should ...', () => {
This may also help, and satisfies @dman's point about changing application code to fit a test.
Make your request to localhost and the port as needed:
chai.request('http://localhost:5000')
instead of
chai.request(server)
This fixed the same error message I had using Koa JS (v2) and ava js.
The answers above correctly address the issue: supertest wants an http.Server to work on. However, calling app.listen() to get a server will also start a listening server, which is bad practice and unnecessary.
You can get around by this by using http.createServer():
import * as http from 'http';
import * as supertest from 'supertest';
import * as test from 'tape';
import * as Koa from 'koa';
const app = new Koa();
// add some routes here
const apptest = supertest(http.createServer(app.callback()));
test('GET /healthcheck', (t) => {
apptest.get('/healthcheck')
.expect(200)
.expect(res => {
t.equal(res.text, 'Ok');
})
.end(t.end.bind(t));
});
Just in case someone uses Hapi.js: the issue still occurs there, because Hapi does not use Express.js, so the address() function does not exist.
TypeError: app.address is not a function
at serverAddress (node_modules/chai-http/lib/request.js:282:18)
The workaround to make it work
// this makes the server to start up
let server = require('../../server')
// pass this instead of server to avoid error
const API = 'http://localhost:3000'
describe('/GET token ', () => {
it('JWT token', (done) => {
chai.request(API)
.get('/api/token?....')
.end((err, res) => {
res.should.have.status(200)
res.body.should.be.a('object')
res.body.should.have.property('token')
done()
})
})
})
Export app at the end of the main API file like index.js.
module.exports = app;
We had the same issue when we ran mocha using ts-node in our Node + TypeScript serverless project.
Our tsconfig.json had "sourceMap": true, and the generated .js and .js.map files caused some odd transpiling issues (similar to this one) when running the mocha runner through ts-node. So we set the sourceMap flag to false and deleted all the .js and .js.map files in our src directory. Then the issue was gone.
If you have already generated files in your src folder, commands below would be really helpful.
find src -name ".js.map" -exec rm {} \;
find src -name ".js" -exec rm {} \;
I am using Jest and Supertest, but was receiving the same error. It was because my server takes time to set up (it does async work to set up the db, read config, etc.). I needed to use Jest's beforeAll helper to allow the async setup to run. I also needed to refactor my server to separate out listening, and instead used @Whyhankee's suggestion to create the test's server.
index.js
export async function createServer() {
//setup db, server,config, middleware
return express();
}
async function startServer(){
let app = await createServer();
await app.listen({ port: 4000 });
console.log("Server has started!");
}
if(process.env.NODE_ENV ==="dev") startServer();
test.ts
import {createServer as createMyAppServer} from '#index';
import { test, expect, beforeAll } from '#jest/globals'
const supertest = require("supertest");
import * as http from 'http';
let request :any;
beforeAll(async ()=>{
request = supertest(http.createServer(await createMyAppServer()));
})
test("fetch users", async (done: any) => {
request
.post("/graphql")
.send({
query: "{ getQueryFromGqlServer (id:1) { id} }",
})
.set("Accept", "application/json")
.expect("Content-Type", /json/)
.expect(200)
.end(function (err: any, res: any) {
if (err) return done(err);
expect(res.body).toBeInstanceOf(Object);
let serverErrors = JSON.parse(res.text)['errors'];
expect(serverErrors.length).toEqual(0);
expect(res.body.data.id).toEqual(1);
done();
});
});
Edit:
I also had errors when using data.forEach(async () => ...); I should have used for (let x of ...) in my tests.
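For anyone hitting the same thing: forEach never awaits the promises its async callback creates, so the test moves on before the work finishes, whereas for...of awaits each iteration. A small illustration (not from the original test file):
// forEach fires the callbacks and returns immediately; nothing is awaited
async function brokenSum(numbers) {
  let total = 0;
  numbers.forEach(async (n) => {
    total += await Promise.resolve(n);
  });
  return total; // returns before any addition has happened
}

// for...of awaits each iteration before moving on
async function workingSum(numbers) {
  let total = 0;
  for (const n of numbers) {
    total += await Promise.resolve(n);
  }
  return total;
}

brokenSum([1, 2, 3]).then((t) => console.log(t));  // 0
workingSum([1, 2, 3]).then((t) => console.log(t)); // 6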

Original function is called instead of stub

I'm having an issue getting Sinon's stub to work correctly for me. When I stub list on retro and the test runs, app.get('/retro', retro.list) is executing the original function retro.list instead of the stub. Since this happens, the test fails because the stub's callCount is 0.
I'm more familiar with CoffeeScript, and I have stubbed things the same way there. Is there something I'm not understanding about JavaScript's scoping, or how require('../routes/retro') works, or is retro not the same object in app.js and test.js?
Many thanks for the help; code below.
test.js:
var request = require('supertest')
, retro = require('../routes/retro')
, app = require('../app')
, sinon = require('sinon');
require('should');
describe('GET /retro', function() {
// less involved, but maybe stupid to test
it('should call retro.list', function(done) {
var stub = sinon.stub(retro, 'list');
request(app)
.get('/retro')
.end(function(err, res){
stub.callCount.should.equal(1);
if (err) return done(err);
done();
})
})
})
app.js:
var express = require('express')
, config = require('./config')
, routes = require('./routes')
, retro = require('./routes/retro');
var app = express();
config(app);
app.get('/', routes.index);
app.get('/retro', retro.list);
module.exports = app;
retro.js:
var retro = {
list: function(req, res){
console.log('actual called');
res.send("respond with a resource");
}
}
module.exports = retro;
You'll likely need to create your stubs before requiring/creating the app.
var request = require('supertest')
, sinon = require('sinon')
, retro = require('../routes/retro');
var stubRetroList = sinon.stub(retro, 'list');
var app = require('../app');
// ...
stubRetroList.callCount.should.equal(1);
This allows retro.list to be updated before it's passed to the route:
app.get('/retro', retro.list);
The issue is probably that retro.list isn't passed by reference (a pointer); the function value is copied when the route is registered. So, although sinon.stub() replaces retro.list on the object, it doesn't affect the copy that the '/retro' route already had.
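A tiny illustration of that point (not the original code):
var retro = { list: function() { return 'original'; } };

var registeredHandler = retro.list;          // what app.get('/retro', retro.list) stored
retro.list = function() { return 'stub'; };  // roughly what sinon.stub(retro, 'list') does

console.log(registeredHandler()); // 'original' -- the route still calls the old function
console.log(retro.list());        // 'stub'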
I faced the same issue and the accepted answer (while true) was of no help. It turns out that for sinon stubbing to work, the stubbed method cannot be called directly from within the same module. In other words, stubbing a module export will only stub the export, not the internal usage of the function referenced by module.exports.
Explained via an example:
module.js
const express = require('express')
const router = express.Router()
router.get('/', function (req, res) {
res.status(200).json(list())
})
function list() {
return ['something']
}
module.exports = {
router: router,
list: list
}
module.spec.js
// This stub will not work
sinon.stub(module, 'list').callsFake(() => ['something else'])
To make it work you have to separate what you want to stub into its own module and use it that way:
sub_module.js
function list() {
return ['something']
}
module.exports = {
list: list
}
Now sub_module.list() can be stubbed.
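module.js would then require the new module and look the function up at call time, so the stub installed in the spec is what actually runs. A sketch of how the pieces fit together (untested):
// module.js
const express = require('express')
const subModule = require('./sub_module')
const router = express.Router()

router.get('/', function (req, res) {
  // the property lookup happens per request, so a stubbed subModule.list is picked up
  res.status(200).json(subModule.list())
})

module.exports = { router: router }

// module.spec.js
const sinon = require('sinon')
const subModule = require('./sub_module')

sinon.stub(subModule, 'list').callsFake(() => ['something else'])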
(OP defines a method in place so this is not an issue for him)
