If I have a function like the following:
function foo(request, response) {
    var val = request.param('data');
    // code here
}
How can I write a Mocha test for this function, so that I can pass in the request and response parameters?
The function you wrote above can be seen as a controller - it handles the request and gives back a response.
There are a few things you can do:
You can test the route itself - make an HTTP request to the endpoint which uses this controller and check that it behaves correctly - you can use the request/supertest/superagent libraries, for example.
You can mock the request and response objects and test the code directly - this doesn't require a server to be started, but you need to spend some time mocking the objects correctly.
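For the first option, a hedged minimal sketch with supertest could look like this (the ./app module and the /foo route are assumptions made for the example, not part of the question):
var request = require('supertest');
var app = require('./app'); // hypothetical: the module that exports your Express app

describe('GET /foo', function () {
    it('responds with the expected payload', function (done) {
        request(app)
            .get('/foo')          // hypothetical route that uses the foo controller
            .query({ data: 42 })
            .expect(200)
            .end(function (err, res) {
                if (err) return done(err);
                // assert on res.body here
                done();
            });
    });
});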
It depends on what your "code here" does, and what you want to test:
Test the logic
If you can separate the code into a method that accepts "val" and returns a result, then just test that.
Usually, getting the params from a request and passing the results to a response is a no-brainer, and not worth testing.
foo: function (req, res) {
    // Do you really need to test that?
    var data = req.param("data");
    // You probably want to test that
    var bar = doFooLogic(data);
    // Do you really need to test that?
    res.json(bar);
},

doFooLogic: function (data) {
    ...
}
And a test like:
describe("foo's logic", function () {
    it("does stuff", function () {
        // Just test the logic.
        // This assumes you exposed doFooLogic, which is probably acceptable
        var bar = doFooLogic(42);
        assert(bar.xxxx); // whatever
    });
});
Test the req/response function:
If you really want that, and you're just using "param" on the request object, you might be able to easily mock the request/response (this is JS, you just need to pass something that has the same functions available):
describe(..., function () {
    it("does whatever", function () {
        var mockRequest = {
            param: function (key) {
                if (key === "data") {
                    return 42;
                } else {
                    throw new Error("Unexpected key: " + key);
                }
            }
        };
        var mockResponse = {
            // mock whatever function you need here
            json: function (whatever) {
                assert(whatever.xxxx); // compare what you put into the response, for example
            }
        };
        // Then do the call
        foo(mockRequest, mockResponse);
        // The hard part is then how to check that the response was passed the right stuff.
        // That's why testing the logic is probably easier.
    });
});
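One hedged option for that last concern: let a sinon spy stand in for res.json and inspect it after the call, instead of asserting inside the mock (this assumes sinon is installed and that foo is synchronous, as in the example above):
var sinon = require('sinon');
var assert = require('assert');

var mockResponse = { json: sinon.spy() };
foo(mockRequest, mockResponse);

assert(mockResponse.json.calledOnce);
// inspect mockResponse.json.firstCall.args[0] to check the payload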
I think you can simply mock it with something like Sinon.js. It should look something like this:
describe('...', function () {
    it('should test something', function (done) {
        var stub = sinon.stub(request, "param");
        stub.withArgs("data").returns("Whatever");

        var val = request.param('data');
        // do your logic with that value
        assert.equal(/* whatever value you want to check */);

        stub.restore();
        done();
    });
});
And you don't have to worry about the request's actual content.
In my controller, called MapController, I have a function that parses remote JSON files and, through an if-else structure, pushes some values into an array called "parsewebservice". Apparently everything works, but console.log(parsewebservice); prints an empty array at the point where I call it. When I put it inside the forEach it does print values, but cluttered and repeated, so that is not the right way.
I would like to know why the values pushed into "parsewebservice" are not available in the variable after it has been populated, and what the correct way to do this would be.
Here is my code below:
/**
 * MapController
 *
 * @description :: Server-side logic for managing Maps
 * @help        :: See http://sailsjs.org/#!/documentation/concepts/Controllers
 */
module.exports = {

    index: function(req, res, next) {
        Data.find(function foundData(err, datas) {
            if (err) return next(err);
            var parsewebservice = [];
            datas.forEach(function(data, index) {
                var req = require("request");
                var url = data.address + "?f=pjson";
                req(url, function(err, res, retorno) {
                    if (err) {
                        console.log(err);
                    } else {
                        var camadas = JSON.parse(retorno);
                        if (camadas.mapName) {
                            camadas.layers.forEach(function(campo, i) {
                                if (campo.subLayerIds != null) {
                                } else if (campo.subLayerIds == null) {
                                    parsewebservice.push([i, "dynamicMapLayer", campo.name, data.address]);
                                }
                            });
                        } else if (camadas.serviceDataType) {
                            parsewebservice.push([null, "imageMapLayer", camadas.name, data.address]);
                        } else if (camadas.type) {
                            parsewebservice.push([null, "featureLayer", camadas.name, data.address]);
                        }
                    }
                });
            });
            console.log(parsewebservice);
        });
    },

};
My first comment has to be that you should not combine function(req, res) with var req = require('request')... you lose your access to the original req object!
So, you need to run a list of async tasks and do something when they are all complete. That will never be entirely easy, and no matter what, you will have to get used to the idea that your code does not run from top to bottom as you've written it. Your console.log at the bottom runs before any of the callbacks you pass to your external requests have run.
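To see the ordering issue in isolation, here is a tiny illustration (the URL is just a placeholder):
var request = require("request");

console.log("first");
request("http://example.com", function(err, res, body) {
    console.log("third - runs later, when the HTTP response arrives");
});
console.log("second");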
The right way to do this is to use promises. It looks like you are using the request library, which only accepts callbacks and does not return promises. You can create your own promise wrapper for it, or use an alternative library (several are recommended on its page).
I don't want to write a whole intro to promises right here, so what I will do is give you a less pretty, but maybe more understandable, way to run some code once all your requests have completed.
Data.find(function foundData(err, datas) {
    if (err) return next(err);
    var parsewebservice = [];
    // here we will write some code that we will run once per returned data
    var processResponse = function(resp) {
        parsewebservice.push(resp);
        if (parsewebservice.length >= datas.length) {
            // we are done, that was the final request
            console.log(parsewebservice);
            return res.send({data: parsewebservice}); // or whatever
        }
    };
    datas.forEach(function(data, index) {
        var request = require("request");
        var url = data.address + "?f=pjson";
        request(url, function(err, res, retorno) {
            // do some processing of retorno...
            // call our function to handle the result
            processResponse(retorno);
        });
    });
    console.log(parsewebservice); // still an empty array here
});
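For reference, here is a rough, untested sketch of the promise-wrapper idea mentioned above, using native Promises and Promise.all (the parsing of each response is left as a comment; the helper name fetchLayer is made up for the example):
var request = require("request");

function fetchLayer(data) {
    return new Promise(function(resolve, reject) {
        request(data.address + "?f=pjson", function(err, response, retorno) {
            if (err) return reject(err);
            resolve(JSON.parse(retorno));
        });
    });
}

Data.find(function foundData(err, datas) {
    if (err) return next(err);
    Promise.all(datas.map(fetchLayer)).then(function(camadasList) {
        // build parsewebservice from camadasList here, then respond once
        console.log(camadasList);
        res.send({data: camadasList});
    }).catch(next);
});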
I solved the problem.
the "request" module is asynchronous so we need to wait for it to respond and then send the response to the view.
To do this we created a function called "foo" to contain the foreach and the request, we made a callback of that function and finally we made the response (res.view) within that function, so that the controller response would only be sent after the response of the "foo" function to the callback. So we were able to parse.json the data from the "data" collection using foreach and the "request" module and send the objects to the view.
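A hedged sketch of what that might look like (the helper name "foo" comes from the description above; the exact parsing logic is omitted):
index: function(req, res, next) {
    var request = require("request");

    // Runs all the requests and calls back once every one of them has finished
    function foo(datas, done) {
        var parsewebservice = [];
        var pending = datas.length;
        datas.forEach(function(data) {
            request(data.address + "?f=pjson", function(err, response, retorno) {
                if (!err) {
                    // parse retorno and push rows into parsewebservice here
                }
                if (--pending === 0) done(parsewebservice);
            });
        });
    }

    Data.find(function(err, datas) {
        if (err) return next(err);
        foo(datas, function(parsewebservice) {
            // respond only after all requests have finished
            res.view({ parsewebservice: parsewebservice });
        });
    });
}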
Many thanks to all who have helped me, my sincere thanks.
I have the following e2e scenario written using Nightwatch:
var Q = require('q');

module.exports = {
    afterEach: function (browser, done) {
        browser.end(function() {
            done();
        });
    },

    'should display same data on second page as on first page': function (browser) {
        //Given
        var firstPage = browser.pages.first();
        //When
        Q.all([
            firstPage.getTextPromise('#element1'),
            firstPage.getTextPromise('#element2'),
            firstPage.getTextPromise('#element3')
        ]).then(function (values) {
            users.click('#linkToSecondPage');
            //Then
            var secondPage = browser.page.secondPage();
            secondPage.expect.element('#dataElement1').text.to.equal(values[0]).before(5000);
            secondPage.expect.element('#dataElement2').contains.text(values[1]);
            secondPage.expect.element('#dataElement3').contains.text(values[2]);
        });
    }
};
The getTextPromise command is defined by me in the following way:
commands: [{
    getTextPromise: function(selector) {
        var self = this;
        return Q.Promise(function (resolve, reject) {
            self.getText(selector, function(result) { resolve(result.value); });
        });
    }
}]
The rationale behind this scenario is to remember some values on one page before clicking a link to a second page, and then to check that the second page displays the same content (for example, you click an item in a table and go to a page displaying the details of that particular item).
Unfortunately, I have observed that this test sometimes does not check the things inside the then callback.
I think this is caused by the test finishing (calling done in afterEach()) before the callback returns.
I thought there was a done() callback passed to the test (much like in Nightwatch async unit tests) that I could use, but apparently there is not.
Is there a proper way to do this in Nightwatch? Perhaps I am using commands in the wrong way?
Nightwatch will keep track of the command run order itself if the command runs a method on this and returns this.
Try a command like the following, adapted as a page command if you prefer:
exports.command = function(selector) {
    var self = this;
    var globals = self.globals;
    if (!globals.values) { globals.values = []; }
    self.getText(selector, function(result) {
        if (result.status !== -1) {
            globals.values.push(result.value);
        }
    });
    return self;
};
Because the command returns this, it can be chained, and you can be sure the commands run in order without manually writing promises.
Example:
var firstPage = browser.pages.first();
var secondPage = browser.page.secondPage();

firstPage.getTextPromise('#element1')
    .getTextPromise('#element2')
    .getTextPromise('#element3');

secondPage.expect.element('#dataElement1').text.to.equal(browser.globals.values[0]).before(5000);
secondPage.expect.element('#dataElement2').contains.text(browser.globals.values[1]);
secondPage.expect.element('#dataElement3').contains.text(browser.globals.values[2]);
I haven't tested this out, so it may need a slight tweak. Hopefully it gives a rough idea of how to chain your commands the Nightwatch way. If you run into a problem, let me know.
Simply put, I'm trying to dynamically generate an AJAX request based off a scenario that I'm retrieving via an AJAX request from a server.
The idea is that:
A server provides a "Scenario" for me to generate an AJAX Request.
I generate an AJAX request based on the Scenario.
I then repeat this process, over and over, in a loop.
The big idea is that I can change the second AJAX request dynamically, based on the Scenario given by the server.
I have this working, but I feel like the way I'm doing it is very messy. Is there a better way to think through this problem? Perhaps promises? If anyone could review this and provide feedback or suggestions on how to clean it up, that would be greatly appreciated.
Here is my code: http://jsfiddle.net/6jph0e98/
(please open the console to see everything in action)
As a reference, here is the scenario data I'm currently working with:
var scenario = {
    "base": {
        "frequency": "5000"
    },
    "endpoints": [
        {
            "method": "GET",
            "type": "JSON",
            "endPoint": "https://api.github.com/users/alvarengarichard",
            "queryParams": {
                "objectives": "objective1, objective2, objective3"
            }
        }
    ]
};
Here are my 2 cents: http://jsfiddle.net/3Lddzp9j/6/.
Yes, I think you can do this more elegantly by chaining promises. I figured out what I think your app does and how you can do it by chaining these promises. What is interesting is that certain steps already return promises (the jQuery AJAX calls) but others don't; for those, we have to create our own promise that resolves immediately. And then there is the timeout, which we wrap in a promise.
Also, I tried to apply some JS best practices, like keeping things out of the global scope by wrapping them in an IIFE and applying the module pattern.
This makes the overall control flow of your application nice and clean, IMHO:
var run = function() {
    getScenario()
        .then(mapToInstruction)
        .then(waitForTimeout)
        .then(callApi)
        .then(handleResults)
        .then(run);
};
And it also hides the private members, exposing only the run() method:
return {
    // This will expose only the run method
    // and will keep all other functions private
    run: run
};
Hope it helps - let me know what you think. Here's the full source, with comments:
// First of all - I'm using the javascript module pattern here.
// This will all be much easier once ES6 is out, but it will
// have to do for now.
// Also, I'm importing jQuery into the module as you can see, which
// is wrapped inside the IIFE (Google it), keeping things nicely
// out of the global scope.
var App = (function ($) {

    // Gets the scenario from the API - $.get is just some syntactic
    // sugar for $.ajax with GET as method - NOTE: this returns a promise
    var getScenario = function () {
        console.log('Getting scenario ...');
        return $.get('http://demo3858327.mockable.io/scenario');
    };

    // The result of the previous promise is passed into the
    // next as we're chaining. So data will contain the
    // result of getScenario
    var mapToInstruction = function (data) {
        // We map it onto a new instruction object
        var instruction = {
            method: data.endpoints[0].method,
            type: data.endpoints[0].type,
            endpoint: data.endpoints[0].endPoint,
            frequency: data.base.frequency
        };
        console.log('Instructions received:');
        console.log(instruction);

        // And now we create a promise from this
        // instruction so we can chain it
        var deferred = $.Deferred();
        deferred.resolve(instruction);
        return deferred.promise();
    };

    // This wraps the setTimeout in a promise, again
    // so we can chain it
    var waitForTimeout = function (instruction) {
        console.log('Waiting for ' + instruction.frequency + ' ms');
        var deferred = $.Deferred();
        setTimeout(function () {
            deferred.resolve(instruction);
        }, instruction.frequency);
        return deferred.promise();
    };

    // Final step: call the API from the
    // provided instructions
    var callApi = function (instruction) {
        console.log('Calling API with given instructions ...');
        return $.ajax({
            type: instruction.method,
            dataType: instruction.type,
            url: instruction.endpoint
        });
    };

    var handleResults = function (data) {
        console.log("Handling data ...");
        var deferred = $.Deferred();
        deferred.resolve();
        return deferred.promise();
    };

    // The 'run' method
    var run = function () {
        getScenario()
            .then(mapToInstruction)
            .then(waitForTimeout)
            .then(callApi)
            .then(handleResults)
            .then(run);
    };

    return {
        // This will expose only the run method
        // and will keep all other functions private
        run: run
    };

})($);
// ... And start the app
App.run();
I am trying to read data from a JSON file and wait until the data has been fetched into $scope.urls.content. So I wrote this code:
$scope.urls = { content: null };

$http.get('mock/plane_urls.json').success(function(thisData) {
    $scope.urls.content = thisData;
});
And now I am trying to write something like a callback, but that doesn't work. How can I do that? Or is there a function for this? I am running out of ideas.
Do you mean something like this?
$http.get('mock/plane_urls.json').success(function(thisData) {
    $scope.urls.content = thisData;
    $scope.yourCallback();
});

$scope.yourCallback = function() {
    // your code
};
You want to work with promises and $resource.
As $http itself returns a promise, all you have to do is chain onto its return value. Simple as that:
var promise = $http.get('mock/plane_urls.json').then(function(thisData) {
    // with .then the handler receives the full response object, so use .data
    $scope.urls.content = thisData.data;
    return 'something';
});

// somewhere else in the code
promise.then(function(data) {
    // receives the data returned from the http handler
    console.log(data === "something");
});
I made a pretty simple fiddle here.
But if you need this info in several places, you should expose it through a service, so anyone can grab the result and process it, e.g.:
service('dataService', function($http) {
    var requestPromise = $http.get('mock/plane_urls.json').then(function(d) {
        return d.data;
    });

    this.getPlanesURL = function() {
        return requestPromise;
    };
});

// and anywhere in code where you need this info
dataService.getPlanesURL().then(function(planes) {
    // do something with the planes URLs
    $scope.urls.content = planes;
});
Just an important note: the service I mocked up will cache and always return the same data. If what you need is to call this JSON many times, then you should go with $resource.
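For completeness, a hedged sketch of what the $resource variant could look like (assumes the ngResource module is loaded; the module, factory, and controller names are made up for the example):
angular.module('app', ['ngResource'])
    .factory('PlaneUrls', function($resource) {
        return $resource('mock/plane_urls.json');
    })
    .controller('MainCtrl', function($scope, PlaneUrls) {
        $scope.urls = { content: null };
        PlaneUrls.get().$promise.then(function(data) {
            $scope.urls.content = data;
        });
    });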
I want to intercept every call to render, do my own stuff, and then proceed with the original render method. I know this can easily be done with middleware like this:
function (req, res, next) {
    var _render = res.render;
    res.render = function () {
        // custom stuff
        _render.apply(this, arguments);
    };
    next();
}
However, it seems more efficient to just change the prototype of the response object instead of replacing res.render on every request. I tried such a solution without success: when logging http.ServerResponse.prototype there's no trace of any render method.
Finally, I tried to just intercept app.render instead, like this:
var _render = app.render;
app.render = function () {
    // "this" here refers to app instead of res...
    _render.apply(this, arguments);
};
That does fulfill my criterion of only being done once, but it is called on the app object and not the res object, which means I can't access the res or req objects.
Basically what I think I would like to do is something like:
var _render = something.response.render;
something.response.render = function (view, data, callback) {
    // Access res.*, as this.*
    _render.call(this, view, data, callback);
};
Any ideas how to achieve that?
I guess that it depends on the Express version. Try this for 3.x:
var response = require("express").response;

var _render = response.render;
response.render = function() {
    // do your stuff
    _render.apply(this, arguments);
};
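A hedged note: on Express 4.x the same idea should still work, and you can also patch the per-app response prototype through app.response, for example (untested sketch):
var express = require("express");
var app = express();

var _render = app.response.render;
app.response.render = function () {
    // custom stuff, with access to this.req, this.app, etc.
    return _render.apply(this, arguments);
};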