Jasmine how to use done()? - javascript

I'm trying to wrap my head around done() so I can test an asynchronous web service call. The documentation from Jasmine and others doesn't make sense to me; I can't see where the code goes, where the test for completion goes, or where the actual Jasmine tests go. They don't even use any asynchronous calls.
This question previously asked about runs() and waitsFor(); they've been deprecated in 2.0, apparently because "It's much simpler" when using done()! In any case, on my version 2.6, using done() anywhere in the code brings up a reference error that it isn't defined, so I'm really not sure what's going on.
This is the code I'd like to be able to adapt. The code to call is helper.send(url, data, params); and the code is done when response !== null.
it ('should be able to invoke a web service with POST as the HTTP method', function() {
// Data
let data = {name: 'StackOverflow'};
// POST
let params = {method: 'POST'};
// This call is asynchronous
// The URL just echoes back the get and post parameters
request.send(url, data, params);
// Need to wait for response !== null
// ...
// Can now resume tests
// NOTE(review): `response` is never assigned in this snippet — it is the
// asker's placeholder showing where the async result would be needed.
expect(response.post.name).toEqual('StackOverflow');
});
If anyone can help with how to reorganize this to work with done(), it would be much appreciated.

You need to move your function that handles the response into the it body. Things get reorganized a bit:
// *** Pass "done" in the function, Jasmine automagically knows what to do with it
it ('should be able to invoke a web service with POST as the HTTP method', function(done) {
// *** Local callback to use here
let localCallback = function(/* arguments */) {
// *** Local copy of response
let response = null;
// *** Copy your stuff into response
...
// *** Moved your tests here
// Can now resume tests
expect(response.post.name).toEqual('StackOverflow');
// *** Let Jasmine know this "it" is finished
// (forgetting this call makes the spec fail with a timeout)
done();
}
// *** Do your setup here to use the local callback above
let request = ...(localCallback);
// Data
let data = {name: 'StackOverflow'};
// POST
let params = {method: 'POST'};
// This call is asynchronous
// The URL just echoes back the get and post parameters
request.send(url, data, params);
});

If your request is promise based you would do something like this:
it ('should be able to invoke a web service with POST as the HTTP method',
function(done) {
// Data
let data = {name: 'StackOverflow'};
// POST
let params = {method: 'POST'};
// This call is asynchronous
// The URL just echoes back the get and post parameters
request.send(url, data, params).then(
// FIX: was "funciton" — a typo that would throw a SyntaxError
function(response) {
expect(response.post.name).toEqual('StackOverflow');
// Signal Jasmine that the async spec finished successfully
done();
}
).catch(
function(err) {
// Fail the spec with a descriptive Error so Jasmine reports the cause
done(new Error(err));
}
);
}
);
This will only run the expect when the data is returned.
done is called with no params to indicate that your code is finished.
done is called with an Error object to indicate that something failed. The message of the Error object should tell what failed.

Related

HTTP Post calls sequentially Angularjs

I am attempting to make sequential post requests with angular just changing the request body. The reason for this is I have a REST API that I am calling to create users but it takes some time to return. I wanted to basically send up the requests in batches essentially calling the same endpoint just different request body. I have seen other questions about calling functions sequentially but they always seem to be a set number of functions that do different things. I just can't wrap my brain around the obvious recursion here.
So far I have this function that returns the promise but I don't understand how to write the recursion to call this function to go through all of $scope.csvResults.
// Builds one POST request for a slice of the CSV rows.
// currentIndex: start offset into $scope.csvResults
// step: number of rows to include in this batch
// Returns the $http promise for the request.
$scope.importUsersPromise = function(currentIndex, step) {
var nextInput = $scope.csvResults.slice(currentIndex, currentIndex+step);
var requestBodyUsers = {
"mode": "SCRIPT",
"inputParams": [
JSON.stringify(nextInput)
]
};
return $http({
method: 'POST',
url: api_url + "v1/serverAction/",
headers: {
"Authorization":"user",
"Content-Type":"application/json"
},
// FIX: the ES6 shorthand property created a config key named
// "requestBodyUsers", which $http ignores — the request body must be
// passed under the "data" key.
data: requestBodyUsers
});
};
Say you have an array users with all the different request bodies. Then you do something like this:
var users = [/* your request bodies */];
var errors = [/* will store the errors */];
// make the first api call
var promise = apiCall(users[0]);
// use .reduce to chain all requests
// (each .then only fires after the previous request settles, so the
// requests run strictly one after another)
promise = users.slice(1).reduce(function(promise, user){
return promise.then(apiCall.bind(null, user));
}, promise);
promise
.then(function(){
// do something when all users are inserted
})
.finally(function(){
// do something when all requests are done
// even if some of them have failed
console.log(errors);
})
function apiCall(user) {
// wraps the $http config for a single user's request body (placeholder)
return $http({... })
}
You have to keep in mind that if one of the requests fails the chain is broken and the following requests will not be send. If you want to send them anyway you should use .finally and optionally .catch to collect the errors:
// use .reduce to chain all requests
promise = users.slice(1).reduce(function(promise, user){
return promise
.catch(err => errors.push(err)) // fail handler (optional): record the error instead of breaking the chain
.finally(apiCall.bind(null, user)); // always make the next api call, whether the previous one passed or failed
}, promise);
It is a good idea to check Angular's documentation if you haven't already ;)
You can put all the request set in a requestArray.
Please check out this link.

Using a promise in Aurelia for data retrieval and caching

I've created a data service that gets data sets from an API, but I'd like to have it first cache it locally and check if the same data is already available (nevermind the stale data factor... I'll deal with that next). Here's my code:
getData(url, use_cache = true) {
// Http Fetch Client to retrieve data (GET)
let cache_index = this.cache.findIndex(r => { return r.url === url; });
if ((use_cache) && (cache_index > -1) && (this.cache[cache_index].data.length)) {
// Use cached data (available)
console.log("Found cached data!", this.cache[cache_index].data);
//
// NOTE(review): this branch returns the raw data, not a promise, which is
// why callers that chain .then() on getData() fail on the cached path.
//
return this.cache[cache_index].data;
} else {
console.log("Retrieving records from " + url);
return this.httpClient.fetch(url, {
credentials: 'include'
}).then(response => {
// Old statement was simple...
// return response.json();
// New method seems to be working because it's saving the data into the cache
return response.json().then(result => {
this.cache.push({'url': url, 'data': result});
// Returning result makes it the resolved value of the outer promise,
// which is why this line is needed.
return result;
});
});
}
}
It works fine to retrieve the data the first time, and even on the second call I can see (from the console log) that it finds the correct cached data, but I'm getting an error that I believe is related to promises, which is not in my area of expertise yet.
Error message:
ERROR [app-router] TypeError: this.core.getData(...).then is not a function
This error is actually in my viewmodel's caller, which looks like this:
getAccounts() {
this.core.getData('/accounting/account/all').then(response => {
this.accounts = response;
});
}
I guess since when the data is cached, instead of returning a promise it's actually returning the data, and there's no .then method on the raw data.
I suspect I need to either create a fake promise (even though it's not an async transaction) to return when the data is cached or improve the way I'm calling this method from my data service (or returning the data).
Any ideas on how to fix this current problem? Any free advice on this whole topic as it relates to Aurelia?
I guess since when the data is cached, instead of returning a promise it's actually returning the data, and there's no .then method on the raw data.
Yes.
I suspect I need to either create a fake promise (even though it's not an async transaction) to return when the data is cached
Possible (using Promise.resolve), but no.
…or improve the way I'm calling this method from my data service (or returning the data).
No, for sure you shouldn't need that.
Instead, there's a much simpler solution: cache the promise object itself, and return the same promise from every call for that url!
getData(url, use_cache = true) {
// Http Fetch Client to retreive data (GET)
if (use_cache && url in this.cache)
return this.cache[url];
else
return this.cache[url] = this.httpClient.fetch(url, {
credentials: 'include'
}).then(response => response.json());
}
This has the additional benefit that you'll never have two parallel requests for the same resource - the request itself is cached, not only the arrived result. The only drawback is that you also cache errors, if you want to avoid that and retry on subsequent calls then you have to drop the cache on rejections.

Why modify request after request.post

Newbie question while trying to understand code created by others. Believe me I tried to understand this. Here goes..
For what reason would someone still call functions like .qs() and .json() in Request - module after we got what we need with .post() and sent the response already. They can't affect the request.post as they are called afterwards, can they?
With my skills I'm not able to understand from the request module API docs (v2.22.0) what these actually do.
This is not the whole code but I tried to get the important parts here:
// When request comes to /getthisapge, make external query and return data in JSON format.
var request = require('request');
module.exports = function(app) {
app.get('/getthispage', function(req, res, next) {
var filter = {};
var query = {};
filter.category = req.query.category;
query.onBehalf = req.query.onBehalf;
request.post(URIandoptions, function(error, response, body) {
res.json(body.members)
}).qs(query).json(filter);
}
}
Without knowing exactly what the post function does (unnecessary to your question), you need to look at the order of execution.
request.post(URIandoptions, function (error, response, body){
// completion callback — invoked later, after the POST finishes
res.json(body.members)
})
.qs(query) // configures the query-string details of the request being built
.json(filter); // sets the JSON body and tells post how to act, triggering the actual send
The function passed into post() does not get called at that specific moment. It is given to the post() function to do with as it pleases. This means technically that the function may never be called (depends on the api).
qs() and json() both get called as soon as the prior function in the chain returns. Usually this type of api means the following:
call post(), passing in a function to be run on completion
call qs() to setup the query details
call json() to tell the post function how to act, which in turn executes the actual post, running the completion function after data has been retrieved.

Dangling callbacks: return response before every callback has returned

Question: Would you consider dangling callbacks as bad node.js style or even dangerous? If so under which premise?
Case: as described below, imagine you need to make calls to a DB in an express server that updates some data. Yet the client doesn't need to be informed about the result. In this case you could return a response immediately, not waiting for the asynchronous call to complete. This would be described as dangling callback for lack of a better name.
Why is this interesting?: Because tutorials and documentation in most cases show the case of waiting, in worst cases teaching callback hell. Recall your first experiences with say express, mongodb and passport.
Example:
'use strict'
const assert = require('assert')
const express = require('express')
const app = express()
function longOperation (value, cb) {
// Simulates a slow async task (e.g. a DB call).
// might fail and: return cb(err) ...here
const DELAY_MS = 4000
setTimeout(function () {
// hand the value back once the simulated work completes
cb(null, value)
}, DELAY_MS)
}
app.get('/ping', function (req, res) {
// do some declarations here
//
// do some request processing here
// call a long op, such as a DB call here.
// however the client does not need to be
// informed about the result of the operation
longOperation(1, (err, val) => {
assert(!err)
assert(val === 1)
console.log('...fired callback here though')
return
})
// respond immediately — the callback above fires later ("dangling")
console.log('sending response here...')
return res.send('Hello!')
})
let server = app.listen(3000, function () {
console.log('Starting test:')
})
Yeah, this is basically what called a "fire and forget" service in other contexts, and could also be the first step in a good design implementing command-query response separation.
I don't consider it a "dangling callback"; the response in this case acknowledges that the request was received. Your best bet here would be to make sure your response includes some kind of hypermedia that lets clients get the status of their request later, and if it's an error they can fix, have the content at the new resource URL tell them how.
Think of it in the case of a user registration workflow where the user has to be approved by an admin, or has to confirm their email before getting access.

A design pattern for async requests to handle success, failure, retry ? (javascript)

I'm writing a mobile app with Appcelerator Titanium that makes a lot of different xhr requests. This is not really an Appcelerator Titanium specific question. But if you do write some code, I hope it's javascript.
The app needs to authenticate itself, the user must be logged for some interactions, etc.
I've come to a point where any request might get any kind of response such as:
not authenticated
not logged
bad params
successful
...
The requests are wrapped in different model methods or helpers.
The thing is, I'm not familiar with this kind of app. I was wondering what are the best practices.
Some real questions for example would be:
If the app is not authenticated (token expired, first launch), should the app try to authenticate itself and then send again the request that was denied ? (transparent to user)
Should I send an authentication request each time the app launches and then "forget" about it?
The problem I'm facing is that the code becomes quickly big if I try to handle this for each request. Full of nested callbacks, retry conditions, various events listeners to manage, etc. It just does not feel very "nice". And it's not DRY at all, when what I really need is for any request, check what was wrong, try to fix it (authenticate if not, automatic login if possible or show the login UI, etc..) then if that works retry the original request a couple of times, abort if needed.
I've been looking at the promise pattern but only know theory and don't know if it could be what I need.
So I welcome any advice regarding this particular problem. I wonder how apps like "Facebook" handle this.
Thank you for your help
This question is not easily answered, but let me try to give you some Ideas:
The most important thing, before coding anything in your app, is the API itself. It has to be reliable and adhere to standards. I will not go into too much detail here, but a well written RESTful API can reduce the complexity of your httpClient significantly. It has to respond with standard http status codes and to methods like POST, GET, PUT, DELETE...
A pretty good read is The REST API Design Handbook by George Reese.
My approach to httpClients with Titanium is a single module, which is loaded via require() wherever needed. I stick to one single client at a time, as I had massive problems with multiple parallel calls. Whenever a call is made, the client checks if there is already a call in progress and sends it to a queue if necessary.
Let me show you an example. I have left out lots of stuff for sake of brevity:
// lib/customClient.js
var xhrRequest; // This will be our HTTPClient (unset while no call is in progress)
var callQueue = []; // This will be our queue (FIFO of pending request params)
// Register the request
// params are:
// method (e.g. 'GET')
// url (e.g. 'http://test.com/api/v1/user/1')
// done (callback function)
function registerRequest(params) {
// A call is already in flight whenever xhrRequest is set — queue instead.
if (xhrRequest) {
queueRequest(params);
} else {
sendRequest(params);
}
}
// Appends a pending request's params to the FIFO callQueue
// for later dispatch by processQueue().
function queueRequest(pending) {
callQueue.push(pending);
}
// Send the request with the params from register
// Please note that I do not hardcode error messages,
// I just do it here so it is easier to read
function sendRequest(params) {
// Set callback if available and valid
// FIX: was "params.callback", which is never defined — the documented
// option is "done", so the done callback was silently never invoked.
var callback = params.done && typeof(params.done) === "function" ? params.done : null;
// Set method
var method = params.method || 'GET';
// Create the HTTP Client
xhrRequest = Ti.Network.createHTTPClient({
// Success
onload: function() {
// You can check for status codes in detail here
// For brevity, I will just check if it is valid
if (this.status >= 200 && this.status < 300) {
if(this.responseText) {
// You might want to check if it can be parsed as JSON here
try {
var jsonData = JSON.parse(this.responseText);
if(callback) callback({ success: true, response: jsonData });
} catch(e) {
if(callback) callback({ success: false, errormessage: 'Could not parse JSON data' });
}
processQueue();
} else {
if(callback) callback({ success: false, errormessage: 'No valid response received' });
processQueue();
}
} else {
if(callback) callback({ success: false, errormessage: 'Call response is success but status is ' + this.status });
processQueue();
}
},
// Error
onerror: function(e) {
if(this.responseText) {
try {
var jsonData = JSON.parse(this.responseText);
// FIX: key was misspelled "reponse" — use "response" so callers read
// the error payload under the same key as success payloads.
if(callback) callback({ success: false, response: jsonData });
} catch(e) {};
}
// Always advance the queue, even after a failed call
processQueue();
},
});
// Prepare and send request
// A lot more can (and should) be configured here, check documentation!
xhrRequest.setTimeout(10000);
xhrRequest.open(method, params.url);
xhrRequest.send();
}
// Checks if there is anything else in the queue
// and sends it
function processQueue() {
// Release the active client so the next call is not treated as in-flight.
xhrRequest = null;
if (callQueue.length > 0) {
sendRequest(callQueue.shift());
}
}
// Our public API
var publicAPI = {
// Single entry point: registers the request, sending it now or queueing it.
sendRequest: function(params) {
registerRequest(params);
}
};
module.exports = publicAPI;
I can then send a call from any other controller/view
var customClient = require('lib/customClient'); // omit 'lib' if you use alloy
// Send the request
customClient.sendRequest({
method : 'GET',
url : 'http://test.com/api/v1/user/1',
done : function(response) {
// Receives the { success: ..., response/errormessage: ... } object
// built by the client module's onload/onerror handlers.
Ti.API.debug(JSON.stringify(response));
}
});
Note that this is not complete and does not check for connectivity, has no real error handling etc., but it might help you to get an idea.
I think there is loads of stuff to talk about here, but I will stop here for now...

Categories

Resources