Emit function on sinon mock - javascript

I have a function that I need to test using sinon. It takes two arguments and has different events that can be raised. I'm trying to emit the 'ready' event to simulate a successful SFTP connection.
function configureSFTPConnection(conn, connectionSettings) {
    'use strict';
    return new Promise(function(resolve, reject) {
        conn.on('ready', function() {
            resolve(conn);
        }).on('error', function(err) {
            reject(err);
        }).connect(connectionSettings);
    });
}
I can stub the outer connect function:
configureSftpStub = sinon.stub(clientObject, 'connect');
How can I force the ready callback to execute, completing the promise?
This is what I'm trying:
clientObject = new client();
configureSftpStub = sinon.stub(clientObject, 'connect');
configureSftpStub.onCall(0).returns(function() {
    console.log('trying to do something');
    resolve();
});
The .onCall() never seems to run.

What was needed was, rather than trying to return something, to replace the stubbed function entirely and make a simple .emit call inside it.
configureSftpStub = sinon.stub(clientObject, 'connect', function() {
    this.emit('ready');
});
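For reference, here is a minimal sketch of what the complete test might look like. It assumes mocha/chai and that client comes from the ssh2 module (the question never names the library), and it uses .callsFake, which newer sinon versions require in place of the three-argument stub form shown above:
var sinon = require('sinon');
var expect = require('chai').expect;
var Client = require('ssh2').Client; // assumption: the SFTP client comes from ssh2

describe('configureSFTPConnection', function () {
    it('resolves once the stubbed connect emits ready', function () {
        var clientObject = new Client();
        // replace connect so it never touches the network and emits 'ready' instead
        var configureSftpStub = sinon.stub(clientObject, 'connect').callsFake(function () {
            this.emit('ready');
        });

        return configureSFTPConnection(clientObject, {}).then(function (conn) {
            expect(conn).to.equal(clientObject);
            expect(configureSftpStub.calledOnce).to.equal(true);
        });
    });
});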

Related

How to wait for a bluebird promise to settle in multiple locations?

I have a situation where a bunch of functions need to wait for a promise to settle because it's the init function:
self.init = new Promise(function (resolve) {
    // do stuff, take awhile
    resolve();
});
But while it's initializing, the async nature of the app means other functions that depend on init having completed are already being called. I want those functions to wait for the init to finish, then continue.
I tried doing this inside each function
function doSomethingUseful() {
    self.init.reflect().then(function () {
        // do the function's purpose
    });
}
function doSomethingUseless() {
    self.init.reflect().then(function () {
        // do the function's purpose
    });
}
But it only works intermittently, probably only when init has already settled; if it hasn't, it just hangs here, and oddly the whole app hangs with it, despite this being async.
I am trying to replace a former solution that involved intervals and checking a Boolean isInit in each function call.
Is there a bluebird function to do this? Or another way to keep waiting and checking on a promise to see if it is resolved?
The app has this sort of structure in a number of places, usually around sqlite reads/writes: an init to open the database, but while it's opening, the page is loading and already trying to read from and write to the tables, so those reads/writes are forced to wait by using setInterval and repeatedly checking whether the init has finished.
Here's an example using Google Analytics.
function Analytics() {
    var self = this;
    self.ready = ko.observable(false).subscribeTo('application:ready'); // attached to page ready event in jquerymobile and cordova
    self.trackerInit = new Promise(function (resolve, reject) {
        ko.computed(function () {
            if (self.ready()) {
                window.ga.startTrackerWithId('id', 1000, resolve, reject);
            }
        });
    });
}
Analytics.prototype.trackSpeed = function (cat, interval, variable, label) {
    var self = this;
    console.log("speed tracker", cat, interval, variable, label); // this logs
    return self.trackerInit.then(function () {
        console.log("speed tracker confirm init"); // this never logs, all execution stops including other async code
        return new Promise(function (resolve, reject) {
            window.ga.trackTiming(cat, interval, variable, label, resolve, reject);
        });
    }).catch(function (e) {
        if (e.message === "send timeout") {
            return true; // who cares about timeouts anyways
        } else {
            throw e; // rethrow it
        }
    });
};
The function is called within a page change event without using its return value, purely async. Calling it causes all execution to stop.
The ready observable is set up like this:
self.ready = ko.observable(false).publishOn('application:ready');
var deviceReady = new Promise(function (resolve) {
    $(document).on('deviceready', resolve);
});
var pageReady = new Promise(function (resolve) {
    $(document).on('pagecreate', resolve);
});
Promise.all([deviceReady, pageReady]).then(function () {
    // a couple of pages of code and...
    self.ready(true);
});
Changing the init like this produces the same hang when checking its result:
self.trackerInit = new Promise(function (resolve, reject) {
    console.log("initting");
    var checker = setInterval(function () {
        if (window.ga) {
            console.log("ready init");
            window.ga.startTrackerWithId('id', 100, function () {
                clearInterval(checker);
                console.log("init complete");
                resolve();
            }, reject);
        }
    }, 1000);
});
They are just promises. Just use .then() to chain them:
function doSomethingUseful() {
    // wait for init to finish, then do our stuff
    // return the new chained promise in case someone wants to wait on us
    return self.init.then(function () {
        // do stuff
    });
}
function doSomethingUseless() {
    // wait for init to finish, then do our stuff
    // return the new chained promise in case someone wants to wait on us
    return self.init.then(function () {
        // do stuff
    });
}
// do both of those things and then do something else!
Promise.all([doSomethingUseful(), doSomethingUseless()]).then(function () {
    console.log("init is done. And we've done something useful and useless.");
});
Edit:
Based on your additional code, the problem is that if the application is "ready" before your Analytics component is constructed, you will never receive the "application:ready" event (because it was published before you subscribed), so your "ready" observable will remain false. According to the postbox docs, you need to pass true as a second argument to subscribeTo so that you'll get the ready value even if it was published in the past:
ko.observable(false).subscribeTo("application:ready", true)
However, constructing all of these observables and computeds just to feed into a promise is overkill. How about:
self.trackerInit = new Promise(function (resolve, reject) {
    const s = ko.postbox.subscribe("application:ready", function (value) {
        if (value) {
            s.dispose(); // stop listening (prevent memory leak)
            window.ga.startTrackerWithId('id', 1000, resolve, reject);
        }
    }, true);
});
You can even turn this into a promise helper:
function whenReady(eventName) {
    return new Promise((resolve, reject) => {
        const s = ko.postbox.subscribe(eventName, value => {
            if (value) {
                s.dispose();
                resolve(value);
            }
        }, true);
    });
}
function startGaTracker(id, timeout) {
    return new Promise((resolve, reject) => window.ga.startTrackerWithId(id, timeout, resolve, reject));
}
Then you can write:
self.trackerInit = whenReady("application:ready")
.then(() => startGaTracker("id", 100));

The complications of promises in JavaScript/Node.js?

I've been learning Node.js and JavaScript lately. I like Node.js a lot, but I am working on a project coded in Node.js, MongoDB, Cordova, etc., and I notice that I need to use the Promise object in the code a lot.
I created a module in the project to query the db and return results. In every exported function I need to declare a local function, then use a promise. For example, I have the following local function in the module:
var Initialize = function() {
    return new Promise(function(resolve, reject) {
        try {
            MongoClient.connect("db_url_conn", function(err, database) {
                if (err) return console.log(err);
                db = database;
                return resolve(db);
            });
        } catch (e) {
            console.error(e);
        }
    });
};
Then in every exported function in the module I need to use:
mongoOperation.prototype.getLength = function() {
    Initialize(function(db) {
        return db;
    }).then(function(db) {
        getSize(db).then(function(length) {
            console.log(length);
        });
    });
}
The question is:
Is it normal, given the nature of Node.js and JavaScript, to use promises this much?
Do I have any other options to accomplish this?
Since MongoClient.connect() already returns a promise, you can simplify your code:
var Initialize = function() {
    return MongoClient.connect("db_url_conn");
};
...
Initialize().then(function(db) { ... });
However, this will create a new client each time you call Initialize, whereas you should be reusing the client for better performance and to leverage the built-in connection pool:
var client = MongoClient.connect("db_url_conn");
var Initialize = function() { return client };
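Each exported function can then chain off that shared promise instead of reconnecting; a rough sketch, reusing the getLength/getSize names from the question (and assuming a driver version where the connect promise resolves to the db object):
var MongoClient = require('mongodb').MongoClient;

var client = MongoClient.connect("db_url_conn"); // started once, shared by every call

mongoOperation.prototype.getLength = function () {
    return client.then(function (db) {
        return getSize(db);
    }).then(function (length) {
        console.log(length);
        return length;
    });
};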

SignalR: Wait for connection to be re-established before invoking methods

I have a HubProxy with many client-triggered methods as described below:
proxy.invoke('hub_Subscribe');
proxy.invoke('triggerOnServer');
proxy.invoke('dataToServer',someModel);
Now if SignalR is not connected to the server and I try to invoke any of the above methods, it gives me a "Connection must be started before data can be sent." or "Connection was disconnected before invocation result was received." error.
I am aware that one can use connection.stateChanged to confirm whether SignalR is connected or not and invoke methods accordingly. But there is a need to log these invocations so that they can be replayed once the SignalR connection is up.
So, is there a simple way to log these method calls while the connection is disconnected, and later, once the SignalR connection is up and running, invoke them?
Something like proxy.invoke('dataToServer',someModel).WaitForSignalRToBeConnected();
Note: I continuously reconnect to the server after the client gets disconnected, using THIS.
I currently have this problem solved with the following promise:
function GetOpenConnection() {
    return new Promise((resolve, reject) => {
        const msMax = 1000;
        const msInc = 10;
        var ms = 0;
        var idInterval = setInterval(() => {
            if (connection.connectionState == 1) {
                clearInterval(idInterval);
                resolve(connection);
            }
            ms += msInc;
            if (ms >= msMax) {
                clearInterval(idInterval);
                reject(connection);
            }
        }, msInc);
    });
}
Usage:
console.log('before GetOpenConnection()');
GetOpenConnection().then((connection) => {
    console.log('Connected: ', connection.connectionState);
    // invoke functions here
}).catch((connection) => {
    console.log('Not connected: ', connection.connectionState);
});
console.log('after GetOpenConnection()');
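To get something close to the proxy.invoke('dataToServer', someModel).WaitForSignalRToBeConnected() shape from the question, you could wrap the invocation behind that helper; a rough sketch, assuming proxy and GetOpenConnection from above are in scope:
function invokeWhenConnected(method) {
    var args = Array.prototype.slice.call(arguments, 1);
    // wait (up to one second) for an open connection, then forward the call to the hub proxy
    return GetOpenConnection().then(function () {
        return proxy.invoke.apply(proxy, [method].concat(args));
    });
}

// usage
invokeWhenConnected('dataToServer', someModel).catch(function (err) {
    console.log('Invocation failed or connection never opened:', err);
});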
What you need are promises using the $q service.
I'm assuming you have to initialize the hub connection first, then invoke all three other methods in that order.
Create a service, if you don't already have one, for managing communication with the SignalR server. Inject the $q service and SignalR into it.
Wrap each of the following commands in separate methods:
function doInitialize() {
    ... // connect to signalR
    return $q.when(connection.hub.start());
};
function doHubSubscribe() {
    return $q.when(proxy.invoke('hub_Subscribe'));
};
function doTriggerOnServer() {
    return $q.when(proxy.invoke('triggerOnServer'));
};
function doDataToServer() {
    return $q.when(proxy.invoke('dataToServer', someModel));
};
Now from your controller you can chain calls to the service's methods so that each one needs to succeed before the next one runs:
function initSignalR() {
    return service.doInitialize().then(function () {
        return service.doHubSubscribe();
    }).then(function () {
        return service.doTriggerOnServer();
    }).then(function () {
        return service.doDataToServer();
    });
};
Edit:
If you want to add more handling to your invocations, you can use the jQuery .done() and .fail() methods like this:
function sendDataToServer() {
    var deferred = $q.defer();
    // proxy.invoke returns a jQuery promise, so .done()/.fail() are available on it
    proxy.invoke('dataToServer', someModel)
        .done(function (result) {
            deferred.resolve(result);
            console.log('success!');
        })
        .fail(function () {
            deferred.reject();
            console.error('Error invoking server!');
        });
    return deferred.promise;
};
You can add more handling code to the .fail() method, for example using the $timeout service to set up retries.
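For example, a rough retry sketch using $timeout (the retry count and delay here are made up for illustration):
function sendDataWithRetry(retriesLeft) {
    return sendDataToServer().catch(function (err) {
        if (retriesLeft > 0) {
            // wait two seconds, then try again
            return $timeout(angular.noop, 2000).then(function () {
                return sendDataWithRetry(retriesLeft - 1);
            });
        }
        throw err; // out of retries, propagate the failure
    });
}

sendDataWithRetry(3);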
Hope this helps.

How to ensure asynchronous code is executed after a stream is finished processing?

I have a stream that I process by listening for the data, error, and end events, and I call a function to process each data event. Naturally, the function processing the data calls other callbacks, making it asynchronous. So how do I start executing more code once the data in the stream has been processed? Listening for the end event on the stream does NOT mean the asynchronous data processing functions have finished.
How can I ensure that the stream data processing functions are finished when I execute my next statement?
Here is an example:
function updateAccountStream(accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
    var self = this;
    var promises = [];
    accountStream
        .on('data', function (account) {
            migrateAccount.bind(self)(account, finishMigration);
        })
        .on('error', function (err) {
            return console.log(err);
        })
        .on('end', function () {
            console.log("Finished updating account stream (but finishMigration is still running!!!)");
            callThisOnlyAfterAllAccountsAreMigrated(); // finishMigration is still running!
        });
}
var migrateAccount = function (oldAccount, callback) {
    executeSomeAction(oldAccount, function (err, newAccount) {
        if (err) return console.log("error received:", err);
        return callback(newAccount);
    });
}
var finishMigration = function (newAccount) {
    // some code that is executed asynchronously...
}
How do I ensure that callThisOnlyAfterAllAccountsAreMigrated is called AFTER the stream has been processed?
Can this be done with promises? Can it be done with through streams? I am working with Nodejs, so referencing other npm modules could be helpful.
As you said, listening for the end event on the stream is useless on its own. The stream doesn't know or care what you're doing with the data in your data handler, so you would need to write some code to keep track of your own migrateAccount state.
If it were me, I would rewrite this whole section. If you use the readable event with .read() on your stream, you can read as many items at a time as you feel like dealing with. If that's one, no problem. If it's 30, great. The reason you do this is so that you won't overrun whatever is doing work with the data coming from the stream. As-is right now, if accountStream is fast, your application will undoubtedly crash at some point.
When you read an item from a stream and start work, take the promise you get back (use Bluebird or similar) and throw it into an array. When the promise is resolved, remove it from the array. When the stream ends, attach a .done() handler to .all() (basically making one big promise out of every promise still in the array).
You could also use a simple counter for jobs in progress.
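A rough sketch of the promise-array idea (it ignores the backpressure concern above for brevity and assumes migrateAccount is changed to return a Bluebird promise instead of taking a callback):
var Promise = require('bluebird');

function updateAccountStream(accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
    var pending = [];
    accountStream
        .on('data', function (account) {
            // assumption: migrateAccount now returns a promise
            pending.push(migrateAccount(account));
        })
        .on('error', function (err) {
            console.log(err);
        })
        .on('end', function () {
            // the stream is done emitting; now wait for every migration to settle
            Promise.all(pending).then(callThisOnlyAfterAllAccountsAreMigrated);
        });
}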
Using a through stream (the npm through2 module), I solved this problem using the following code that controls the asynchronous behaviour:
var through = require('through2').obj;
function updateAccountStream(accountStream, callThisOnlyAfterAllAccountsAreMigrated) {
    var self = this;
    var promises = [];
    accountStream.pipe(through(function (account, _, next) {
        migrateAccount.bind(self)(account, finishMigration, next);
    }))
    .on('data', function (account) {
    })
    .on('error', function (err) {
        return console.log(err);
    })
    .on('end', function () {
        console.log("Finished updating account stream");
        callThisOnlyAfterAllAccountsAreMigrated();
    });
}
var migrateAccount = function (oldAccount, callback, next) {
    executeSomeAction(oldAccount, function (err, newAccount) {
        if (err) return console.log("error received:", err);
        return callback(newAccount, next);
    });
}
var finishMigration = function (newAccount, next) {
    // some code that is executed asynchronously, but using the 'next' callback when migration is finished...
}
It is a lot easier when you handle streams via promises.
Copied from here is an example that uses the spex library:
var spex = require('spex')(Promise);
var fs = require('fs');
var rs = fs.createReadStream('values.txt');
function receiver(index, data, delay) {
    return new Promise(function (resolve) {
        console.log("RECEIVED:", index, data, delay);
        resolve(); // ok to read the next data;
    });
}
spex.stream.read(rs, receiver)
    .then(function (data) {
        // streaming successfully finished;
        console.log("DATA:", data);
    }, function (reason) {
        // streaming has failed;
        console.log("REASON:", reason);
    });

Ember Mocha tests fail when async (using ember-mocha-adapter)

I can't get Mocha working with Ember because it fails when a test like the following is executed:
describe('Location Panel', function () {
    beforeEach(function () {
        App.reset();
        visit('/map/41.76721,-72.66907');
    });
    it('Have proper address', function () {
        var $title = find('.panel-header h2');
        expect($title).to.have.text('476 Columbus Blvd, Hartford');
    });
});
Basically it can't find any of the DOM elements, because it runs the test before the route has finished loading. The same happens if I visit from within the test and use andThen, etc.
Here's a jsbin for debugging.
Edit
In the jsbin, I'm using a mocked ajax call, but in my tests the ajax calls are real. I am using Ember.$.ajax wrapped in the following:
function ajax(url, options) {
    return new Ember.RSVP.Promise(function (resolve, reject) {
        options = options || {};
        options.url = url;
        options.success = function (data) {
            Ember.run(null, resolve, data);
        };
        options.error = function (jqxhr, status, something) {
            Ember.run(null, reject, arguments);
        };
        Ember.$.ajax(options);
    });
}
Should I be using Ember.run.later as well?
You should use Ember.run.later instead of setTimeout so that the wait helper knows that it should wait.
Alternatively you can use Ember.test.registerWaiter though I don't think you need it here.
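For example, wherever the mocked request (or similar code) currently schedules its response with setTimeout, moving it into the run loop lets the wait helper track it; a sketch with a hypothetical resolve callback and data payload:
// before: setTimeout(function () { resolve(data); }, 100);
Ember.run.later(function () {
    resolve(data); // now the test wait helper knows work is still pending
}, 100);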
Updated JSBIN: http://emberjs.jsbin.com/gahe/1/edit
