Handle Success/Error Responses from ssh2 using Promises - javascript

I'm building a Node.js app which in production will act as an SSH client to many servers, some of which may be inaccessible at any given time. I'm trying to write a function which attempts to run an SSH command with each client in its config upon startup, and I'm not able to handle both successful sessions and those which end in error. I wrapped an ssh2 client in a promise. If I remove the third (trash) server so that only successes result, this works fine! See the output:
STDOUT: Hello World
STDOUT: Hello World
Session closed
Session closed
Successful session: Hello World,Hello World
But if one of the connections times out, even though I handle the error, I don't get to keep any of my data. It looks like the error message overwrites all of the resolved promises:
Successful session: Error: Timed out while waiting for handshake,Error:
Timed out while waiting for handshake,Error: Timed out while waiting
for handshake
Here's my code; forgive me if it's a bit scattered, as I've combined a few of my modules for the sake of this question. My goal is to keep the data from the successful sessions and gracefully handle the failure.
var Client = require('ssh2').Client;

const labs = {
  "ny1": "192.168.1.2",
  "ny2": "192.168.1.3",
  "ny3": "1.1.1.1"
};

function checkLabs() {
  let numLabs = Object.keys(labs).length;
  let promises = [];
  for (let i = 0; i < numLabs; i++) {
    let labName = Object.keys(labs)[i];
    promises.push(asyncSSH("echo 'Hello World'", labs[labName]));
  }
  Promise.all(promises.map(p => p.catch(e => e)))
    .then(results => console.log("Successful session: " + results))
    .catch(e => console.log("Error! " + e));
}

var sendSSH = function (command, dest, callback) {
  var conn = new Client();
  conn.on('ready', function() {
    return conn.exec(command, function(err, stream) {
      if (err) throw err;
      stream.on('data', function(data) {
        callback(null, data);
        console.log('STDOUT: ' + data);
      }).stderr.on('data', function(data) {
        callback(err);
        console.log('STDERR: ' + data);
      }).on('close', function(err) {
        if (err) {
          console.log('Session closed due to error');
        } else {
          console.log('Session closed');
        }
      });
      stream.end('ls -l\nexit\n');
    });
  }).on('error', function(err) {
    callback(err);
  }).connect({
    host: dest,
    port: 22,
    username: 'root',
    readyTimeout: 10000,
    privateKey: require('fs').readFileSync('link-to-my-key')
  });
};

function asyncSSH(command, dest) {
  return new Promise(function(resolve, reject) {
    sendSSH(command, dest, function(err, data) {
      if (!err) {
        resolve(data);
      } else {
        reject(err);
      }
    });
  });
}

checkLabs();
How can I better use this promise wrapper to handle whatever errors come from the ssh2 client? Any tips are appreciated.

To get the best use from each connection, you can (and arguably should) promisify two things separately:
the instantiation of each Client() instance
each instance's conn.exec() method (and any other asynchronous methods as required)
This will allow each instance of Client() to be reused with different commands (though that's not required in this example).
You should also be sure to disconnect each socket when its job is done, by calling .end() on the connection. For this, a "disposer pattern" is recommended.
With those points in mind, and with a few assumptions, here's what I ended up with:
var Client = require('ssh2').Client;

const labs = {
  "ny1": "192.168.1.2",
  "ny2": "192.168.1.3",
  "ny3": "1.1.1.1"
};

function checkLabs() {
  let promises = Object.keys(labs).map((key) => {
    return withConn(labs[key], (conn) => {
      return conn.execAsync("echo 'Hello World'");
    })
    .catch((e) => "Error: " + e.message); // catch here in order to immunise the whole process against any single failure
                                          // (including connection failures) and inject an error message into the success path.
  });
  Promise.all(promises)
    .then(results => console.log("Successful session: " + results))
    .catch(e => console.log("Error! " + e.message)); // with individual errors caught above, you should not end up here.
}

// disposer pattern, based on https://stackoverflow.com/a/28915678/3478010
function withConn(dest, work) {
  var conn_;
  return getConnection(dest).then((conn) => {
    conn_ = conn;
    return work(conn);
  }).then((result) => {
    if (conn_) {
      conn_.end(); // on success, disconnect the socket (ie dispose of conn_).
    }
    return result; // pass the command's output through to the caller.
  }, (error) => {
    if (conn_) {
      conn_.end(); // on error, disconnect the socket (ie dispose of conn_).
    }
    throw error; // re-throw so the caller still sees the failure.
  });
  // Note: with Bluebird promises, simplify .then(fn,fn) to .finally(fn).
}

function getConnection(dest) {
  return new Promise((resolve, reject) => {
    let conn = promisifyConnection(new Client());
    conn.on('ready', () => {
      resolve(conn);
    })
    .on('error', reject)
    .connect({
      host: dest,
      port: 22,
      username: 'root',
      readyTimeout: 10000,
      privateKey: require('fs').readFileSync('link-to-my-key')
    });
  });
}

function promisifyConnection(conn) {
  conn.execAsync = (command) => { // promisify conn.exec()
    return new Promise((resolve, reject) => {
      conn.exec(command, (err, stream) => {
        if (err) {
          reject(err);
        } else {
          let streamSegments = []; // array in which to accumulate streamed data
          stream.on('close', (err) => {
            if (err) {
              reject(err);
            } else {
              resolve(streamSegments.join('')); // or whatever is necessary to combine the accumulated stream segments
            }
          }).on('data', (data) => {
            streamSegments.push(data);
          }).stderr.on('data', function(data) {
            reject(new Error(data)); // assuming `data` to be String
          });
          stream.end('ls -l\nexit\n'); // not sure what this does?
        }
      });
    });
  };
  // ... promisify any further Client methods here ...
  return conn;
}
NOTES:
the promisification of conn.exec() includes an assumption that data may be received in a series of segments (e.g. packets); see the sketch after these notes. If this assumption is not valid, then the need for the streamSegments array disappears.
getConnection() and promisifyConnection() could be written as one function, but as separate functions it's easier to see what's going on.
getConnection() and promisifyConnection() keep all the messy stuff well away from the application code.
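One note on the first point above: ssh2 emits each 'data' event as a Buffer, so streamSegments.join('') relies on implicit Buffer-to-string conversion. If you do expect multiple segments, a minimal variant of the execAsync promisification that concatenates Buffers explicitly might look like this (a sketch under that assumption, not part of the original answer):
conn.execAsync = (command) => { // alternative promisification of conn.exec()
  return new Promise((resolve, reject) => {
    conn.exec(command, (err, stream) => {
      if (err) return reject(err);
      const chunks = []; // each 'data' event delivers a Buffer
      stream.on('data', (chunk) => chunks.push(chunk))
        .on('close', () => resolve(Buffer.concat(chunks).toString('utf8')))
        .stderr.on('data', (chunk) => reject(new Error(chunk.toString())));
    });
  });
};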

Related

Why isn't my async function waiting for the promise to be fulfilled

I am using ldapjs to query users from an ldap server.
If I put all the code just in a single script without using functions, the query works and I get the results I need.
I am now trying to use expressjs to serve a rest endpoint to enable querying of the ldap server, so I moved the ldapjs client.search code into an async function with a promise surrounding the actual search code.
After the promise code, I have a line which exercises the promise using await and stores the results of the promise in a variable. I then return that variable to the calling function which will eventually send the results back as a json-formatted string to the requesting browser.
The problem I am seeing is that the console.log() of the returned results is undefined and appears before the console.log statements inside the promise code. So it looks like the async function is returning before the promise is fulfilled, but I don't see why because in all the examples of promises and async/await I have seen this scenario works correctly.
Below is a sample script without the expressjs part to just make sure everything works correctly.
// script constants:
const ldap = require('ldapjs');
const assert = require('assert');
const ldapServer = "ldap.example.com";
const adSuffix = "dc=example,dc=com"; // test.com
const client = getClient();
const fullName = "*doe*";

var opts = {
  scope: "sub",
  filter: `(cn=${fullName})`,
  attributes: ["displayName", "mail", "title", "manager"]
};

console.log("performing the search");
let ldapUsers = doSearch(client, opts);
console.log("Final Results: " + ldapUsers);

function getClient() {
  // Setup the connection to the ldap server
  ...
  return client;
}

async function doSearch(client, searchOptions) {
  console.log("Inside doSearch()");
  let promise = new Promise((resolve, reject) => {
    users = '{"users": [';
    client.search(adSuffix, searchOptions, (err, res) => {
      if (err) {
        console.log(err);
        reject(err)
      }
      res.on('searchEntry', function(entry) {
        console.log("Entry: " + users.length);
        if (users.length > 11) {
          users = users + "," + JSON.stringify(entry.object);
        } else {
          users = users + JSON.stringify(entry.object);
        }
      });
      res.on('error', function(err) {
        console.error("Error: " + err.message);
        reject(err)
      });
      res.on('end', function(result) {
        console.log("end:");
        client.unbind();
        users = users + "]}";
        resolve(users)
      });
    });
  });
  // resolve the promise:
  let result = await promise;
  console.log("After promise has resolved.");
  console.log(result);
  return result
}
The output from the console.log statements is as follows:
Setting up the ldap client.
ldap.createClient succeeded.
performing the search
Inside doSearch()
Final Results: [object Promise]
Entry: 11
end:
After promise has resolved.
{"users": [{"dn":"cn=john_doe"}]}
I did strip out the code which creates the ldapjs client and redacted the company name, but otherwise this is my code.
Any ideas on why the doSearch function is returning before the promise is fulfilled would be greatly appreciated.
As @danh mentioned in a comment, you're not awaiting the response from doSearch. Since doSearch is an async function it will always return a promise, and thus must be awaited.
As a quick and dirty way to do that you could wrap your call in an immediately invoked asynchronous function like so:
// ...
(async () => console.log(await doSearch(client, opts)))();
// ...
For more info you might check out the MDN docs on asynchronous functions
I think there are a few issues in the provided code snippet. As @danh pointed out, you need to await the doSearch call. However, you may not be able to do that at the top level, since you may not be in an environment with top-level await support. This likely means you'll want to wrap the call to doSearch in an async function and call that, assuming you need to wait for the search results.
// script constants:
const ldap = require('ldapjs');
const assert = require('assert');
const ldapServer = "ldap.example.com";
const adSuffix = "dc=example,dc=com"; // test.com
const client = getClient();
const fullName = "*doe*";

function getClient() {
  // Setup the connection to the ldap server
  ...
  return client;
}

async function doSearch(client, searchOptions) {
  console.log("Inside doSearch()");
  return new Promise((resolve, reject) => {
    users = '{"users": [';
    client.search(adSuffix, searchOptions, (err, res) => {
      if (err) {
        console.log(err);
        reject(err)
      }
      res.on('searchEntry', function(entry) {
        console.log("Entry: " + users.length);
        if (users.length > 11) {
          users = users + "," + JSON.stringify(entry.object);
        } else {
          users = users + JSON.stringify(entry.object);
        }
      });
      res.on('error', function(err) {
        console.error("Error: " + err.message);
        reject(err)
      });
      res.on('end', function(result) {
        console.log("end:");
        client.unbind();
        users = users + "]}";
        console.log(result);
        resolve(users)
      });
    });
  });
}

const opts = {
  scope: "sub",
  filter: `(cn=${fullName})`,
  attributes: ["displayName", "mail", "title", "manager"]
};

(async function runAsyncSearch () {
  console.log("performing the search");
  try {
    const ldapUsers = await doSearch(client, opts); // Await the async results
    console.log("After promise has resolved.");
    console.log("Final Results: " + ldapUsers);
  } catch (err) {
    console.error(err.message);
  }
})(); // Execute the function immediately after defining it.
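Since the original goal was an Express endpoint, the same pattern (await inside an async handler) applies there. A minimal sketch, assuming an Express app plus the getClient/doSearch/opts definitions above, and creating a fresh client per request because doSearch unbinds it:
const express = require('express');
const app = express();

app.get('/users', async (req, res) => {
  try {
    const ldapUsers = await doSearch(getClient(), opts); // await inside the route handler
    res.type('json').send(ldapUsers); // doSearch resolves with a JSON-formatted string
  } catch (err) {
    res.status(500).json({ error: err.message });
  }
});

app.listen(3000);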

Async function does not wait for await function to end

I have an async function that does not work as expected; here is the code:
const onCreateCoachSession = async (event, context) => {
  const { coachSessionID } = context.params;
  let coachSession = event.val();
  let opentokSessionId = 'prout';
  await opentok.createSession({ mediaMode: 'relayed' }, function(
    error,
    session
  ) {
    if (error) {
      console.log('Error creating session:', error);
    } else {
      opentokSessionId = session.sessionId;
      console.log('opentokSessionIdBefore: ', opentokSessionId);
      const sessionId = session.sessionId;
      console.log('Session ID: ' + sessionId);
      coachSession.tokbox = {
        archiving: true,
        sessionID: sessionId,
        sessionIsCreated: true,
      };
      db.ref(`coachSessions/${coachSessionID}`).update(coachSession);
    }
  });
  console.log('opentokSessionIdEnd: ', opentokSessionId);
};
My function onCreateCoachSession triggers on a Firebase event (it's a Cloud Function), but it does not wait for opentok.createSession to finish, and I don't understand why, as I put an await before it.
Does anyone have an idea why my code goes straight to the last console.log (opentokSessionIdEnd)?
Here is a screenshot of the order of the console.log output:
It's probably a simple async/await problem that I've missed, but I cannot see what.
Thanks in advance to the community for the help.
You're using createSession in callback mode (you're giving it a callback function), so it doesn't return a Promise, so it can't be awaited.
Two solutions:
1/ Use createSession in Promise mode (if it allows this, see the doc)
let session = null;
try {
  session = await opentok.createSession({ mediaMode: 'relayed' });
} catch (err) {
  console.log('Error creating session:', err);
}
or 2/ await a Promise
let session;
try {
  session = await new Promise((resolve, reject) => {
    opentok.createSession({ mediaMode: 'relayed' }, (error, session) => {
      if (error) {
        return reject(error);
      }
      resolve(session);
    });
  });
} catch (err) {
  console.log('Error creating session:', err);
  throw new Error(err);
}
opentokSessionId = session.sessionId;
console.log('opentokSessionIdBefore: ', opentokSessionId);
// ...
await means it will wait until the promise is resolved. I guess there is no promise returned in this case. You can create your own promise and handle the case.
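For instance, here is a minimal sketch of that idea using Node's util.promisify, assuming opentok.createSession follows the standard (error, result) callback convention shown in the snippet above:
const { promisify } = require('util');

// bind so createSession keeps its `this` when called as a standalone function
const createSessionAsync = promisify(opentok.createSession.bind(opentok));

const onCreateCoachSession = async (event, context) => {
  const session = await createSessionAsync({ mediaMode: 'relayed' }); // now genuinely awaitable
  console.log('opentokSessionIdEnd: ', session.sessionId);
};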

await promise before callback in async.each

router.post('/runCommand', async function(req, res) {
  let results = [];
  async.each(req.body.requests, async function(request, callback) {
    const data = await connect(request.command)
    await results.push(data);
    await callback(null);
  }, function(err) {
    if (!err) {
      res.send(202, results)
    }
  });
});
res.send never takes place, and the callback seems to happen before connect has finished running. connect is successfully returning a promise, because this
router.get('/topics', async function(req, res) {
  console.log('in get');
  const data = await connect(req.body.command);
  await res.send(data);
});
works fine. But including async.each to run multiple commands seems broken. I know this is an issue with how I'm calling the async.each callback function, but research hasn't revealed how I should be calling it. Is it possible to use a .then() after awaiting a promise?
function connect(command) {
  return new Promise(function(resolve) {
    let host = {
      server: {
        host: "host",
        port: "port",
        userName: "user",
        password: config.Devpassword
      },
      commands: [ command ]
    };
    var SSH2Shell = require('ssh2shell'),
      //Create a new instance passing in the host object
      SSH = new SSH2Shell(host),
      //Use a callback function to process the full session text
      callback = function(sessionText) {
        console.log(sessionText);
        resolve(sessionText);
      };
    SSH.connect(callback);
  });
}
While you could continue to sink more time into getting async.each() to work, I recommend just dropping it and going exclusively with the async / await syntax which simplifies your code a lot:
router.post('/runCommand', async function (req, res) {
  try {
    const results = await Promise.all(
      req.body.requests.map(({ command }) => connect(command))
    );
    res.send(202, results);
  } catch ({ message, stack }) {
    res.send(500, { error: message, stack });
  }
});
Looking at the ssh2shell documentation, I think your connect function could be improved as well for better readability and error handling:
const SSH2Shell = require('ssh2shell');

function connect (command) {
  return new Promise((resolve, reject) => {
    const host = {
      server: {
        host: 'host',
        port: 'port',
        userName: 'user',
        password: config.Devpassword
      },
      commands: [command]
    };
    //Create a new instance passing in the host object
    const SSH = new SSH2Shell(host);
    SSH.on('error', reject);
    SSH.connect(resolve);
  });
}
Please feel free to comment if this still doesn't work for you.
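As a usage note, the same connect() helper drops straight into the single-command route from the question as well; a sketch with error handling added, under the same assumptions about the ssh2shell host config:
router.get('/topics', async function (req, res) {
  try {
    const sessionText = await connect(req.body.command); // resolves with the full session text
    res.send(sessionText);
  } catch (err) {
    res.send(500, { error: err.message });
  }
});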

How to wait for socket connect within rest request?

(This is my first ever contact with Javascript. ...)
Hi, I have to write a REST API with Express, which is no problem due to the huge amount of examples everywhere.
However, within a REST request I have to contact another TCP server (for Modbus). So, within such a request I have to wait for the connect/ready event from socket.connect(). What's a proper way to do that?
What I came up with is encapsulating the socket.connect() in a Promise. The stripped code looks like:
function get_holding() {
  const socket = new net.Socket();
  const client = new modbus.client.TCP(socket); /* = require('jsmodbus') */
  return new Promise(function (resolve, reject) {
    // fulfill the Promise, proceed in the chain
    socket.on('ready', function () { resolve(); });
    // close connection after receiving the response
    socket.on('data', function () { socket.end(); });
    socket.on('error', function (error) { reject(error); });
    socket.connect(/* ip and port */);
  })
  .then(function () {
    // could be above in socket.on('ready', ...),
    // but splitting looks better
    return client.readHoldingRegisters(0, 2);
  })
  .then(function (response) {
    // process response (code stripped)
    return /* processed values */;
  })
  .catch(function (error) {
    throw error;
  });
}

function controller_get_holding(req, res) {
  get_holding()
    .then(function (values) {
      res.json(values);
    })
    .catch(function (error) {
      res.send(require('util').inspect(arguments, {depth: null}));
    });
}

...

app.route('/holding')
  .get(controller_get_holding);
Is that the way to go?
Just found promise-socket, which is all I need.
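For completeness, a minimal sketch of what that might look like, assuming promise-socket's PromiseSocket wrapper around a net.Socket and jsmodbus's promise-returning client methods (check both packages' docs for exact signatures):
const net = require('net');
const { PromiseSocket } = require('promise-socket');
const modbus = require('jsmodbus');

async function get_holding() {
  const socket = new net.Socket();
  const client = new modbus.client.TCP(socket);
  const promiseSocket = new PromiseSocket(socket);
  await promiseSocket.connect(/* ip and port */); // resolves once connected, replacing the 'ready' handler
  const response = await client.readHoldingRegisters(0, 2);
  await promiseSocket.end(); // close the connection when done
  return response; // process the response as before
}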

Nodejs, close mongo db connection via callback

I have a problem with callbacks, async thinking, etc.
Program execution:
Connect to mongoDb.
Create url - https://example.com + add part from locArray.
Send get request (for each).
Save data to mongo db.
Close connection.
Problem:
If the connection is closed on the last line of jsonDataFromApi, I get "server instance pool was destroyed" before all the data from each request has been saved to the db.
So callback(db) was sent to another place - closeMongoDb - but then the error "Cannot read property 'close' of undefined" appeared.
I think the problem is with async behavior, sending callbacks, etc.
const MongoClient = require('mongodb').MongoClient;
const Array = require('node-array');
const request = require('request');

var locationArray = [
  'location1',
  'location2',
  'location3',
  'location4'
];

var dataFromLocApi = (loc, callback) => {
  request({
    url: `https://example.com/${loc}`,
    json: true
  }, (error, response, body) => {
    if (error) {
      callback('Error connection to url.');
    } else {
      callback(undefined, body.result);
    }
  });
};

var jsonDataFromApi = (urldb, callback) => {
  MongoClient.connect(urldb, (err, db) => {
    if (err) {
      console.log('MongoDb connection error.');
    }
    console.log('MongoDb - connected.');
    locationArray.forEachAsync(function(loc, index, arr) {
      dataFromLocApi(loc, (errorMessage, results) => {
        if (errorMessage) {
          console.log(errorMessage);
        } else {
          console.log(JSON.stringify(results, undefined, 2));
          db.collection('testCollection').insert(results, function(error, record) {
            if (error)
              throw error;
            console.log("data saved");
          });
        }
      });
    }, function() {
      console.log('complete');
    });
    callback(db);
  });
}

var closeMongoDb = (urldb, callback) => {
  jsonDataFromApi(urldb, (error, db) => {
    if (error) {
      callback('Close connection - failure');
    } else {
      db.close();
      console.log('MongoDb connections was closed.');
    }
  });
}

closeMongoDb('mongodb://127.0.0.1:27017/testDb', (err, db) => {
  console.log('DONE');
});
There is definitely a problem with asynchrony there.
You're not waiting for the items to be processed before calling db.close().
Also, the functions that you have defined have unclear semantics. For example, the function closeMongoDb should basically close the DB and that's it. But here it does another job as well: it fetches the data and then closes the DB.
Also, I'd probably use the async module instead of node-array, as the latter seems to solve a different problem.
I've refactored the code. Please read my comments. I tried to make it as clear as possible.
const MongoClient = require("mongodb").MongoClient;
const request = require("request");
// We are going to use the async module
// This is a classical module to handle async behavior.
const async = require("async");
// As you can see this function accepts a callback
// If there is an error connecting to the DB
// it passes it up to the caller via callback(err)
// This is a general pattern
const connectToDb = function(urldb, callback) {
MongoClient.connect(urldb, (err, db) => {
if (err) {
console.log("MongoDb connection error.");
callback(err);
return;
}
// If everything is OK, pass the db as a data to the caller.
callback(undefined, db);
});
};
// This method fetches the data for a single location.
// The logic with errors/data is absolutely the same.
const getData = (loc, callback) => {
request(
{
url: `https://example.com/${loc}`,
json: true
},
(error, response, body) => {
if (error) {
callback("Error connection to url.");
return;
}
callback(undefined, body.result);
}
);
};
// This function goes over each location, pulls the data and saves it to the DB
// Last parameter is a callback, I called it allDataFetchedCb to make it clear
// that we are calling it after ALL the locations have been processed
// And everything is saved to the DB.
const saveDataFromLocations = function(locations, db, allDataFetchedCb) {
// First param here is an array of items
// The second one is an async function that we want to execute for each item
// When a single item is processed we call the callback. I named it 'locProcessedCB'
// So it's clear what happens.
// The third parameter is a callback that is going to be called when ALL the items
// have been processed.
async.each(
locations,
function(loc, locProcessedCb) {
getData(loc, (apiErr, results) => {
if (apiErr) {
console.log(apiErr);
// Well, we couldn't process the item, pass the error up.
locProcessedCb(apiErr);
return;
}
console.log(
`Obtained the data from the api: ${JSON.stringify(
results,
undefined,
2
)}`
);
db.collection("testCollection").insert(results, function(dbError) {
if (dbError) {
// Also an error, we couldn't process the item.
locProcessedCb(dbError);
return;
}
// Ok the item is processed without errors, after calling this
// So we tell the async.each function: ok, good, go on and process the next one.
locProcessedCb();
});
});
},
function(err) {
// We gonna get here after all the items have been processed or any error happened.
if (err) {
allDataFetchedCb(err);
return;
}
console.log("All the locations have been processed.");
// All good, passing the db object up.
allDataFetchedCb(undefined, db);
}
);
};
// This function is an entry point.
// It calls all the above functions one by one.
const getDataAndCloseDb = function(urldb, locations, callback) {
  // Well, let's connect.
  connectToDb(urldb, (err, db) => {
    if (err) {
      callback(err);
      return;
    }
    // Now let's get everything.
    saveDataFromLocations(locations, db, (err, db) => {
      if (err) {
        callback(err);
        return;
      }
      // If somehow there is no db object, or no close method, we wanna know about it.
      if (!db || !db.close) {
        callback(new Error("Unable to close the DB Connection."));
        return; // don't fall through to db.close() on a missing db object.
      }
      // Closing the DB.
      db.close(err => {
        // If there's no error, err === undefined or null,
        // so this call is equal to callback(undefined);
        callback(err);
      });
    });
  });
};

const locationArray = ["location1", "location2", "location3", "location4"];

// Finally calling the function, passing all needed data inside.
getDataAndCloseDb("mongodb://127.0.0.1:27017/testDb", locationArray, err => {
  if (err) {
    console.error(
      `Unable to fetch the data due to the following reason: ${err}`
    );
    return;
  }
  console.log("Done successfully.");
});
I didn't run this code as I don't have the URL etc. So please try it yourself and debug if needed.
