router.post('/runCommand', async function(req, res){
  let results = [];
  async.each(req.body.requests, async function(request, callback){
    const data = await connect(request.command)
    await results.push(data);
    await callback(null);
  }, function(err){
    if (!err) {
      res.send(202, results)
    }
  })
})
res.send never takes place, and the callback seems to fire before connect has finished running. connect is successfully returning a promise, because this
router.get('/topics', async function(req, res) {
  console.log('in get');
  const data = await connect(req.body.command);
  await res.send(data);
});
works fine. But adding async.each to run multiple commands seems broken. I know this is an issue with how I'm calling async.each's callback function, but my research hasn't revealed how I should be calling it. Is it possible to use a .then() after awaiting a promise?
function connect(command){
  return new Promise(function(resolve) {
    let host = {
      server: {
        host: "host",
        port: "port",
        userName: "user",
        password: config.Devpassword
      },
      commands: [ command ]
    };
    var SSH2Shell = require ('ssh2shell'),
      //Create a new instance passing in the host object
      SSH = new SSH2Shell(host),
      //Use a callback function to process the full session text
      callback = function(sessionText){
        console.log(sessionText)
        resolve(sessionText);
      }
    SSH.connect(callback);
  })
};
While you could continue to sink more time into getting async.each() to work, I recommend just dropping it and going exclusively with the async / await syntax which simplifies your code a lot:
router.post('/runCommand', async function (req, res) {
  try {
    const results = await Promise.all(
      req.body.requests.map(({ command }) => connect(command))
    );
    res.send(202, results);
  } catch ({ message, stack }) {
    res.send(500, { error: message, stack });
  }
})
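One behavioural note: Promise.all starts all of the connect calls concurrently and returns the results in request order. If you ever need the commands to run one at a time instead, a plain for...of loop with await works; here is a minimal sketch (the route name is made up, purely for illustration):
router.post('/runCommandSequential', async function (req, res) {
  try {
    const results = [];
    // Each command waits for the previous one to finish before starting.
    for (const { command } of req.body.requests) {
      results.push(await connect(command));
    }
    res.send(202, results);
  } catch ({ message, stack }) {
    res.send(500, { error: message, stack });
  }
})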
Looking at the ssh2shell documentation, I think your connect function could be improved as well for better readability and error handling:
const SSH2Shell = require('ssh2shell');

function connect (command) {
  return new Promise((resolve, reject) => {
    const host = {
      server: {
        host: 'host',
        port: 'port',
        userName: 'user',
        password: config.Devpassword
      },
      commands: [command]
    };
    //Create a new instance passing in the host object
    const SSH = new SSH2Shell(host);
    SSH.on('error', reject);
    SSH.connect(resolve);
  });
}
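As a quick usage check (the command string below is just a placeholder):
connect('echo hello')
  .then(sessionText => console.log(sessionText))
  .catch(err => console.error(err));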
Please feel free to comment if this still doesn't work for you.
OK, so I know how to program in C# fairly well and I have recently started programming in JS (Node.js). To be honest, async calls came as a bit of a shock.
Let's say I have this code in C#:
var t_1 = SomeAsyncTask();
var t_2 = SomeOtherAsyncTask();
Task.WaitAll(t_1, t_2);
var res_1 = t_1.Result;
var res_2 = t_2.Result;
Is there a JS equivalent of this? So far I have managed this:
In User.js:
var express = require("express");
var router = express.Router();
var sqlDo = require("../../js_help/DatabasReq/sqlDo.js");
router.get("/", async function(req, res){
  var json = sqlDo.ExecCommand("select * from Users");
  res.send(json); //json.recordset
});
module.exports = router;
In sqlDo.js:
module.exports = {
  ExecCommand: function(command){
    // sql and config are defined before.
    sql.connect(config, function () {
      var request = new sql.Request();
      request.query(command, function (err, recordset) {
        if (err) console.log(err)
        console.log(recordset.recordset);
        return recordset;
      });
    });
  }
};
My problem is that this code runs asynchronously. I have tried putting await in different places but nothing worked, so when I hit the endpoint it returns nothing. I can tell the call is completing because I log the results to the console.
Thanks for any help!
Btw: I have tried googling/Stack Overflow-ing, but I was not able to find anything that looks like the C# equivalent. Is it even possible to write it like in C#? Again, thanks for every answer.
To make your ExecCommand function async, you have to make it return a Promise. Read about Promises for instance here
module.exports = {
  ExecCommand: function(command){
    return new Promise((resolve, reject) => { //return a Promise from the function
      sql.connect(config, function () {
        var request = new sql.Request();
        request.query(command, function (err, recordset) {
          if (err) {
            reject(err); //if there is an error, reject the Promise
          } else {
            resolve(recordset); //if no error, resolve the Promise with the result
          }
        });
      });
    });
  }
};
Depending on your SQL library, it may already support promises instead of callbacks:
module.exports = {
  ExecCommand: function(command) {
    return sql.connect(config)
      .then(() => {
        return new sql.Request().query(command);
      })
  }
};
or with async/await
module.exports = {
  ExecCommand: async function(command) {
    await sql.connect(config);
    return await new sql.Request().query(command);
  }
};
Then you can call this function in the request handler, either like this:
router.get("/", async function(req, res){
  try {
    var json = await sqlDo.ExecCommand("select * from Users");
    res.send(json);
  } catch (err) {
    console.log(err);
    res.sendStatus(500);
  }
});
or like this
router.get("/", function(req, res){
  sqlDo.ExecCommand("select * from Users")
    .then(json => { //the promise resolved
      res.send(json);
    })
    .catch(err => { //the promise rejected
      res.sendStatus(500);
      console.log(err);
    });
});
I prefer the second variant. But that may be just my personal opinion ...
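For completeness, the closest JavaScript equivalent of the C# Task.WaitAll snippet in the question is Promise.all, which waits for several promises in parallel. A minimal sketch, reusing the promise-returning ExecCommand above (the second table name is made up, purely for illustration):
router.get("/", async function(req, res){
  try {
    // Start both queries, then wait for both to finish (like Task.WaitAll followed by .Result).
    var [users, orders] = await Promise.all([
      sqlDo.ExecCommand("select * from Users"),
      sqlDo.ExecCommand("select * from Orders") // hypothetical second query
    ]);
    res.send({ users: users, orders: orders });
  } catch (err) {
    console.log(err);
    res.sendStatus(500);
  }
});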
I'm using Node and Express to create a server for my app. This is what my code looks like:
async function _prepareDetails(activityId, dealId) {
  var offerInfo;
  var details = [];
  client.connect(function(err) {
    assert.equal(null, err);
    console.log("Connected correctly to server");
    const db = client.db(dbName);
    const offers_collection = db.collection('collection_name');
    await offers_collection.aggregate([
      { "$match": { 'id': Id} },
    ]).toArray(function(err, docs) {
      assert.equal(err, null);
      console.log("Found the following records");
      details = docs;
    });
  })
  return details;
}
app.post('/getDetails',(request,response)=>{
  var Id = request.body.Id;
  var activityId = request.body.activityId;
  _prepareDetails(activityId,Id).then(xx => console.log(xx));
  response.send('xx');
})
While calling the getDetails API I am getting an
await is only valid in async function
error (at the line await offers_collection.aggregate). I am also getting a red underline when declaring the async function. The Node version I'm using is 11.x, and I'm also using the Firebase API. What am I doing wrong here?
You are missing the async declaration on one of your functions. Here is the working code:
async function _prepareDetails(activityId, dealId) {
  var offerInfo;
  var details = [];
  client.connect(async function(err) {
    assert.equal(null, err);
    console.log("Connected correctly to server");
    const db = client.db(dbName);
    const offers_collection = db.collection('collection_name');
    await offers_collection.aggregate([
      { "$match": { 'id': Id} },
    ]).toArray(function(err, docs) {
      assert.equal(err, null);
      console.log("Found the following records");
      details = docs;
    });
  })
  return details;
}
app.post('/getDetails', async (request,response)=>{
  var Id = request.body.Id;
  var activityId = request.body.activityId;
  let xx = await _prepareDetails(activityId,Id);
  response.send(xx); // send the actual result, not the string 'xx'
})
await can only be used inside an async function, because await deals with asynchronous work, which must go through either the callback or the promise paradigm. By declaring a function as async, you are telling JavaScript to wrap its return value in a promise. Your issue was on the following line:
client.connect(function(err) {
This is where I added the async as mentioned before.
client.connect(async function(err) {
You will notice I made your route use async also, because you would've had an issue before. Notice the two lines in your original code:
_prepareDetails(activityId,Id).then(xx => console.log(xx));
response.send('xx');
Your response would be sent before you even made your database call, because you are not wrapping the response.send within the .then. You could move the response.send into the .then (sketched after the code below), but if you are going to use async/await, I would use it all the way. So your new route would look like:
app.post('/getDetails', async (request,response)=>{
  var Id = request.body.Id;
  var activityId = request.body.activityId;
  let xx = await _prepareDetails(activityId,Id);
  response.send(xx); // send the actual result, not the string 'xx'
})
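For comparison, the .then-based variant mentioned above would look roughly like this; a minimal sketch with the send moved inside the callback:
app.post('/getDetails', (request, response) => {
  var Id = request.body.Id;
  var activityId = request.body.activityId;
  _prepareDetails(activityId, Id)
    .then(xx => response.send(xx))           // send only after the promise resolves
    .catch(err => response.sendStatus(500)); // surface failures instead of hanging
});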
I am new to react/nodejs/express/javascript and have encountered the following problem:
I want to get a number, then post that number + 1, then create a new JS object using that number (newFreshId) and add that event to my schedulerData. When I try running the code, I can GET and POST to /api/num, but everything after the .then(function(response) { doesn't appear to run.
I wanted to do this sequentially, so I used .then after every task so that I would not run into ordering problems.
I also tried to remove all the .thens in favor of a while loop that waits for the value to change. This also did not work.
CODE:
CLIENT:
this.newEvent = (schedulerData, slotId, slotName, start, end, type, item) => {
  let newFreshId = 0;
  let newEvent = {}
  axios.get("/api/num").then(function(response) {
    newFreshId = response.data[0] + 1;
    // console.log(newFreshId);
  }).then(function() {
    axios.post("/api/num", {
      id: newFreshId
    }).then(function(response) {
      console.log(response)
      // handle success
      newEvent = {
        id: newFreshId,
        title: this.state.title,
        start: start,
        end: end,
        resourceId: slotId
      };
      schedulerData.addEvent(newEvent);
      this.setState({
        viewModel: schedulerData
      });
      // while(JSON.stringify(newEvent) === '{}'){
      //   console.log('waiting')
      // }
      console.log(newEvent)
      schedulerData.addEvent(newEvent);
      console.log(newEvent)
      this.setState({
        viewModel: schedulerData
      });
    })
  })
};
SERVER:
app.get('/api/num', function(req, res) {
  //console.log(require('./number.json'))
  var fs = require('fs')
  fs.readFile('./number.json', {encoding: 'utf-8'}, function(err,data){
    if (!err) {
      //console.log('received data: ' + JSON.parse(data));
      res.json(JSON.parse(data))
    } else {
      console.log(err);
    }
  })
})

app.post('/api/num', function(req, res) {
  var id = req.body.id
  var fs = require('fs');
  fs.writeFileSync("./number.json", "[ "+id+" ]", function(err) {
    if(err) {
      return console.log(err);
    }
    res.status(200)
  })
})
Thanks for all the help :)
fs.writeFileSync doesn't have a callback, so the function you're adding never gets executed: https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options
This means the response is never sent back to the client, and the axios promise is never resolved.
Try using fs.writeFile with a callback: https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback
It's also a good idea to send a response in the error case.
app.post('/api/num', function(req, res) {
  var id = req.body.id
  var fs = require('fs');
  fs.writeFile("./number.json", "[ "+id+" ]", function(err) {
    if(err) {
      console.log(err);
      return res.sendStatus(500); // actually send an error response (res.status() alone doesn't end the request)
    }
    res.sendStatus(200); // send the success response so the client's promise can resolve
  })
})
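If you'd rather avoid callbacks entirely, Node's fs.promises API is another option here; a minimal sketch of the same route with async/await:
const fs = require('fs').promises;

app.post('/api/num', async function(req, res) {
  try {
    var id = req.body.id;
    await fs.writeFile("./number.json", "[ " + id + " ]"); // resolves when the file has been written
    res.sendStatus(200);
  } catch (err) {
    console.log(err);
    res.sendStatus(500);
  }
})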
Finally, though it wouldn't help you in this situation, you should add a .catch to the very tail end of your axios chain. Any errors that take place in the promise chain will wind up there.
An example:
axios.get(specs).then(someFunction).catch(e => console.error(e));
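Applied to the chain in the question, that would look roughly like this; a sketch that also returns the inner axios.post so the chain stays flat:
axios.get("/api/num")
  .then(function(response) {
    var newFreshId = response.data[0] + 1;
    // Returning the post keeps the chain flat, so the next .then waits for it.
    return axios.post("/api/num", { id: newFreshId });
  })
  .then(function(response) {
    console.log(response);
  })
  .catch(e => console.error(e)); // any error anywhere in the chain lands here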
In my case I was facing the same issue, and I figured out it was because I was redirecting after making the post request:
window.location.href = "/{redirect to some route}"
The redirect navigates away (and clears the console) immediately, so I couldn't see the .then response unless I removed the redirection.
I too faced an issue where I wasn't able to run the .then() or .catch() code
const fetch = async() => {
  axios.get("https://jsonplaceholder.typicode.com/todos/1")
    .then((res) => {
      console.log(res)
    })
    .catch(err => {
      console.log(err)
    })
}
All I did was add async to the function and it started working. For the import, use it like this:
const axios = require('axios').default;
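Presumably the working version looked something like this; a minimal sketch using await inside the async function (same placeholder endpoint as above):
const axios = require('axios').default;

const fetch = async () => {
  try {
    const res = await axios.get("https://jsonplaceholder.typicode.com/todos/1");
    console.log(res);
  } catch (err) {
    console.log(err);
  }
};

fetch();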
I'm building a node.js app which in production will act as an SSH client to many servers, some of which may be inaccessible at any given time. I'm trying to write a function which attempts to run an SSH command with each client in its config upon startup, and I'm not able to handle both successful sessions and those which end in error. I wrapped an ssh2 client in a promise. If I remove the third (trash) server and only successes result, this works fine! See the output:
STDOUT: Hello World
STDOUT: Hello World
Session closed
Session closed
Successful session: Hello World,Hello World
But if one of the connections times out, even though I handle the error, I don't get to keep any of my data; it looks like the error message overwrites all of the resolved promises:
Successful session: Error: Timed out while waiting for handshake,Error: Timed out while waiting for handshake,Error: Timed out while waiting for handshake
Here's my code, forgive me if this is a bit scattered, as I've combined a few of my modules for the sake of this question. My goal is to keep the data from the successful session and gracefully handle the failure.
var Client = require('ssh2').Client;

const labs = {
  "ny1": "192.168.1.2",
  "ny2": "192.168.1.3",
  "ny3": "1.1.1.1"
};

function checkLabs() {
  let numLabs = Object.keys(labs).length;
  let promises = [];
  for(i=0;i<numLabs;i++){
    let labName = Object.keys(labs)[i];
    promises.push(asyncSSH("echo 'Hello World'", labs[labName]));
  }
  Promise.all(promises.map(p => p.catch(e => e)))
    .then(results => console.log("Successful session: " + results))
    .catch(e => console.log("Error! " + e));
}

var sendSSH = function (command, dest, callback) {
  var conn = new Client();
  conn.on('ready', function() {
    return conn.exec(command, function(err, stream) {
      if (err) throw err;
      stream.on('data', function(data) {
        callback(null, data);
        console.log('STDOUT: ' + data);
      }).stderr.on('data', function(data){
        callback(err);
        console.log('STDERR: ' + data);
      }).on('close', function(err) {
        if(err) {
          console.log('Session closed due to error');
        } else {
          console.log('Session closed');
        }
      });
      stream.end('ls -l\nexit\n');
    });
  }).on('error', function(err){
    callback(err);
  }).connect({
    host: dest,
    port: 22,
    username: 'root',
    readyTimeout: 10000,
    privateKey: require('fs').readFileSync('link-to-my-key')
  });
};

function asyncSSH(command, dest) {
  return new Promise(function(resolve, reject){
    sendSSH(command, dest, function(err,data) {
      if (!err) {
        resolve(data);
      } else {
        reject(err);
      }
    });
  });
}
checkLabs();
How can I better use this promise wrapper to handle whatever errors come from the ssh2 client? Any tips are appreciated.
To get the best use from each connection, you can (and arguably should) promisify separately:
the instantiation of each Client() instance
each instance's conn.exec() method (and any other asynchronous methods as required)
This will allow each instance of Client() to be reused with different commands (though not required in this example).
You should also be sure to disconnect each socket when its job is done, by calling the client's end() method. For this, a "disposer pattern" is recommended.
With those points in mind and with a few assumptions, here's what I ended up with:
var Client = require('ssh2').Client;

const labs = {
  "ny1": "192.168.1.2",
  "ny2": "192.168.1.3",
  "ny3": "1.1.1.1"
};

function checkLabs() {
  let promises = Object.keys(labs).map((key) => {
    return withConn(labs[key], (conn) => {
      return conn.execAsync("echo 'Hello World'")
        .catch((e) => "Error: " + e.message); // catch in order to immunise the whole process against any single failure,
                                              // and inject an error message into the success path.
    });
  });
  Promise.all(promises)
    .then(results => console.log("Successful session: " + results))
    .catch(e => console.log("Error! " + e.message)); // with individual errors caught above, you should not end up here.
}
// disposer pattern, based on https://stackoverflow.com/a/28915678/3478010
function withConn(dest, work) {
  var conn_;
  return getConnection(dest).then((conn) => {
    conn_ = conn;
    return work(conn);
  }).then((result) => {
    if(conn_) {
      conn_.end(); // on success, disconnect the socket (ie dispose of conn_).
    }
    return result; // pass the result of `work` through to the caller.
  }, (e) => {
    if(conn_) {
      conn_.end(); // on error, disconnect the socket (ie dispose of conn_).
    }
    throw e; // re-throw so the caller still sees the error.
  });
  // Note: with Bluebird promises, simplify .then(fn,fn) to .finally(fn).
}
function getConnection(dest) {
  return new Promise((resolve, reject) => {
    let conn = promisifyConnection(new Client());
    conn.on('ready', () => {
      resolve(conn);
    })
    .on('error', reject)
    .connect({
      host: dest,
      port: 22,
      username: 'root',
      readyTimeout: 10000,
      privateKey: require('fs').readFileSync('link-to-my-key')
    });
  });
}
function promisifyConnection(conn) {
  conn.execAsync = (command) => { // promisify conn.exec()
    return new Promise((resolve, reject) => {
      conn.exec(command, (err, stream) => {
        if(err) {
          reject(err);
        } else {
          let streamSegments = []; // array in which to accumulate streamed data
          stream.on('close', (err) => {
            if(err) {
              reject(err);
            } else {
              resolve(streamSegments.join('')); // or whatever is necessary to combine the accumulated stream segments
            }
          }).on('data', (data) => {
            streamSegments.push(data);
          }).stderr.on('data', function(data) {
            reject(new Error(data)); // assuming `data` to be String
          });
          stream.end('ls -l\nexit\n'); // not sure what this does?
        }
      });
    });
  };
  // ... promisify any further Client methods here ...
  return conn;
}
NOTES:
the promisification of conn.exec() includes an assumption that data may be received in a series of segments (eg packets). If this assumption is not valid, then the need for the streamSegments array disappears.
getConnection() and promisifyConnection() could be written as one function, but as separate functions it's easier to see what's going on.
getConnection() and promisifyConnection() keep all the messy stuff well away from the application code.
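On the first note: if the chunks arrive as Buffers rather than strings, one way to combine the accumulated segments is Buffer.concat. A small sketch of how the data/close handlers above could be adapted under that assumption (Buffer.from also tolerates string chunks):
stream.on('data', (data) => {
  streamSegments.push(Buffer.from(data)); // normalise each chunk to a Buffer
}).on('close', (err) => {
  if (err) {
    reject(err);
  } else {
    resolve(Buffer.concat(streamSegments).toString('utf8')); // join the chunks into one string
  }
});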
My Jest test is not working as I expect it to. See:
const res = {
  send: (content) => {
    expect(content).toEqual({
      app_status: 501,
      errors: {
        jwt: {
          location: 'body',
          param: 'jwt',
          value: undefined,
          msg: 'The jwt is required'
        }
      }
    });
    console.log("after expect");
    done();
  },
};
Basically EVERYTHING after the expect(content).toEqual ... in res.send is not called. I find that very confusing. I am getting no error, except that my tests are taking too long (because done() is not called and the test is not "closed"). So my question is: am I missing something obvious?
The following should work fine. I added asynchronous handling, since send may be called asynchronously:
const createResponse = () => {
  var resolve;
  const p = new Promise(
    (r,reject)=>resolve=r
  );
  return [
    {
      send: (value) => {
        resolve(value)
      }
    },
    p
  ];
};
test('(async) fail', done => {
  //Router
  const router = express.Router();
  //Endpoint to fetch version
  router.get('/api/version', (req, res) => {
    setTimeout(x=>res.send('v1'),10)
  });
  const request = {
    method: 'GET'
  };
  let [response,p] = createResponse()
  router.stack[0].handle(request, response, () => {});
  p.then(
    x=>expect(x).toBe('v2'),
    reject=>expect("should not reject").toBe(reject)
  ).then(
    x=>done()
    //,x=>done() //this will cause all tests to pass
  );
});
To answer your question in the comment, consider the following code:
const express = require('express');
const router = express.Router();
//Endpoint to fetch version
router.get('/api/version', (req, res) => {
  res.send("Hello World");
});
const request = {
  method: 'GET'
};
const response = {
  send: (value) => {
    debugger;//pause here and see the stack
    throw new Error("Hello Error.");
  }
};
router.stack[0].handle(
  request,
  response,
  //this is the function done in route.js:
  // https://github.com/expressjs/express/blob/master/lib/router/route.js#L127
  err => {
    console.error(err.message);//console errors Hello Error.
  }
);
send throws an error, but send is called by your mock, which in turn is called by Express here. So Express catches the exception and then ends up here (that done is Express's route callback, not Jest's done).
So Express calls your callback with an error, skipping Jest's done and not throwing anything (it may show something in the log). Since your callback doesn't do anything, the test times out.
You could try to call Jest's done in that callback (at console.error(err.message)), as sketched below.
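A minimal sketch of that, reusing the snippet above:
router.stack[0].handle(
  request,
  response,
  err => {
    console.error(err.message); // the expect() failure surfaces here as an error
    done(err);                  // pass it to Jest's done so the test fails instead of timing out
  }
);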
[UPDATE]
Be careful trying to catch the error thrown by expect; the following will report 1 test passed:
test('(async) fail', done => {
  try{
    expect(true).toBe(false);
  }catch(e){
  }
  done();
});
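If you do want to keep a try/catch around expect, one way to avoid swallowing the failure is to hand the error to done; a minimal sketch:
test('(async) fail', done => {
  try {
    expect(true).toBe(false);
  } catch (e) {
    done(e); // report the assertion failure to Jest instead of swallowing it
    return;
  }
  done();
});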
});