I have the following Node.js code to ssh into a server, and it works great forwarding stdout, but whenever I type anything it is not forwarded to the server. How do I forward my local stdin to the ssh connection's stdin?
var command = 'ssh -tt -i ' + keyPath + ' -o StrictHostKeyChecking=no ubuntu@' + hostIp;
var ssh = child_proc.exec(command, {
env: process.env
});
ssh.stdout.on('data', function (data) {
console.log(data.toString());
});
ssh.stderr.on('data', function (data) {
console.error(data.toString());
});
ssh.on('exit', function (code) {
process.exit(code);
});
There are two ways to go about this if you want to pipe process.stdin to the child process:
Child processes have a stdin property that represents the stdin of the child process. So all you should need to do is add process.stdin.pipe(ssh.stdin), as in the sketch below.
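Applied to the code above, it's a one-line addition:
// forward the parent's stdin into the ssh child's stdin
process.stdin.pipe(ssh.stdin);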
You can specify a custom stdio when spawning the process to tell it what to use for the child process's stdin (note that the stdio option is honored by spawn(), not exec()):
child_proc.spawn('ssh', ['-tt', '-i', keyPath, '-o', 'StrictHostKeyChecking=no', 'ubuntu@' + hostIp], { env: process.env, stdio: [process.stdin, 'pipe', 'pipe'] })
Also, on a semi-related note: if you want to avoid spawning child processes and have more programmatic control over and/or more lightweight ssh/sftp connections, there is the ssh2 module.
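For instance, a minimal sketch with ssh2 that runs one command and prints its output (reusing keyPath and hostIp from the question; uptime is just an example command):
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function () {
  // run a single command over the established connection
  conn.exec('uptime', function (err, stream) {
    if (err) throw err;
    stream.on('data', function (data) {
      process.stdout.write(data.toString());
    }).on('close', function () {
      conn.end();
    });
  });
}).connect({
  host: hostIp,
  port: 22,
  username: 'ubuntu',
  privateKey: require('fs').readFileSync(keyPath)
});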
I have a Node.js process and this process forks an sh child process to run a bash script. Something like this:
const cp = require('child_process');
const n = cp.spawn('sh',['foo.sh'], {
stdio: ['ignore','ignore','ignore','ipc']
});
In my bash script (foo.sh), how can I send an IPC message back to the Node.js parent process? I cannot find out how to do that.
Doing some more research, it looks like I will be getting closer to the IPC internals. One thing that might help is if I pass the parent PID to the bash script; then maybe I can do something with that.
When you add 'ipc' to your stdio options, the parent process and child process will establish a communication channel, and provide a file descriptor for the child process to use. This descriptor will be defined in your environment as $NODE_CHANNEL_FD. You can redirect output to this descriptor and it will be sent to the parent process to be parsed and handled.
As a simple example, I sent my name from the bash script to the parent process to be logged.
index.js
const cp = require('child_process');
const n = cp.spawn('sh', ['foo.sh'], {
stdio: ['ignore', 'ignore', 'ignore', 'ipc']
});
n.on('message', (data) => {
console.log('name: ' + data.name);
});
foo.sh
printf "{\"name\": \"Craig\"}\n" 1>&$NODE_CHANNEL_FD
Essentially what is happening in the bash file is:
I'm using the printf command to send the JSON to stdout, file descriptor 1.
And then redirecting it to the file descriptor stored in $NODE_CHANNEL_FD (the & marks the target as a file descriptor rather than a file name).
Note that the JSON you send must be properly formatted and terminated with a \n character
If you wanted to send data from the parent process to the bash process you could add
n.send({"message": "hello world"});
to your JavaScript, and in the bash file you could use something along the lines of
read -u $NODE_CHANNEL_FD MESSAGE
echo " => message from parent process => $MESSAGE"
(read -u is a bash feature, so make sure the script runs under bash rather than a plain POSIX sh).
Note that you will have to change your stdio options so that you are not ignoring the standard output of the child process. You could set them to ['ignore', 1, 'ignore', 'ipc'] so that the child process' standard output goes straight to the parent's.
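Putting both directions together, a sketch under the same assumptions (bash is spawned explicitly because read -u is a bash feature; the reply field is just an example name):
index.js
const cp = require('child_process');
const n = cp.spawn('bash', ['foo.sh'], {
  stdio: ['ignore', 1, 'ignore', 'ipc'] // pass the child's stdout through to the parent's
});
n.on('message', (data) => {
  console.log('reply: ' + data.reply);
});
n.send({ message: 'hello world' });
foo.sh
read -u $NODE_CHANNEL_FD MESSAGE            # read one JSON line from the parent
echo " => message from parent process => $MESSAGE"
printf "{\"reply\": \"got it\"}\n" 1>&$NODE_CHANNEL_FD   # answer the parent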
I have a Node file that runs a karma test in a Node app using the karma public API (I'll save writing out the code because it comes straight from http://karma-runner.github.io/0.13/dev/public-api.html).
All is fine so far; the test runs. Now I need to start serving different files to the karma run. For example, I might have exampleSpec.js, example1.js, example2.js, and example3.js. I need to serve exampleSpec and then example1-3 in sequence.
However, I don't see any documentation on this, and can't seem to get anywhere with it.
So, the answer ended up being pretty simple. The first argument to the Server constructor is a config object that can replace or augment karma.conf.js, so it is possible to pass in altered files arrays. Code below for posterity:
"use strict";
var Server = require('karma').Server;
var filePath = process.cwd();
filePath += "/karma.conf.js";
console.log(filePath);
//files needs to be an array of string file matchers
function runTests(files, port) {
var config = {
configFile: filePath,
files: files,
port: port
};
var server = new Server(config, function(exitCode) {
console.log('Karma server has exited with ' + exitCode);
process.exit(exitCode);
});
server.on('run_complete', function (browser, result) {
console.log('A browser run was completed');
console.log(result);
});
server.start();
}
runTests(['test/**/*Spec.js', 'tmp/example.js'], 9876);
runTests(['test/**/*Spec.js', 'tmp/example2.js'], 9877);
runTests(['test/**/*Spec.js', 'tmp/example3.js'], 9878);
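Note that calling runTests three times in a row like this starts three servers at once on different ports. If the runs need to happen strictly one after another, as the question asked, something along these lines should work, chaining each run from the previous server's exit callback and deferring process.exit until the last run (runSequence is a hypothetical helper, not part of the karma API):
function runSequence(fileSets, port) {
  if (fileSets.length === 0) return;
  var server = new Server({
    configFile: filePath,
    files: fileSets[0],
    port: port
  }, function (exitCode) {
    console.log('Run finished with ' + exitCode);
    runSequence(fileSets.slice(1), port + 1); // start the next run
  });
  server.start();
}
runSequence([
  ['test/**/*Spec.js', 'tmp/example.js'],
  ['test/**/*Spec.js', 'tmp/example2.js'],
  ['test/**/*Spec.js', 'tmp/example3.js']
], 9876);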
I am trying to understand the SSH2 module for Node.js (https://github.com/mscdex/ssh2), but I can't find more documentation. Does anybody have more on the client events?
I am trying to connect from my Node project to a Unix server and execute commands from Node using the SSH2 library, such as mkdir, ls or other Unix commands, but I can't find any documentation on how to do it.
I am able to establish the connection, but now I need to execute Unix commands:
var Client = require('ssh2').Client;
//
var conn = new Client();
conn.on('ready', function() {
console.log('Client :: ready');
conn.sftp(function(err, sftp) {
if (err) throw err;
console.log('Client :: SSH open');
output.add("ls");
conn.end();
});
}).connect({
host: 'hostname',
port: 22,
username: 'user',
password: 'password'
});
Or if this is not achievable with SSH2, can somebody recommend another Node library with which I can connect to a Unix server? I have a Node application that creates an HTML file, but after the creation I need to name it based on the last file name on a Unix server, create the directory on the Unix machine, and then upload the file using the sftp-upload plugin. Trying to achieve this with Node instead of doing it manually.
You can do this with node-exec or ssh2-exec, which uses ssh2. ssh2 is low level, so for exec you can use other libs like ssh2-exec and ssh2-connect.
There are plenty of examples in README.md.
Example (from ssh2-exec README):
var connect = require('ssh2-connect');
var exec = require('ssh2-exec');
connect({host: 'localhost'}, function(err, ssh){
  var child = exec({cmd: 'ls -la', ssh: ssh}, function(err, stdout, stderr){
    console.log(stdout);
  });
  child.stdout.on('data', function(data){
    console.log(data.toString());
  });
  child.on('exit', function(code){
    console.log('Exit', code);
  });
});
I have an app which initially creates static config files (once), and after the files are written I need to reinitialize/restart the application.
Is there something to restart a Node.js app from itself?
This is required because my application runs in two runlevels in Node.js.
The initial one starts completely synchronously, and after this level has completed the app is in an async runlevel in a previously started environment.
I know there are tools like nodemon but that's not what I need in my case.
I tried to kill the app via process.kill(), which works, but I can't listen to the kill event:
// Add the listener
process.on('exit', function(code) {
console.log('About to exit with code:', code);
// Start app again but how?
});
// Kill application
process.kill();
Or is there a better, cleaner way to handle this?
Found a working way to get Node.js restarted from the app itself:
Example:
// Optional part (if there's a running webserver which blocks a port required for the next startup)
try {
APP.webserver.close(); // Express.js instance
APP.logger("Webserver was halted", 'success');
} catch (e) {
APP.logger("Can't stop webserver:", 'error'); // No server started
APP.logger(e, 'error');
}
// First I create an exec command which is executed before the current process is killed
var cmd = "node " + APP.config.settings.ROOT_DIR + 'app.js';
// Then I look whether there's already something else killing the process
if (APP.killed === undefined) {
APP.killed = true;
// Then I execute the command and kill the app if starting was successful
var exec = require('child_process').exec;
exec(cmd, function () {
APP.logger('APPLICATION RESTARTED', 'success');
process.kill();
});
}
The only con I can see here is losing console output, but if everything is logged into logfiles it's not a problem.
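A variant of the same idea (a sketch, assuming the entry point is app.js in APP.config.settings.ROOT_DIR as above) is to spawn the replacement process detached, so the old process can exit immediately instead of waiting for the exec callback:
var spawn = require('child_process').spawn;
// start a detached copy of this app that survives the parent's exit
var child = spawn(process.execPath, [APP.config.settings.ROOT_DIR + 'app.js'], {
  detached: true,
  stdio: 'ignore'
});
child.unref(); // don't let the child keep the parent's event loop alive
process.exit(0);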
I am working with Node.js, and I am trying to embed a console in the web browser to work on a remote server. The web application establishes the connection, so the user does not need to run ssh username@host but only types commands.
I have tried Node.js' ssh2 module and other modules which use ssh2, but I'm always experiencing the same problem: every time I execute a command programmatically using exec(), the ssh session is restarted. I'll explain it better with an example.
> ls
returns the content of the home directory; one of the directories in it is mydir
> cd mydir
> ls
returns the content of my home directory again, because after a command is executed the ssh session is closed/restarted.
Is there any Node.js library which can do the job? Or even a library in a technology other than JavaScript?
Edit: another example for clarification, using the Node.js module ssh-exec.
The server has to execute some commands on another machine using ssh. A function on the server contains the following code:
var c = exec.connection('username@host.com'); // It takes the ssh key from the default location
exec('cd mydir', c).pipe(process.stdout);
exec('ls -lh', c).pipe(process.stdout);
As you can see, I am not ending the connection after the first exec, but the output I obtain is the content of the home directory, not the content of the mydir directory, because the ssh session is reset after each exec.
The maintainer of the Node.js ssh2 module provided the solution: use the method shell() instead of the method exec().
The method shell() creates an interactive session with the server we are connecting.
The method shell() provides a stream as a parameter of its callback (like the method exec()).
Like when using exec(), stream.on('data', function(data, extended) {...}); can be used to get the output of the commands. However, in this case, to provide commands (input) to the machine you connected with, you need to use stream.write(yourcommand+'\n');
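A minimal sketch of that approach (hostname, credentials, and mydir are placeholders):
var Client = require('ssh2').Client;
var conn = new Client();
conn.on('ready', function() {
  conn.shell(function(err, stream) {
    if (err) throw err;
    stream.on('data', function(data) {
      process.stdout.write(data.toString());
    }).on('close', function() {
      conn.end();
    });
    // the shell keeps state between commands, so the cd persists
    stream.write('cd mydir\n');
    stream.write('ls -lh\n');
    stream.write('exit\n');
  });
}).connect({
  host: 'hostname',
  port: 22,
  username: 'user',
  password: 'password'
});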
PS. Feel free to edit to improve the accuracy of the answer.
I have to guess a bit, but you do something like child = exec('ssh username@host ls')?
You can do something like
child = exec('ssh username@host');
upfront and in the "loop" of your browser
child.stdin.write('ls\n');
When finished, just close stdin:
child.stdin.end()
which also finishes the child process.
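Put together, a minimal sketch (username@host is a placeholder):
var exec = require('child_process').exec;
var child = exec('ssh username@host');
child.stdout.on('data', function(data) {
  process.stdout.write(data.toString());
});
// state such as the current directory persists between writes
child.stdin.write('cd mydir\n');
child.stdin.write('ls\n');
child.stdin.end(); // closes the session; the child process then exits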
I know this link is old, but I figured this may help someone if they're looking for a solution. The advice above, to use the method shell() instead of the method exec(), works. Here's another solution: use absolute file paths, i.e.
conn.exec("mkdir -p /home/user/Direc/{one,two,three}/", function(err, stream) {
if (err) throw err;
stream.on('data', function(data) {
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data) {
console.log('STDERR: ' + data);
});
});
conn.exec("ls -la /home/user/", function(err, stream) {
if (err) throw err;
stream.on('close', function(code, signal) {
console.log('Stream :: close :: code: ' + code + ', signal: ' + signal);
conn.end();
}).on('data', function(data) {
console.log('STDOUT: ' + data);
}).stderr.on('data', function(data) {
console.log('STDERR: ' + data);
});
});