I have this code:
var convert = spawn("gm", ["convert"].concat(parameters));
// output stream
obj.outStream = new BufferList();
convert.stderr.pipe(process.stderr, { end: false });
// put the output in the stream TOO
convert.stdout.pipe(obj.outStream);
// send the image to the input
obj.stream.pipe(throttle).pipe(convert.stdin);
How can I suppress the output of the "convert" process without also suppressing its input and the output going to obj.outStream?
The reason is that I don't want that output shown to the user, as it is now.
What you're probably seeing in the output is convert.stderr, because you are piping it to process.stderr, the error output of the child process' parent. When you spawn a child process, none of its stdio reaches your terminal by default.
var spawn = require('child_process').spawn;
var child = spawn('gm', ['convert']);
In the code you've shown, you're piping the child's stderr directly to the main process' stderr and piping its stdout to your outStream. That means the only output you can be seeing is convert.stderr.
To fix this, ignore stderr.
var obj = {
outStream: new BufferList()
};
var spawn = require('child_process').spawn;
var child = spawn('gm', ['convert'], {
stdio: ['pipe', obj.outStream, 'ignore']
});
With this, you have the stdin stream as it normally is, stdout piped to obj.outStream, and stderr ignored.
See the docs: you can use streams, listeners, or the 'ignore' option.
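Applied to the question's gm example, here is a minimal sketch that keeps the question's existing pipes and only discards stderr (untested; parameters, throttle and obj.stream are assumed to come from the surrounding code in the question, and BufferList from whatever module provides it there):
var spawn = require('child_process').spawn;

var obj = {
  outStream: new BufferList()
};

// keep stdin and stdout as pipes, discard stderr entirely
var convert = spawn('gm', ['convert'].concat(parameters), {
  stdio: ['pipe', 'pipe', 'ignore']
});

// stdout still goes to the in-memory buffer, nothing reaches the terminal
convert.stdout.pipe(obj.outStream);

// the image is still sent to the child's stdin
obj.stream.pipe(throttle).pipe(convert.stdin);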
Related
I have a Javascript file being executed by Nodejs as a script in a Unix shell pipeline. The script needs to read a few thousand lines of text from STDIN, parse it for some bits, then return a json stream on STDOUT with the results of its work.
var readline = require('readline');
var rl = readline.createInterface({
input: process.stdin,
output: process.stdout,
terminal: false
});
Then I have some functions to process the data, and I set up an event listener that gets called on each line of input, does all the processing I need, and appends the result to an array:
function extract_references (data) {
...
references.push(item);
}
rl.on('line', extract_references);
So far so good: my data array ends up populated with all the components I want. The trouble comes when I try to write it back out to STDOUT. I've tried a couple of things triggered by the close event on the input stream:
function output_references () {
var output = JSON.stringify(references, null, ' ');
process.stdout.write(output);
}
rl.on('close', output_references);
This works fine if the input stream is relatively short (a couple hundred lines) but when I crank it up to my full data set I start getting truncated output and an error such as:
write error: Resource temporarily unavailable
...from the next command in my pipeline.
I've also tried using rl.write(), but that doesn't seem to give me anything at all on STDOUT. What am I doing wrong and what is the correct way to handle a stream like this?
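One pattern worth trying is to respect backpressure on STDOUT: process.stdout.write() returns false when its internal buffer is full, and the 'drain' event signals when it is safe to continue. A rough sketch, not a verified fix for this exact error (references and rl are the variables from the code above, and the chunk size is arbitrary):
function output_references () {
  var output = JSON.stringify(references, null, ' ');
  var offset = 0;
  var CHUNK = 64 * 1024; // arbitrary chunk size for this sketch

  function writeMore () {
    // write pieces until the internal buffer fills up, then wait for 'drain'
    while (offset < output.length) {
      var ok = process.stdout.write(output.slice(offset, offset + CHUNK));
      offset += CHUNK;
      if (!ok) {
        process.stdout.once('drain', writeMore);
        return;
      }
    }
  }

  writeMore();
}

rl.on('close', output_references);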
I'm trying to convert ANSI color codes from console output into HTML. I have found a script to do this, but I can't seem to make it parse the strings inside Node.js. I have tried JSON.stringify to also include the special characters, but it's not working.
forever list
[32minfo[39m: Forever processes running
[90mscript[39m [37mforever[39m [37mpid[39m [37mid[39m
[90mdata[39m: [37m [39m [37muid[39m [90mcommand[39m
I get output like this back from ssh2shell in Node.js. I have a script:
https://github.com/pixelb/scripts/blob/master/scripts/ansi2html.sh
This is supposed to convert the above to html and add the appropriate color codes. It works fine with normal terminal output for example:
npm install --color=always | ansi2html.sh > npminstall.html
Below is the raw output on the Linux machine when piped to a file. It seems the JS strings are missing these escapes when they are shown in console.log, but they are also missing newlines there. Perhaps it's because I'm concatenating them directly into the string and that removes the special characters?
total 24
-rwxr-xr-x 1 admin admin 17002 May 13 02:52 ^[[0m^[[38;5;34mansi2html.sh^[[0m
drwxr-xr-x 4 admin admin 4096 May 13 00:00 ^[[38;5;27mgit^[[0m
-rw-r--r-- 1 admin admin 0 May 13 02:57 ls.html
Hopefully some of this makes sense.
Thanks
There are a couple of filters that SSH2shell applies to the output from commands. The first removes non-standard ASCII from the response and then the colour formatting codes are removed.
In v1.6.0 I have added pipe()/unpipe(), the events for both and exposed the stream.on('data', function(data){}) event so you can access the stream output directly without SSH2shell interacting with it in any way.
This should resolve the problem of not getting the right output from SSH2shell by giving you access to the raw data.
var fs = require('fs')
var host = {
server: {
host: "mydomain.com",
port: 22,
userName: "user",
password: "password"
},
commands: [
"`Test session text message: passed`",
"msg:console test notification: passed",
"ls -la"
],
}
//until npm published use the cloned dir path.
var SSH2Shell = require ('ssh2shell')
//run the commands in the shell session
var SSH = new SSH2Shell(host),
callback = function( sessionText ){
console.log ( "-----Callback session text:\n" + sessionText);
console.log ( "-----Callback end" );
},
firstLog = fs.createWriteStream('first.log'),
secondLog = fs.createWriteStream('second.log'),
buffer = ""
//multiple pipes can be added but they won't be bound to the stream until the connection is established
SSH.pipe(firstLog).pipe(secondLog);
SSH.on('data', function(data){
//do something with the data chunk
console.log(data)
})
SSH.connect(callback)
Have you tried this?
https://github.com/hughsk/ansi-html-stream
var spawn = require('child_process').spawn
, ansi = require('ansi-html-stream')
, fs = require('fs')
var npm = spawn('npm', ['install', 'browserify', '--color', 'always'], {
cwd: process.cwd()
})
var stream = ansi({ chunked: false })
, file = fs.createWriteStream('browserify.html', 'utf8')
npm.stdout.pipe(stream)
npm.stderr.pipe(stream)
stream.pipe(file, { end: false })
stream.once('end', function() {
file.end('</pre>\n')
})
file.write('<pre>\n');
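If the output is coming from ssh2shell rather than a spawned process, the same stream can in principle be fed by hand from the 'data' event mentioned in the other answer. A rough sketch (untested; SSH is the SSH2Shell instance from that answer's example, and the output file name is just a placeholder):
var ansi = require('ansi-html-stream');
var fs = require('fs');

var stream = ansi({ chunked: false });
var file = fs.createWriteStream('ssh-output.html', 'utf8');

stream.pipe(file, { end: false });
file.write('<pre>\n');

// feed the raw chunks from SSH2shell into the ansi-to-html stream
SSH.on('data', function (data) {
  stream.write(data);
});

stream.once('end', function () {
  file.end('</pre>\n');
});

// close the html stream once the session callback fires
SSH.connect(function (sessionText) {
  stream.end();
});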
It seems that for a big enough stdout output, node does not fire data events:
Example bash script:
#!/usr/bin/env sh
for i in {1..100000}; do
echo $i
done
Running the above script from bash should print all numbers from 1 to 100k, one number per line.
However when proxying it via node's exec:
#!/usr/bin/env node --harmony --harmony_destructuring --harmony_default_parameters
const {exec} = require('child_process');
let stdout = '';
const child = exec('./tmp.sh');
child.stdout.on('data', data => {
stdout += data;
});
child.stdout.on('close', code => {
if (code) process.exit(code);
console.log(stdout.split('\n').slice(0, 3).join('\n'));
console.log('...');
console.log(stdout.split('\n').slice(-3).join('\n'));
});
For the above script, I'd expect to get:
1
2
3
...
99998
99999
100000
However, when run it returns:
1
2
3
...
35984
35985
About a third of the output is truncated. Logging inside the data handler shows that only one data event is fired, with one chunk.
I've tried listening to the child.on('exit') event instead, with the same result (also stdout.on('exit')).
It works the same when replacing exec with spawn.
node --version # v5.4.1
osx 10.10
child_process.exec() buffers the standard output into a fixed size buffer. You are likely filling the buffer and thus you don't get the rest of the output.
There is a maxBuffer option you can pass to .exec() to make the buffer larger or you can use .spawn() and receive an unlimited stream of output.
See Stdout buffer issue using node child_process for related info.
Also worth reading: Difference between spawn and exec of Node.js child_process and Stdout of Node.js child_process exec is cut short.
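For example, a minimal sketch of both options (the maxBuffer value is arbitrary, and tmp.sh is the script from the question):
const cp = require('child_process');

// Option 1: keep exec, but raise the size of its internal stdout buffer
cp.exec('./tmp.sh', { maxBuffer: 10 * 1024 * 1024 }, (err, stdout, stderr) => {
  if (err) throw err;
  console.log(stdout.split('\n').length + ' lines captured');
});

// Option 2: use spawn and accumulate the streamed output yourself (no fixed limit)
const child = cp.spawn('./tmp.sh');
let out = '';
child.stdout.on('data', chunk => { out += chunk; });
child.on('close', code => {
  console.log('exited with ' + code + ', got ' + out.split('\n').length + ' lines');
});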
In Perl, if you need to run a batch file, it can be done with the following statement.
system "tagger.bat < input.txt > output.txt";
Here, tagger.bat is a batch file, input.txt is the input file and output.txt is the output file.
I'd like to know whether it is possible to do this in Node.js or not. If yes, how?
You will need to create a child process. Unlike Perl's system call, Node.js is asynchronous, meaning it doesn't wait for script.bat to finish. Instead, it calls functions you define when script.bat prints data or exits:
// Child process is required to spawn any kind of asynchronous process
var childProcess = require("child_process");
// This line initiates bash
var script_process = childProcess.spawn('/bin/bash',["test.sh"],{env: process.env});
// Echoes any command output
script_process.stdout.on('data', function (data) {
console.log('stdout: ' + data);
});
// Error output
script_process.stderr.on('data', function (data) {
console.log('stderr: ' + data);
});
// Process exit
script_process.on('close', function (code) {
console.log('child process exited with code ' + code);
});
Apart from assigning events to the process, you can connect its stdin and stdout streams to other streams, meaning other processes, HTTP connections or files, as shown below:
// Pipe input and output to files
var fs = require("fs");
var output = fs.createWriteStream("output.txt");
var input = fs.createReadStream("input.txt");
// Connect process output to file input stream
script_process.stdout.pipe(output);
// Connect data from file to process input
input.pipe(script_process.stdin);
Then we just make a test bash script test.sh:
#!/bin/bash
input=`cat -`
echo "Input: $input"
And test text input input.txt:
Hello world.
After running node test.js we get this in the console:
stdout: Input: Hello world.
child process exited with code 0
And this in output.txt:
Input: Hello world.
The procedure on Windows will be similar; I think you can just call the batch file directly:
var script_process = childProcess.spawn('test.bat',[],{env: process.env});
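To reproduce the Perl one-liner from the question (tagger.bat < input.txt > output.txt), the same stream wiring shown above can be used. A rough sketch, not tested on Windows (running the batch file through cmd.exe /c is one common way to start it):
var childProcess = require('child_process');
var fs = require('fs');

// start the batch file via cmd.exe
var tagger = childProcess.spawn('cmd.exe', ['/c', 'tagger.bat'], { env: process.env });

// tagger.bat < input.txt
fs.createReadStream('input.txt').pipe(tagger.stdin);

// tagger.bat > output.txt
tagger.stdout.pipe(fs.createWriteStream('output.txt'));

tagger.on('close', function (code) {
  console.log('tagger.bat exited with code ' + code);
});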
I'm trying to write a function that would use native openssl to do some RSA heavy lifting for me, rather than using a JS RSA library. The goal is to:
Read binary data from a file
Do some processing in the node process, using JS, resulting in a Buffer containing binary data
Write the buffer to the stdin stream of the exec command
RSA encrypt/decrypt the data and write it to the stdout stream
Get the output data back into a Buffer in the JS process for further processing
The child process module in Node has an exec command, but I fail to see how I can pipe the input to the process and pipe it back to my process. Basically I'd like to execute the following type of command, but without having to rely on writing things to files (didn't check the exact syntax of openssl)
cat the_binary_file.data | openssl -encrypt -inkey key_file.pem -certin > the_output_stream
I could do this by writing a temp file, but I'd like to avoid it if possible. Spawning a child process gives me access to stdin/stdout, but I haven't found this functionality for exec.
Is there a clean way to do this in the way I drafted here? Is there some alternative way of using openssl for this, e.g. some native bindings for openssl lib, that would allow me to do this without relying on the command line?
You've mentioned spawn but seem to think you can't use it. Possibly showing my ignorance here, but it seems like it should be just what you're looking for: Launch openssl via spawn, then write to child.stdin and read from child.stdout. Something very roughly like this completely untested code:
var util = require('util'),
spawn = require('child_process').spawn;
function sslencrypt(buffer_to_encrypt, callback) {
var ssl = spawn('openssl', ['-encrypt', '-inkey', 'key_file.pem', '-certin']),
result = new Buffer(SOME_APPROPRIATE_SIZE),
resultSize = 0;
ssl.stdout.on('data', function (data) {
// Save up the result (or perhaps just call the callback repeatedly
// with it as it comes, whatever)
if (data.length + resultSize > result.length) {
// Too much data, our SOME_APPROPRIATE_SIZE above wasn't big enough
}
else {
// Append to our buffer at the current offset
data.copy(result, resultSize);
resultSize += data.length;
}
});
ssl.stderr.on('data', function (data) {
// Handle error output
});
ssl.on('exit', function (code) {
// Done, trigger your callback (perhaps check `code` here)
callback(result, resultSize);
});
// Write the buffer and close stdin so openssl sees end of input
ssl.stdin.write(buffer_to_encrypt);
ssl.stdin.end();
}
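A variation on the same idea that avoids guessing SOME_APPROPRIATE_SIZE up front is to collect the chunks in an array and join them with Buffer.concat once the process exits. Equally untested, with the openssl arguments taken as-is from the sketch above:
var spawn = require('child_process').spawn;

function sslencrypt(buffer_to_encrypt, callback) {
  var ssl = spawn('openssl', ['-encrypt', '-inkey', 'key_file.pem', '-certin']);
  var chunks = [];

  // collect stdout as it arrives; no pre-sized buffer needed
  ssl.stdout.on('data', function (data) {
    chunks.push(data);
  });

  ssl.stderr.on('data', function (data) {
    // handle error output
  });

  ssl.on('exit', function (code) {
    // join everything into a single Buffer once the process is done
    callback(Buffer.concat(chunks), code);
  });

  // write the input and close stdin so openssl sees end of input
  ssl.stdin.write(buffer_to_encrypt);
  ssl.stdin.end();
}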
You should be able to set encoding to binary when you make a call to exec, like..
exec("openssl output_something_in_binary", {encoding: 'binary'}, function(err, out, err) {
//do something with out - which is in the binary format
});
If you want to write out the content of "out" in binary, make sure to set the encoding to binary again, like..
fs.writeFile("out.bin", out, {encoding: 'binary'});
I hope this helps!