Defining stdout and stderr in a child_process in Node.js 8 - javascript

I am trying to start a child process in my Node dev server to reset the API json-server before launching it, when I use the command:
npm run dev -- reset
I am stuck with the stdout and stderr properties being null, with the error:
cp.stdout.on('data', (data) => {
TypeError: Cannot read property 'on' of null
Here is my piece of code. Feedback appreciated...
const cp = spawn('cp', [dbResetJson, dbJson], { stdio: 'inherit' })
cp.stdout.on('data', (data) => {
  console.log(`stdout: ${data}`);
});
cp.stderr.on('data', (data) => {
  console.log(`stderr: ${data}`);
});
cp.on('close', (code) => {
  console.log(`child process exited with code ${code}`)
  apiServer.listen(3000, () => {
    console.log(chalk.green('Starting DB-JSON test Server... at ' + '3000' + '\n'))
  })
})
} else {
  console.log(chalk.green('Use DB-JSON test Server in current state'))
  apiServer.listen(3000, () => {
    console.log(chalk.green('Starting DB-JSON test Server... at ' + '3000' + '\n'))
  })
}

That's because you're using stdio: 'inherit', which means the child process uses the parent process's stdin, stdout and stderr directly. In that case cp.stdout and cp.stderr are null, so there is nothing to attach a listener to.
You probably want to just use the default (pipe):
const cp = spawn('cp', [dbResetJson, dbJson]);
// which does the same as:
const cp = spawn('cp', [dbResetJson, dbJson], { stdio: 'pipe' });
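If you still want the child's output to appear in your terminal while also handling it, you can forward it yourself once stdio is piped. A minimal sketch, reusing dbResetJson and dbJson from the question:
const { spawn } = require('child_process');
const cp = spawn('cp', [dbResetJson, dbJson]); // default stdio: 'pipe'
cp.stdout.on('data', (data) => {
  process.stdout.write(`stdout: ${data}`); // handle and pass through to the terminal
});
cp.stderr.on('data', (data) => {
  process.stderr.write(`stderr: ${data}`);
});
cp.on('close', (code) => {
  console.log(`child process exited with code ${code}`);
});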

Related

Node.js spawning parent process and capturing subprocess output

const options = {
  cwd: process.cwd(),
  shell: true,
  stdio: 'inherit',
}
const firstCommand = 'pktmon stop'
const secondCommand = 'pktmon filter remove'
const thirdCommand = 'pktmon filter add --ethertype 0x88cc'
const fourthCommand = 'pktmon start --etw --pkt-size 0 --log-mode real-time --comp nics'
const child = require('child_process').spawn(
  `${firstCommand} && ${secondCommand} && ${thirdCommand} && ${fourthCommand}`, [], options)
// none of the child's output handlers are triggered by the subprocess output
child.stdout.on('data', function (data) {
  console.log('stdout', data.toString())
})
child.stderr.on('data', function (data) {
  console.log('stderr', data.toString())
})
child.on('message', (data) => {
  console.log('message', data)
})
Subprocess output is shown in the terminal, but how can I actually capture it and save the output to a variable?
The reason I'm using stdio: 'inherit' is that when I don't use it, the terminal freezes while the last command is executing.
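One way to both see the output and capture it (a sketch, not from the thread, reusing the command variables from the snippet above) is to leave stdout and stderr as pipes and forward the data to the terminal yourself while accumulating it:
const { spawn } = require('child_process');
let captured = '';
const child = spawn(
  `${firstCommand} && ${secondCommand} && ${thirdCommand} && ${fourthCommand}`, [], {
    cwd: process.cwd(),
    shell: true,
    // stdin stays attached to the terminal; stdout/stderr are piped so 'data' events fire
    stdio: ['inherit', 'pipe', 'pipe'],
  })
child.stdout.on('data', (data) => {
  captured += data             // save for later use
  process.stdout.write(data)   // still show it in the terminal
})
child.stderr.on('data', (data) => {
  process.stderr.write(data)
})
child.on('close', () => {
  console.log('captured output length:', captured.length)
})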

Jest Node.js Not able to call beforeAll and afterAll properly using separate file

I have defined beforeAll and afterAll in a separate file, bootstrap.js, but I am not able to run my integration tests. I am using the Serverless stack. I took an example from GitHub, but it was written for Mocha, so I tried to convert it to Jest.
bootstrap.js
beforeAll(async () => {
  console.log('[Tests Bootstrap] Start');
  await startSlsOffline((err) => {
    if (err) {
      console.log(err);
    }
    console.log('[Tests Bootstrap] Done');
  });
}, 30000);
afterAll(async () => {
  console.log('[Tests Teardown] Start');
  await stopSlsOffline();
  console.log('[Tests Teardown] Done');
});
handler.test.js
describe('get Endpoints', () => {
  const server = request(`http://localhost:3005`);
  test('should run get example', async () => {
    const res = await server.get('/example');
    console.log('res', res.body);
  });
});
My jest configuration is
module.exports = {
  verbose: true,
  bail: true,
  coverageDirectory: 'output/coverage/jest',
  setupFilesAfterEnv: [ './bootstrap.js' ]
};
The output I get is
> jest --config test/jest.config.js
FAIL test/handler.test.js
get Endpoints
✕ should run get example (38ms)
● get Endpoints › should run get example
connect ECONNREFUSED 127.0.0.1:3005
console.log test/bootstrap.js:6
[Tests Bootstrap] Start
console.log test/bootstrap.js:30
Serverless: Offline started with PID : 5587 and PORT: 3005
console.log test/bootstrap.js:18
[Tests Teardown] Start
console.log test/bootstrap.js:47
Serverless Offline stopped
console.log test/bootstrap.js:22
[Tests Teardown] Done
Test Suites: 1 failed, 1 total
Tests: 1 failed, 1 total
Snapshots: 0 total
Time: 2.825s
Ran all test suites.
npm ERR! Test failed. See above for more details.
The global setup doesn't work the way you are expecting it to. If you look at the logs, your beforeAll logs appear after your test has already executed. You should use a different way to set up and tear down. Jest has the concepts of globalSetup and globalTeardown, and I think they fit better in your case; as part of these you can start and stop your server. Read more here - https://jestjs.io/docs/en/configuration#globalsetup-string
The config will look like this:
module.exports = {
  verbose: true,
  bail: true,
  coverageDirectory: 'output/coverage/jest',
  globalSetup: "./bootstrap.js",
  globalTeardown: "./bootstrap.js"
};
And your bootstrap will look like this:
const { spawn } = require('child_process');
let slsOfflineProcess;
module.exports = async () => {
  console.log('[Tests Bootstrap] Start');
  await startSlsOffline((err) => {
    if (err) {
      console.log(err);
    }
    console.log('[Tests Bootstrap] Done');
  });
}
const startSlsOffline = (done) => {
  if (slsOfflineProcess) {
    slsOfflineProcess.kill('SIGINT');
    console.log('Serverless Offline stopped');
    done();
  }
  slsOfflineProcess = spawn('sls', [ 'offline', 'start', '--port', '3005' ]);
  console.log(`Serverless: Offline started with PID : ${slsOfflineProcess.pid} and PORT: 3005`);
  slsOfflineProcess.stdout.on('data', (data) => {
    if (data.includes('Offline listening on')) {
      console.log(data.toString().trim());
      done();
    }
  });
  slsOfflineProcess.stderr.on('data', (errData) => {
    console.log(`Error starting Serverless Offline:\n${errData}`);
    done(errData);
  });
};
I solved my issue using the config below, with the help of the first answer, using globalSetup and globalTeardown.
Read more here - https://jestjs.io/docs/en/configuration#globalsetup-string
bootstrap.js
const { spawn } = require('child_process');
let slsOfflineProcess;
module.exports = async () => {
  console.log('[Tests Bootstrap] Start');
  await startSlsOffline().catch((e) => {
    console.error(e);
    return;
  });
  global.__SERVERD__ = slsOfflineProcess;
};
function startSlsOffline() {
  slsOfflineProcess = spawn('sls', [ 'offline', 'start', '--port', '3005' ]);
  return finishLoading();
}
const finishLoading = () =>
  new Promise((resolve, reject) => {
    slsOfflineProcess.stdout.on('data', (data) => {
      if (data.includes('Serverless: Offline [HTTP] listening on')) {
        console.log(data.toString().trim());
        console.log(`Serverless: Offline started with PID : ${slsOfflineProcess.pid}`);
        resolve('ok');
      }
      if (data.includes('address already in use')) {
        reject(data.toString().trim());
      }
    });
    slsOfflineProcess.stderr.on('data', (errData) => {
      console.log(`Error starting Serverless Offline:\n${errData}`);
      reject(errData);
    });
  });
teardown.js
module.exports = async function() {
  let slsOfflineProcess = global.__SERVERD__;
  slsOfflineProcess.stdin.write('q\n');
  slsOfflineProcess.stdin.pause();
  await slsOfflineProcess.kill('SIGINT');
  console.log('Serverless Offline stopped');
};
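The Jest config that ties these together would then point globalSetup and globalTeardown at the two files (assuming bootstrap.js and teardown.js sit next to jest.config.js):
module.exports = {
  verbose: true,
  bail: true,
  coverageDirectory: 'output/coverage/jest',
  globalSetup: './bootstrap.js',    // starts serverless offline once per test run
  globalTeardown: './teardown.js'   // stops it after all suites finish
};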
Find a sample here: https://github.com/bilalsha/sls-test-jest
P.S. globalTeardown does not work if a test fails. I will post a solution once I have it.

How to pass stream input/output between a main process and a child process in Node.js

I'm trying to get two processes to "talk" to each other via stdio:
ping.js
import { readline } from '../readline';
import { sleep } from '../sleep';
import { spawn } from 'child_process';
const spawnPongProcess = () => {
  const child = spawn('node',
    ['-r', 'esm', `${__dirname}/pong.js`],
    { stdio: 'pipe' });
  child.stderr.on('data', (data) => {
    console.error(`stderr: ${data}`);
  });
  child.on('close', (code) => {
    console.error(`child process exited with code ${code}`);
    process.stdin.resume();
    process.stdout.resume();
  });
  child.on('error', (err) => {
    console.error(`child process error: ${err}`);
  });
  process.stdout.pipe(child.stdin);
  child.stdout.pipe(process.stdin);
  return child;
};
export const ping = async () => {
  const child = spawnPongProcess();
  await sleep(1000);
  console.log('ping');
  let pong = readline();
  console.error(`Ping received: ${pong}`);
  child.kill();
};
I pipe the parent process's stdout to the child process's stdin, and the child process's stdout to the parent process's stdin, in an effort to let the processes communicate via stdio.
pong.js
import { readline } from '../readline';
import { sleep } from '../sleep';
const pong = async () => {
  console.log(`Pong initiated and waiting for input.`);
  let ping = readline();
  console.log(`Pong received: ${ping}`);
  process.exit();
};
pong();
readline.js
import { question } from 'readline-sync';
export const readline = question;
sleep.js
export const sleep = (ms) => {
  return new Promise((resolve) => setTimeout(resolve, ms));
};
The output is:
$ node -r esm src/index.js
Pong initiated and waiting for input.
ping
It appears that the output from the parent process (ping) is not getting through to the child process (pong). Any ideas on how to make it work?
You piped your process's stdout (a Writable) to the child's stdin (also a Writable), and vice versa. Data arrives on stdin (a Readable), so that is what you have to pipe instead of stdout:
process.stdin.pipe(child.stdin);
child.stdout.pipe(process.stdout);
Your code doesn't throw because, if stdout is a terminal, it becomes a Duplex stream.

How do you create a terminal instance within a NodeJS child process?

I am setting up a Discord channel to function as an SSH terminal. A Node.js server will provide the connection. A custom command will spawn a new terminal instance, which can then be used as a shell.
I don't know how to spawn a terminal within a child process. I have tried using the screen and bash commands, to no avail.
I am using CentOS 7.
// Code For Discord
var $discord = {
  currentInterface: null,
  send: (data) => {
    /* some code that sends data to a discord channel */
  },
  receive: (data) => {
    // Send Data To Terminal
    if ($discord.currentInterface) {
      $discord.currentInterface.send(data);
    } else {
      $discord.send('**Error:** Terminal has not been spawned.');
    }
  },
  command: (name, args) => {
    // Receive Discord Commands
    switch (name) {
      case 'spawn':
        $discord.currentInterface = $interface();
        break;
    }
  }
};
// Create Interface
var $interface = function () {
  // Define object
  let x = {
    terminal: child_process.spawn('screen'),
    send: (data) => {
      // Send Input to Terminal
      x.terminal.stdin.write(data + '\n');
    },
    receive: (data) => {
      // Send Output to Discord
      $discord.send(data);
    }
  };
  // Process Output
  x.terminal.stdout.on('data', (data) => {
    x.receive(data);
  });
  // Process Errors
  x.terminal.stderr.on('data', (error) => {
    x.receive(`**Error:**\n${error}`);
  });
  // Return
  return x;
};
The problem lies with creating the terminal itself. How do you create an SSH-style shell within a child process?
After realizing how much of an idiot I really am, I found a solution...
// Import Modules
const fs = require('fs');
const child_process = require('child_process');
// Create Interface
var interface = {
  terminal: child_process.spawn('/bin/sh'),
  handler: console.log,
  send: (data) => {
    interface.terminal.stdin.write(data + '\n');
  },
  cwd: () => {
    let cwd = fs.readlinkSync('/proc/' + interface.terminal.pid + '/cwd');
    interface.handler({ type: 'cwd', data: cwd });
  }
};
// Handle Data
interface.terminal.stdout.on('data', (buffer) => {
  interface.handler({ type: 'data', data: buffer });
});
// Handle Error
interface.terminal.stderr.on('data', (buffer) => {
  interface.handler({ type: 'error', data: buffer });
});
// Handle Closure
interface.terminal.on('close', () => {
  interface.handler({ type: 'closure', data: null });
});
Usage...
interface.handler = (output) => {
  let data = '';
  if (output.data) data += ': ' + output.data.toString();
  console.log(output.type + data);
};
interface.send('echo Hello World!');
// Returns: data: Hello World!
interface.send('cd /home');
interface.cwd();
// Returns: cwd: /home
interface.send('abcdef');
// Returns: error: bin/sh: line 2: abcdef: command not found
interface.send('exit');
// Returns: exit
I'd take a look at the documentation for child_process.execFile. There's a shell option you can turn on, but it's disabled by default.
There's also this approach if you want to try setting up a batch script. It's set up for Windows and the answer isn't set up for passing arguments, but you should be able to adapt it fairly easily.
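For reference, a minimal sketch of what enabling that shell option looks like (the command here is only a placeholder):
const { execFile } = require('child_process');
// With shell: true the command is run through a shell, so shell syntax
// (pipes, &&, variable expansion) is available in the command line.
execFile('echo', ['hello from a shell'], { shell: true }, (err, stdout, stderr) => {
  if (err) {
    console.error(err);
    return;
  }
  console.log(stdout.trim());
});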

Data stream handlers and CLI incorporation Node JS

I am trying to create an interactive CLI that can run serial commands. I have two files, serialcomms.js and cli.js. serialcomms.js contains the connection, handlers, and command functions; cli.js contains the commander setup.
My issue is that I can only call the send command once, because the listeners/handlers take over from the serialcomms file. What would be the best method to loop the CLI program so I can call the send command over and over again, while still having the serial handlers running and writing output to stdout? Would I need to use a child process? Or recursion, having the CLI call itself?
Example behavior I am expecting with an echo bot on the other end of the serial line.
Send hello
hello
Send Bye
Bye
Behavior I am experiencing
Send hello
hello
endless wait
Here is my serialcomms.js
const SerialPort = require('serialport');
const ReadLine = require('@serialport/parser-readline');
let portName = process.argv[2] ? `/dev/pts/${process.argv[2]}` : '/dev/pts/6';
let baudRate = parseInt(process.argv[3], 10) || 115200;
let myPort = new SerialPort(portName, { baudRate: baudRate })
let parser = myPort.pipe(new ReadLine({ delimiter: '\n' }))
myPort.on('open', () => {
  console.log(`port ${portName} open`)
})
parser.on('data', (data) => {
  console.log(data)
})
myPort.on('close', () => {
  console.log(`port ${portName} closed`)
})
myPort.on('error', (err) => {
  console.error('port error: ' + err)
})
function send(data) {
  myPort.write(JSON.stringify(data) + '\n', function (err) {
    if (err) {
      return console.log('Error on write: ', err.message);
    }
    console.log(`${data} sent`);
  });
}
module.exports = {
  send
}
Here is my cli.js file
const program = require('commander');
const { send } = require('./serialcomms');
program
  .version('1.0.0')
  .description('Serial Tester')
program
  .command('send <msg>')
  .alias('s')
  .description('send a message over serial')
  .action((msg) => {
    send(msg)
  })
program.parse(process.argv)
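One way to keep the process alive and accept repeated commands (a sketch, not from the thread; the prompt handling is only illustrative) is to skip the one-shot commander parse and read lines in a loop with Node's readline module, while the serial handlers from serialcomms.js keep printing to stdout:
const readline = require('readline');
const { send } = require('./serialcomms');
const rl = readline.createInterface({
  input: process.stdin,
  output: process.stdout,
  prompt: '> '
});
rl.prompt();
rl.on('line', (line) => {
  const [command, ...args] = line.trim().split(/\s+/);
  if (command === 'send' || command === 's') {
    send(args.join(' '));
  } else if (command === 'exit') {
    rl.close(); // stop reading from stdin
    return;
  }
  rl.prompt();
});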
