Running go script in a Node application [duplicate] - javascript

In Node.js, I'd like to find a way to obtain the output of a Unix terminal command. Is there any way to do this?
function getCommandOutput(commandString){
  // now how can I implement this function?
  // getCommandOutput("ls") should print the terminal output of the shell command "ls"
}

This is the method I'm using in a project I am currently working on.
var exec = require('child_process').exec;

function execute(command, callback){
  exec(command, function(error, stdout, stderr){ callback(stdout); });
}
Example of retrieving a git user:
module.exports.getGitUser = function(callback){
  execute("git config --global user.name", function(name){
    execute("git config --global user.email", function(email){
      callback({ name: name.replace("\n", ""), email: email.replace("\n", "") });
    });
  });
};

If you're using Node later than 7.6 and you don't like the callback style, you can also use node-util's promisify function with async / await to run shell commands in a way that reads cleanly. Here's an example of the accepted answer, using this technique:
const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

module.exports.getGitUser = async function getGitUser () {
  // Exec output contains both stderr and stdout outputs
  const nameOutput = await exec('git config --global user.name');
  const emailOutput = await exec('git config --global user.email');
  return {
    name: nameOutput.stdout.trim(),
    email: emailOutput.stdout.trim()
  };
};
This also has the added benefit of returning a rejected promise on failed commands, which can be handled with try / catch inside the async code.
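For instance, here is a minimal sketch of catching a failing command with the promisified exec (the helper name and log messages are illustrative, not from the original answer):

const { promisify } = require('util');
const exec = promisify(require('child_process').exec);

async function safeGitUserName() {
  try {
    const { stdout } = await exec('git config --global user.name');
    return stdout.trim();
  } catch (error) {
    // A non-zero exit code rejects the promise; error.message describes the failure,
    // and for child_process.exec the error also carries stdout / stderr properties.
    console.error('git config failed:', error.message);
    return null;
  }
}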

You're looking for child_process
var exec = require('child_process').exec;
var child;

child = exec(command,
  function (error, stdout, stderr) {
    console.log('stdout: ' + stdout);
    console.log('stderr: ' + stderr);
    if (error !== null) {
      console.log('exec error: ' + error);
    }
  });
As pointed out by Renato, there are some synchronous exec packages out there now too; see sync-exec, which might be more what you're looking for. Keep in mind, though, that Node.js is designed to be a single-threaded, high-performance network server, so if that's what you're looking to use it for, stay away from sync-exec kind of stuff unless you're only using it during startup or something.
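If all you need is a blocking call during startup, Node's built-in execSync (part of child_process since Node 0.12) covers the same ground without a third-party package; a minimal sketch, not taken from the original answer:

const { execSync } = require('child_process');

// Blocks the event loop until the command finishes, so keep it to startup or CLI scripts.
const nodeVersion = execSync('node --version', { encoding: 'utf8' }).trim();
console.log('node version:', nodeVersion);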

Requirements
This will require Node.js 7 or later with support for Promises and async/await.
Solution
Create a wrapper function that leverages promises to control the behavior of the child_process.exec command.
Explanation
Using promises and an asynchronous function, you can mimic the behavior of a shell returning the output, without falling into callback hell and with a pretty neat API. Using the await keyword, you can create a script that reads easily, while still being able to get the work of child_process.exec done.
Code sample
const childProcess = require("child_process");

/**
 * @param {string} command A shell command to execute
 * @return {Promise<string>} A promise that resolves to the output of the shell command, or an error
 * @example const output = await execute("ls -alh");
 */
function execute(command) {
  /**
   * @param {Function} resolve A function that resolves the promise
   * @param {Function} reject A function that fails the promise
   * @see https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
   */
  return new Promise(function(resolve, reject) {
    /**
     * @param {Error} error An error triggered during the execution of the childProcess.exec command
     * @param {string|Buffer} standardOutput The result of the shell command execution
     * @param {string|Buffer} standardError The error output of the shell command execution
     * @see https://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback
     */
    childProcess.exec(command, function(error, standardOutput, standardError) {
      if (error) {
        reject(error);
        return;
      }
      if (standardError) {
        reject(standardError);
        return;
      }
      resolve(standardOutput);
    });
  });
}
Usage
async function main() {
  try {
    const passwdContent = await execute("cat /etc/passwd");
    console.log(passwdContent);
  } catch (error) {
    console.error(error.toString());
  }

  try {
    const shadowContent = await execute("cat /etc/shadow");
    console.log(shadowContent);
  } catch (error) {
    console.error(error.toString());
  }
}

main();
Sample Output
root:x:0:0::/root:/bin/bash
[output trimmed, bottom line it succeeded]
Error: Command failed: cat /etc/shadow
cat: /etc/shadow: Permission denied

Thanks to Renato's answer, I have created a really basic example:
const exec = require('child_process').exec
exec('git config --global user.name', (err, stdout, stderr) => console.log(stdout))
It will just print your global git username :)

You can use the util library that comes with Node.js to get a promise from the exec command, and use that output as you need. Use destructuring to store the stdout and stderr in variables.
const util = require('util');
const exec = util.promisify(require('child_process').exec);

async function lsExample() {
  const { stdout, stderr } = await exec('ls');
  console.log('stdout:', stdout);
  console.error('stderr:', stderr);
}

lsExample();

You can use the ShellJS package.
ShellJS is a portable (Windows/Linux/OS X) implementation of Unix shell commands on top of the Node.js API.
see: https://www.npmjs.com/package/shelljs#execcommand--options--callback
import * as shell from "shelljs";
//usage:
//exec(command [, options] [, callback])
//example:
const version = shell.exec("node --version", {async: false}).stdout;
console.log("nodejs version", version);

Here's an async/await TypeScript implementation of the accepted answer:
const execute = async (command: string): Promise<any> => {
  return new Promise((resolve, reject) => {
    const exec = require("child_process").exec;
    exec(
      command,
      function (
        error: Error,
        stdout: string | Buffer,
        stderr: string | Buffer
      ) {
        if (error) {
          reject(error);
          return;
        }
        if (stderr) {
          reject(stderr);
          return;
        } else {
          resolve(stdout);
        }
      }
    );
  });
};

Related

Difference between callback and promise when using `net.connect`

I've built a simple script to detect if a port is open or closed on my machine. I've successfully made it work using the old-fashioned callback way, but can't make it work with a Promise (async/await).
Any clue why the two scripts below are not working exactly the same? Using callbacks, it works neatly. Using a Promise (async/await), it crashes by throwing an "uncaughtException" error.
✅ Using callbacks
const net = require('node:net')

/**
 * Connect to port with callback
 */
function connectToPort(port, callback) {
  const client = net.connect(port, 'localhost', function () {
    callback(true)
  })
  client.on('error', function (err) {
    callback(err)
  })
}

/**
 * Connect
 */
async function test(port) {
  console.log(`Trying to connect to port "${port}"...`)
  // Connect with callback
  connectToPort(port, function (result) {
    console.log({ port, open: result === true })
  })
}
// Output:
// Trying to connect to port "4242"...
// { port: 4242, open: false }
test(4242)
❌ Using Promise (async/await)
const net = require('node:net')

/**
 * Connect to port with Promise
 */
function asyncConnectToPort(port) {
  return new Promise(function (resolve, reject) {
    const client = net.connect(port, 'localhost', function () {
      resolve(true)
    })
    client.on('error', function (err) {
      reject(err)
    })
  })
}

/**
 * Connect
 */
async function test(port) {
  console.log(`Trying to connect to port "${port}"...`)
  // Connect with promise
  const result = await asyncConnectToPort(port)
  console.log({ port, open: result === true })
}
// Output:
// Trying to connect to port "4242"...
// Error: connect ECONNREFUSED 127.0.0.1:4242
test(4242)
Both scripts look exactly the same to me. Apparently, the "error" event must be handled to prevent Node.js from throwing an "uncaughtException". That "special" event is handled when using a callback, but I suspect it's not with the Promise. Could it be something behind the scenes that differs when working with an await/async script?
An error event is raised in both your code examples.
In the Promise code that you wrote, you pass that error event to reject().
When you call reject, you raise an exception.
Hence, you get an exception in your Promise-based code but not in the other code. You added one!
Handling it is a case of:
try {
  const result = await asyncConnectToPort(port)
  console.log({ port, open: result === true })
} catch (e) {
  // do something with e here
}
However, a Promise can only be settled once.
The code you've written will call resolve when the callback to net.connect runs, and it will call reject on an error event, but these are not mutually exclusive.
It's possible for your code to end up calling both resolve and reject, and also for it to call reject multiple times.
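If you want the Promise version to behave like the callback version (hand back the error as a value instead of throwing) and to avoid settling twice, one possible sketch, reusing the net module from the question:

const net = require('node:net')

function asyncConnectToPort(port) {
  return new Promise(function (resolve) {
    const client = net.connect(port, 'localhost', function () {
      client.end()
      resolve(true) // connected
    })
    client.on('error', function (err) {
      resolve(err) // mirror the callback version: report the error as a value, don't reject
    })
  })
}

// usage: no try/catch needed because the promise never rejects
// const result = await asyncConnectToPort(4242)
// console.log({ port: 4242, open: result === true })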

Node async operations on child_process are not ending

I have a little script that executes a child process using execFile. This child process is a Node script too that runs async operations, but it seems like the async operations never finish, so the terminal and all the processes are on hold.
This is the main script that runs the execFile for the child process:
fs.readdir(directoryPath, function(err, files) {
  if (err) console.log(`Error: ${err}`);
  files.map((file) => {
    execFile(`node`, ["updater.js", "BMW", file], (error, stdout, stderr) => {
      if (error) {
        red(`error: ${error.message}`);
        return;
      }
      if (stderr) {
        red(`stderr: ${stderr}`);
        return;
      }
      console.log(stdout);
    });
  });
});
And this is the Node script executed as a child process:
const args = process.argv.slice(2);
const brand = args[0];
const model = args[1];

const data = readJSON(`./json-export/${brand}/${model}`);
const generations = data.generations;
const generationsDB = await getGenerationsByModelAndBrand(brand, model);
console.log(generationsDB);

generations.map((generation) => {
  const lastModification =
    generation.modifications.modification[
      generation.modifications.modification.length - 1
    ];
  console.log(lastModification);
});
All the code works if I comment out the const generationsDB line and the next console.log. If not, when execution hits the async request, it gets stuck there.
I tested getGenerationsByModelAndBrand in the main script and it works with no issue.
getGenerationsByModelAndBrand runs a query on the database and returns a Promise.
This is the getGenerationsByModelAndBrand method code:
export const getGenerationsByModelAndBrand = (brand, model) => {
  return new Promise((resolve, reject) => {
    const sql = `DATABASE SELECT`;
    connection.query(sql, function(error, result) {
      if (error) return reject(error);
      return resolve(result);
    });
  });
};
connection comes from the mysql.createConnection method of the mysql package.
I believe the issue comes from the promise handling in the child process; it feels like I'm missing something but I couldn't find what it is.
Edit:
After researching I didn't find a solution or explanation for this issue, so in the meantime I moved the getGenerationsByModelAndBrand call to the parent script and pass the result as a parameter.
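Not from the original thread, but one hedged guess worth checking: the connection created with mysql.createConnection keeps the child's event loop alive, so the child process never exits and execFile's callback never fires. Ending the connection once the work is done (or calling process.exit()) lets the child finish; a sketch reusing the names from the question:

async function main() {
  const generationsDB = await getGenerationsByModelAndBrand(brand, model);
  console.log(generationsDB);
  // ...rest of the child script...
  connection.end(); // assumes the connection object is reachable from this script
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});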

NodeJS - Looping through Array Sequentially with Timeout between each Element in Array

I have a list of commands in an array that I need to run in order:
const commands = [
  `git clone https://github.com/EliLillyCo/${repo}.git`,
  `cd ${repo}`,
  `git checkout -b ${branch}`,
  'cp ../codeql-analysis.yml .github/workflows/',
  'git add .github/workflows/codeql-analysis.yml',
  `git push --set-upstream origin ${branch}`,
  'cd ../',
  `rm -r ${repo}`,
];
They need to be run in order, as each command relies on the previous one having run.
Also, each command needs a 3 second wait before the next command runs, because sometimes commands take time, especially command 1 and command 5.
I am using a standard for loop which then uses setTimeout() to call a function that runs the commands, like so:
const a = require('debug')('worker:sucess');
const b = require('debug')('worker:error');
const { exec } = require('child_process');

function execCommand(command) {
  exec(command, (error, stdout, stderr) => {
    if (error) {
      b(`exec error: ${error}`);
      return;
    }
    a(`stdout: ${stdout}`);
    b(`stderr: ${stderr}`);
  });
}

const commands = [
  `git clone https://github.com/EliLillyCo/${repo}.git`,
  `cd ${repo}`,
  `git checkout -b ${branch}`,
  'cp ../codeql-analysis.yml .github/workflows/',
  'git add .github/workflows/codeql-analysis.yml',
  `git push --set-upstream origin ${branch}`,
  'cd ../',
  `rm -r ${repo}`,
];

for (let i = 0; i < commands.length; i++) {
  setTimeout(execCommand(commands[i]), 3000);
}
But there is something wrong with the setTimeout() as it's returning this:
worker:error TypeError [ERR_INVALID_CALLBACK]: Callback must be a function. Received undefined
What is the best way to approach the problem of looping through an array sequentially, whilst using a timeout?
I'd make execCommand return a promise so you know when it's done; you can't rely on timeouts (what if the task takes more than three seconds?) and since most of those commands will complete much faster than that, the timeouts hold things up unnecessarily.
Here's execCommand returning a promise:
function execCommand(command) {
  return new Promise((resolve, reject) => {
    exec(command, (error, stdout, stderr) => {
      if (error) {
        b(`exec error: ${error}`);
        reject(error);
        return;
      }
      a(`stdout: ${stdout}`);
      b(`stderr: ${stderr}`);
      resolve();
    });
  });
}
Then if you have top-level await available (modern Node.js and ESM modules):
// If you have top-level `await` available
try {
  for (const command of commands) {
    await execCommand(command);
  }
} catch (error) {
  // ...report/handle error...
}
If you don't, wrap it in an async IIFE:
(async () => {
  for (const command of commands) {
    await execCommand(command);
  }
})().catch(error => {
  // ...report/handle error...
});
Alternatively, you could use util.promisify on exec directly if you wanted to separate the execution from the handling of stdout/stderr, but doing them together was the minimal change to what you had, so that's what I stuck with.
Currently you can't guarantee that the previous command will have completed when calling the next one. You call the next one automatically after 3000 ms, but the previous one can take longer than expected and not be over yet.
You should add a mechanism to await each command, then launch the next one. Here's how, using async/await:
const util = require('util');
const exec = util.promisify(require('child_process').exec);

const commands = [ ... ];

const execCommand = async (command) => {
  try {
    const { stdout, stderr } = await exec(command);
    a(`stdout: ${stdout}`);
    b(`stderr: ${stderr}`);
  } catch (error) {
    b(`exec error: ${error}`);
  }
};

(async () => {
  for (const command of commands) {
    await execCommand(command);
  }
})();

Sinon crypto stub for method within a callback

I'm trying to test a simple function that generates a random name using the Node.js crypto library. I'm using sinon to stub out a method call within the callback of pseudoRandomBytes, but the stub doesn't seem to be called. Example:
getFileName.js
const crypto = require('crypto');

module.exports = (req, file, cb) => {
  crypto.pseudoRandomBytes(32, (err, raw) => {
    try {
      cb(err, err ? undefined : crypto.createHash('MD5').update(raw).digest('hex'));
    } catch (err) {
      cb(err);
    }
  });
};
Test (running in mocha)
it('Crypto Error: createHash', function () {
  const crypto = require('crypto');
  const expectedError = new Error('stub error occurred');
  let cryptoStub = sinon.stub(crypto, 'createHash').throws(expectedError);
  let callback = sinon.spy();
  getFileName(null, null, callback);
  cryptoStub.restore();
  sinon.assert.calledWith(callback, expectedError);
});
I would expect the above test to throw once createHash gets called. If I move the crypto.createHash call outside of the callback (before the pseudoRandomBytes call) it works just fine. I'm a bit of a newbie, so my basic understanding of what sinon and Node.js are doing could be completely wrong. Any help would be much appreciated.
The reason it seems like createHash() wasn't called is that you were making the assertion before the callback had completed, because the function is asynchronous.
A Promise with async/await will work. Another method, which doesn't involve changing your module to use a promise, is to do your assertions within the callback.
it('Crypto Error: createHash', function (done) {
  const crypto = require('crypto');
  const expectedError = new Error('stub error occurred');
  let cryptoStub = sinon.stub(crypto, 'createHash').throws(expectedError);
  getFileName(null, null, function (err, hash) {
    sinon.assert.match(err, expectedError);
    cryptoStub.restore();
    done();
  });
});
This way, you can check that the callback is called with the expected error. One way to confirm this is to change line 4 to .throws('some other error') and watch the test fail.
The problem is that crypto.pseudoRandomBytes() is an async function, so the rest of your test code executes before your callback. That way, your stub is restored before your function actually uses it.
In order to make it work properly, you should update your getFileName.js so it returns a promise; that way you can await it:
module.exports = (req, file, cb) => {
  return new Promise((resolve, reject) => {
    crypto.pseudoRandomBytes(32, (err, raw) => {
      try {
        cb(err, err ? undefined : crypto.createHash('MD5').update(raw).digest('hex'));
        resolve();
      } catch (err) {
        cb(err);
        reject(err);
      }
    });
  });
};
and then in your test
// added async
it('Crypto Error: createHash', async () => {
  const crypto = require('crypto');
  const expectedError = new Error('stub error occurred');
  let cryptoStub = sinon.stub(crypto, 'createHash').throws(expectedError);
  let callback = sinon.spy();
  // swallow the rejection: the stubbed createHash throws, so getFileName rejects
  await getFileName(null, null, callback).catch(() => {});
  // once we are here, the callback has already been executed and the promise from getFileName has settled
  cryptoStub.restore();
  sinon.assert.calledWith(callback, expectedError);
});

node js async function return Promise { <pending> }

I tried to test out some encryption and I'm new to Node.js.
After several tries and searches over Google, I was unable to solve my problem.
Please help.
Case: calling an async method to encrypt data; however, it returns Promise { <pending> }.
I'm using the npm openpgp package.
Objective: return the ciphertext so I can use it for other purposes.
My code is below:
// execution.js
var tools = require('./tools');
console.log(tools.encrypt());

// tools.js
const openpgp = require('openpgp'); // use as CommonJS, AMD, ES6 module or via window.openpgp
var fs = require('fs');

openpgp.initWorker({ path: 'openpgp.worker.js' }); // set the relative web worker path

var pubkey = fs.readFileSync('public.key', 'utf8');
const passphrase = `super long and hard to guess secret`; // what the privKey is encrypted with

module.exports = {
  encrypt: async () => {
    const options = {
      message: openpgp.message.fromText('Hello, World!'), // input as Message object
      publicKeys: (await openpgp.key.readArmored(pubkey)).keys, // for encryption
    };
    const encrypted = await openpgp.encrypt(options);
    const ciphertext = encrypted.data;
    fs.writeFile('message.txt', ciphertext, 'utf8', function (err) {
      if (err) throw err;
      console.log('msg written!');
    });
    return ciphertext;
  },
  decrypt: async function(){
    // your code here
  }
};
Please help.
Async/await is simply syntactic sugar for promises; an async function returns a promise.
You can't use await at the top level. What you can do is:
(async () => {
  try {
    console.log(await tools.encrypt());
  } catch (e) {
    console.log(e);
  }
})();

// using promises
tools.encrypt().then(console.log).catch(console.log);
tools.encrypt().then(res => console.log(res))
This line from Mark Meyer's answer solved my problem.
I was trying to access the result without having to declare the 'async' keyword, and to have access to 'res' so I could use it for other purposes.
Thanks a lot.
