Node async operations on child_process are not ending - javascript

I have a little script that executes a child process using execFile. This child process is a Node script too that runs async operations, but it seems like the async operations never finish, so the terminal and all the processes are on hold.
This is the main script that runs the execFile for the child process:
// Run updater.js once for every model file found in the brand directory.
fs.readdir(directoryPath, function(err, files) {
  if (err) {
    // FIX: without this `return`, execution fell through on error and
    // crashed on `files.map` because `files` is undefined.
    console.log(`Error: ${err}`);
    return;
  }
  // forEach, not map: the callback is run for side effects only and the
  // returned array was discarded.
  files.forEach((file) => {
    execFile(`node`, ["updater.js", "BMW", file], (error, stdout, stderr) => {
      if (error) {
        red(`error: ${error.message}`);
        return;
      }
      if (stderr) {
        red(`stderr: ${stderr}`);
        return;
      }
      console.log(stdout);
    });
  });
});
And this is the node script executed as child process:
// CLI contract: node updater.js <brand> <model-file>
const args = process.argv.slice(2);
const brand = args[0];
const model = args[1];
// Load the exported JSON for this brand/model (readJSON defined elsewhere).
const data = readJSON(`./json-export/${brand}/${model}`);
const generations = data.generations;
// NOTE(review): top-level `await` only works in an ES module; in CommonJS it
// is a SyntaxError. Also, the mysql connection opened for this query keeps
// the Node event loop alive, which is presumably why the child process never
// exits — `connection.end()` is never called after the work completes; verify.
const generationsDB = await getGenerationsByModelAndBrand(brand, model);
console.log(generationsDB);
// Log the newest modification entry of each generation from the JSON file.
generations.map((generation) => {
const lastModification =
generation.modifications.modification[
generation.modifications.modification.length - 1
];
console.log(lastModification);
});
All the code works if I comment out the const generationsDB line and the next console.log. If not, when execution hits the async request, the execution gets stuck there.
Tested the getGenerationsByModelAndBrand on the main script and works with no issue.
The getGenerationsByModelAndBrand runs a query on database and returns a Promise.
This is the getGenerationsByModelAndBrand method code:
// Promise adapter around the callback-based mysql query API so callers can
// `await` the result set; rejects with the driver error on failure.
export const getGenerationsByModelAndBrand = (brand, model) => {
  const sql = `DATABASE SELECT`;
  return new Promise((resolve, reject) => {
    connection.query(sql, (error, result) => {
      if (error) {
        reject(error);
      } else {
        resolve(result);
      }
    });
  });
};
connection comes from mysql.createConnection method from the mysql package.
I believe that the issue comes from the promise handling in the child process; it's like I'm missing something but I couldn't find what it is.
Edit:
After researching I didn't find a solution or explanation for this issue, therefore in the meantime I moved the getGenerationsByModelAndBrand call to the parent script and pass the result as a parameter.

Related

NodeJS - Looping through Array Sequentially with Timeout between each Element in Array

I have a list of commands in an array that I need to run in order:
// Ordered git workflow: clone, create branch, copy the workflow file,
// stage it, push the branch, then remove the local checkout.
// NOTE(review): each child_process exec() runs in its own shell, so the
// `cd ${repo}` step does not change the working directory of the commands
// that follow it — verify these actually run where intended.
const commands = [
`git clone https://github.com/EliLillyCo/${repo}.git`,
`cd ${repo}`, `git checkout -b ${branch}`,
'cp ../codeql-analysis.yml .github/workflows/',
'git add .github/workflows/codeql-analysis.yml',
`git push --set-upstream origin ${branch}`,
'cd ../',
`rm -r ${repo}`,
];
They need to be run in order, as each command relies on the previous command having been run.
Also, each command needs to have a 3 second wait before running the next command, because sometimes commands take time, especially command 1 and command 5.
I am using a standard for loop which is then using setTimeout() that calls a function to run the commands, as such:
const a = require('debug')('worker:sucess');
const b = require('debug')('worker:error');
const { exec } = require('child_process');
// Fire-and-forget shell execution; reports outcome through the `a`
// (success) and `b` (error) debug channels. Nothing is returned, so the
// caller cannot tell when the command finishes.
function execCommand(command) {
  exec(command, (error, stdout, stderr) => {
    if (!error) {
      a(`stdout: ${stdout}`);
      b(`stderr: ${stderr}`);
      return;
    }
    b(`exec error: ${error}`);
  });
}
// Ordered git steps; `repo` and `branch` must be defined in the enclosing
// scope. NOTE(review): exec() spawns a fresh shell per command, so the
// `cd` entries do not carry over to subsequent commands — confirm.
const commands = [
`git clone https://github.com/EliLillyCo/${repo}.git`,
`cd ${repo}`, `git checkout -b ${branch}`,
'cp ../codeql-analysis.yml .github/workflows/',
'git add .github/workflows/codeql-analysis.yml',
`git push --set-upstream origin ${branch}`,
'cd ../',
`rm -r ${repo}`,
];
// BUG FIX: setTimeout requires a *function*. The original invoked
// `execCommand(commands[i])` immediately and handed its return value
// (undefined) to setTimeout — hence ERR_INVALID_CALLBACK. Wrap the call in
// an arrow function, and stagger the delays so commands still start 3 s
// apart (the original scheduled every command at the same 3000 ms mark).
for (let i = 0; i < commands.length; i++) {
  setTimeout(() => execCommand(commands[i]), 3000 * (i + 1));
}
But there is something wrong with the setTimeout() as it's returning this:
worker:error TypeError [ERR_INVALID_CALLBACK]: Callback must be a function. Received undefined
What is the best way to approach the problem of looping through an array sequentially, whilst using a timeout?
I'd make execCommand return a promise so you know when it's done; you can't rely on timeouts (what if the task takes more than three seconds?) and since most of those commands will complete much faster than that, the timeouts hold things up unnecessarily.
Here's execCommand returning a promise:
// Promise wrapper around exec(): resolves (with no value) on success,
// rejects with the exec error on failure; output goes to the debug loggers.
function execCommand(command) {
  return new Promise((resolve, reject) => {
    exec(command, (error, stdout, stderr) => {
      if (error) {
        b(`exec error: ${error}`);
        return reject(error);
      }
      a(`stdout: ${stdout}`);
      b(`stderr: ${stderr}`);
      resolve();
    });
  });
}
Then if you have top-level await available (modern Node.js and ESM modules):
// If you have top-level `await` available.
// BUG FIX: the loop variable was misspelled (`commmand`) while the body
// awaited `command` — a ReferenceError in strict/module code.
try {
  for (const command of commands) {
    await execCommand(command);
  }
} catch (error) {
  // ...report/handle error...
}
If you don't, wrap it in an async IIFE:
// BUG FIX: the loop variable was misspelled (`commmand`) while the body
// awaited `command`, so the original threw a ReferenceError at runtime.
(async () => {
  for (const command of commands) {
    await execCommand(command);
  }
})().catch(error => {
  // ...report/handle error...
});
Alternatively, you could use util.promisify on exec directly if you wanted to separate the execution from the handling of stdout/stderr, but doing them together was the minimal change to what you had, so that's what I stuck with.
Currently you can't guarantee that the previous command will have completed when calling the next one. You call the next one automatically after 3000 ms, but the previous one can take longer than expected and not be over yet.
You should add a mechanism to await each command, then launch the next one. Here's how using async/await :
const util = require('util');
const exec = util.promisify(require('child_process').exec);
const commands = [ ... ];
// BUG FIX: `stdout` and `stderr` were referenced after the try block but
// never declared — the promisified exec() resolves to { stdout, stderr },
// which the original discarded. Destructure the result so the log lines
// actually receive the command output.
const execCommand = async (command) => {
  let stdout;
  let stderr;
  try {
    ({ stdout, stderr } = await exec(command));
  } catch (error) {
    b(`exec error: ${error}`);
    return;
  }
  a(`stdout: ${stdout}`);
  b(`stderr: ${stderr}`);
};
// Drive the commands strictly in sequence. execCommand handles its own
// errors internally, so the loop simply awaits each invocation in turn.
(async () => {
  for (const command of commands) {
    await execCommand(command);
  }
})();

Lambda function not running other async function even with await added to it

I have a Lambda function that isn't running an async function that I am using await on. I am guessing something else is async and I need to await it though I am unsure what can/should be await'ed as documentation for the redis package doesn't talk about promises or async that I saw. I tried putting await in front of the clienthmset... but VS Code says I can't await that. What are my options for getting this to run properly?
Here is the minimum reproducable code I have come up with, note the only things not included in this example are my logger, my imports and my client setup (has password and hostname)
// BUG FIX: the original called resolve()/reject() without ever creating a
// Promise, so both names were undefined and the async function resolved
// immediately with undefined (which is why `result` logged as nothing).
// Wrap the callback-style hmset in a Promise and return it so
// `await loadRedis(...)` actually waits for redis to answer.
const loadRedis = async (message) => {
  return new Promise((resolve, reject) => {
    client.hmset(`${message.region}:${message._id}`, message, (err, res) => {
      if(err) {
        logger.error(`Error: ${JSON.stringify(err)}`)
        reject(err)
      }
      if(res) {
        logger.info(`Response: ${JSON.stringify(res)}`)
        resolve(res)
      }
    })
  })
}
// Lambda entry point: parse the SQS record body, push its Message into
// redis via loadRedis, log both, and hand the redis reply to the callback.
module.exports.loader = async (event, context, callback) => {
  context.callbackWaitsForEmptyEventLoop = false;
  const body = JSON.parse(event.Records[0].body);
  const message = body.Message;
  const result = await loadRedis(message);
  logger.info(message);
  logger.info(result);
  callback(null, result);
};
The logs in CloudWatch Logs show that the message log is coming through fine, but the result one is coming back with nothing at all other than the log level.
loadRedis is declared async, but you don't return anything, so it's not really await-ing anything..
Probably you just need to do:
// Promise adapter for the callback-based redis hmset: resolves with the
// redis reply or rejects with the error, logging either outcome.
const loadRedis = async (message) => {
  const key = `${message.region}:${message._id}`;
  return new Promise((resolve, reject) => {
    client.hmset(key, message, (err, res) => {
      if(err) {
        logger.error(`Error: ${JSON.stringify(err)}`)
        reject(err)
      }
      if(res) {
        logger.info(`Response: ${JSON.stringify(res)}`)
        resolve(res)
      }
    })
  })
}

A Promise with child_process running a Python script in Node.js - Process exits before all data

I'm trying to get sublist3r to run in a node app. It runs, however, it only shows the banner and then exits out in about 5 seconds. The script is supposed to reach out to the web and takes about 30 seconds to run. If I don't use a promise, it will work just fine. Does it have something to do with pyprog.stdout.on('data') not waiting before it outputs? I've read around and tried 'end' with no luck.
Tried everything on this article if it didn't involve editing python script (I don't think I should need to ?!)
Also read the Node.js Child Process but it doesn't mention using promises with spawn, and I believe that's what I need to use for running a Python script.
sublist3r screen shot
Am I using Promises wrong?
Any help is much appreciated
Edit: Added await to runPy, verified same issue. Have also tried making it a variable, let test = await runPy ... with no success
var express = require('express');
var router = express.Router();
// NOTE(review): two problems here. (1) The executor runs as soon as this
// module is loaded, so the python script starts at require() time — not per
// request — and the settled result is cached for every later GET.
// (2) success() fires on the FIRST stdout 'data' chunk, so only the initial
// output (the banner) is ever delivered; stdout is a stream and must be
// accumulated until 'close' before resolving.
let runPy = new Promise((success, nosuccess) => {
const {spawn} = require('child_process')
const pyprog = spawn('python',['/path/to/sublist3r.py', '-d', 'domain.com'] )
pyprog.stdout.on('data', (data) => {
success(data)
})
pyprog.stderr.on('data', (data) => {
nosuccess(data)
})
pyprog.on('close', (code) => {
// Runs after the promise has already settled; logging only.
console.log(`child process ended with ${code}`);
})
})
/* GET home page. */
// Await the module-level runPy promise and print whatever Buffer chunk it
// settled with; the response object is never written to here.
router.get('/', async (req, res) => {
  const fromRunPy = await runPy;
  console.log(fromRunPy.toString());
});

module.exports = router
Not shortly after I asked, I found a solution if anyone is looking. I'm still not really sure how it works, got to figure that out later. I think pushing it to the array is the key.
const {
spawn
} = require('child_process')
const logOutput = (name) => (data) => console.log(`[${name}] ${data}`)
// Spawn sublist3r and collect its full output. Resolves with the array of
// stdout chunks when the process exits with code 0; rejects with the
// accumulated stderr text otherwise. Every chunk is also echoed via
// logOutput as it arrives.
function run() {
  return new Promise((resolve, reject) => {
    // Renamed from `process` to avoid shadowing the Node global.
    const child = spawn('python', ['/path/sublist3r.py', '-d', 'domain.com']);
    const out = [];
    const err = [];

    child.stdout.on('data', (data) => {
      out.push(data.toString());
      logOutput('stdout')(data);
    });

    child.stderr.on('data', (data) => {
      err.push(data.toString());
      logOutput('stderr')(data);
    });

    child.on('exit', (code, signal) => {
      logOutput('exit')(`${code} (${signal})`);
      if (code === 0) {
        resolve(out);
      } else {
        reject(new Error(err.join('\n')));
      }
    });
  });
}
// Entry point: run the scan, log the collected output, and exit with a
// status code reflecting success or failure.
run()
  .then((output) => {
    logOutput('main')(output);
    process.exit(0);
  })
  .catch((e) => {
    console.error('Error during script execution ', e.stack);
    process.exit(1);
  });

How to share a variable between two files generated by a function

I am writing an API in NodeJS and I have run into a brick wall. I am trying to use a function to grab a variable and use module.exports to use said variable in another file. This however keeps coming up as undefined in the console.
I have already tried using return statements in different places in the file, but I keep getting undefined.
This is what the code looks like to grab the variable and export it.
File 1 (api.js)
const fs = require('fs');
const homeDir = require('os').homedir();
module.exports = {
// BUG: fs.access and fs.readFile are asynchronous, so `return dir;` runs
// before either callback has fired — the caller always receives undefined.
// The assignment to `dir` inside the readFile callback happens too late to
// be observed.
workingDirectory: () => {
let dir;
fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
if(err) throw err;
fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
if(readErr) throw readErr;
let jsonData = JSON.parse(data);
// Assigned only after the outer function has already returned.
dir = jsonData.WorkingDirectory;
});
});
return dir;
}
};
File 2
const api = require('../api');
// NOTE(review): this logs the function object itself — the property is
// never invoked (api.workingDirectory vs api.workingDirectory()).
console.log(api.workingDirectory);
.unitv file
{
"WorkingDirectory": "/home/user/UniTV",
"Port": "3000"
}
In the console it will turn up as undefined when it should turn up with the value of the "working directory" in /home/user/.unitv
Any and all help is appreciated, thanks.
Your current code is particularly problematic.
return dir; occurs before fs.access/fs.readFile finishes. These are asynchronous functions and require the use of callback, promise, or async/await styled coding. The gist of it is that the code continues executing other code while it waits on I/O (such as reading a file) and the way you have written it causes nothing to be returned. See https://repl.it/#CodyGeisler/readFileCallback for a working callback example.
workingDirectory: () => {
let dir;
// Both callbacks below run asynchronously, after the function has returned.
fs.access(`${homeDir}/.unitv`, fs.constants.F_OK, (err) => {
if(err) throw err;
fs.readFile(`${homeDir}/.unitv`, 'utf8', (readErr, data) => {
if(readErr) throw readErr;
let jsonData = JSON.parse(data);
dir = jsonData.WorkingDirectory;
});
});
// Executes first, while dir is still undefined.
return dir;
}

How come both console.log() execute when there is a condition to prevent such a thing from happening?

I am trying to automate the MySQL server startup once I start my application with the following code:
var exec = require('child_process').exec;
// NOTE(review): the Promise executor runs the instant this line executes,
// so the `mysql-ctl status` check starts at module load — not when
// check_status() later awaits it.
let check = new Promise((resolve, reject) =>
exec('mysql-ctl status', function(error, stdout, stderr) {
if (error)
reject(error);
else
resolve(stderr);
})
);
// NOTE(review): this runs `mysql-ctl start` immediately at module load —
// before the status result has been inspected — which is why
// "MySQL server is up!" prints even when the server was already running.
let start = new Promise((resolve, reject) =>
exec('mysql-ctl start', function(error, stdout, stderr) {
if (error)
reject(error);
else
resolve(console.log("MySQL server is up!"));
})
);
// Awaits the `check` promise that was already started at module load;
// this does not trigger a fresh status check.
async function check_status() {
return await check;
}
// Waits for the MySQL start promise only when the status output reports the
// server stopped; otherwise just notes that it was already running.
async function start_server(status) {
  if (status.trim() !== 'MySQL is stopped') {
    console.log("MySQL server was up...");
    return;
  }
  console.log("Starting MySQL server...");
  await start;
}
module.exports = async function() {
check_status()
.then((status) => start_server(status))
.catch((error) => console.log("Error: " + error));
};
However, if the server is already up, I get the following output:
App Started. Listening on PORT 8080
MySQL server was up...
MySQL server is up!
How come both console.log() are executing when there is a condition to prevent precisely that?
When you call new Promise((resolve, reject) => { ... }), the executor function you pass starts running immediately, and the work it kicks off (here, exec) then proceeds asynchronously.
So in your case you start checking if MySQL is running and next you start script for starting MySQL, but before check is finished.
module.exports = async function() {
check_status()
.then((status) => start_server(status))
.catch((error) => console.log("Error: " + error));
};
When this function is executed you are just awaiting result from check in check_status(). But that check was started when check variable is created.
Result of this is in your then method => MySQL server is up!. When check is being executed start is also being executed and because MySQL server is not started you are getting Starting MySQL server...

Categories

Resources