Node.js: how to check if spawned process was successfully killed

I am trying to stop a spawned process in Node.js. My DownloaderWrapper class can spawn and drive a few different downloader applications, which can be swapped out by changing the DOWNLOADER_PATH environment variable. However, I have noticed that, depending on the downloader application in use, the spawned process does not respond to the SIGKILL signal sent by the stop function and keeps running.
Is there any way I can ensure the spawned process is stopped? Or, failing that, can I detect that a process failed to stop and return 'failed' from my stop function?
I am testing on Windows.
import { spawn } from 'child_process';

export default class DownloaderWrapper {
    constructor() {
        this.process = null;
    }

    async download(spawnArgs) {
        if (!this.process) {
            this.process = spawn(process.env.DOWNLOADER_PATH, spawnArgs, { windowsHide: true });
        }
    }

    async stop() {
        if (this.process) {
            this.process.kill('SIGKILL');
            return new Promise(resolve => {
                this.process.on('close', (code, signal) => {
                    console.log(`child process terminated due to receipt of signal "${signal}"`);
                    resolve('stopped');
                });
            });
        } else {
            return 'stopped';
        }
    }
}

Try changing your stop function to this:
function stop() {
    return new Promise((resolve, reject) => {
        if (this.process) {
            this.process.on('exit', (code) => {
                resolve('stopped with code ' + code);
            });
            this.process.kill('SIGKILL');
        } else {
            reject('no process running');
        }
    });
}

// e.g.
stop().then((result) => {
    console.log(result);
}).catch((err) => {
    console.error(err);
});
I could be wrong, but I don't think there is any need to declare your stop function as async, since you are returning a Promise anyway. I have also attached the listener before calling kill(), which makes the ordering easier to reason about. One note on events: a ChildProcess emits both 'exit' (when the process itself terminates) and 'close' (when its stdio streams have closed, which can be some time later), so 'exit' is the more direct evidence that the process was killed. If you are seeing neither event, the process genuinely is not dying; on Windows this often means the signal reached a wrapper executable while the actual downloader keeps running in a separate process.
In addition, as the code stands, after the process is killed you still have a ChildProcess object stored in this.process. If you were to call stop() again, if (this.process) would still be true and it would try to kill it again. You might want to set this.process to null or undefined after you kill it, depending on your use case.
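For example, here is a minimal sketch of a stop() that clears the reference and also reports a failure to die, along the lines the question asks for (the 5-second timeout and the 'failed' result are my own illustrative choices, not from the original code):
function stop() {
    return new Promise(resolve => {
        if (!this.process) {
            resolve('stopped'); // nothing to kill
            return;
        }
        // assume the kill failed if no 'exit' arrives within 5 seconds
        const timer = setTimeout(() => resolve('failed'), 5000);
        this.process.on('exit', () => {
            clearTimeout(timer);
            this.process = null; // allow a fresh download() later
            resolve('stopped');
        });
        this.process.kill('SIGKILL');
    });
}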

Related

How to stop custom function in JavaScript

Look at this code:
const code = "..." // can be anything, it is unknown
const func = new Function(code)

// run `func` and ignore possible errors in the code
function run() {
    try { func() } catch {}
}

// stop `func` execution
function stop() {
    ???
}

$runButton.onclick = run
$stopButton.onclick = stop
Is there any way to stop the function's execution? I just need to rerun the function multiple times, as a kind of playground. But if the code has timers or infinite loops, the page freezes or behaves badly, and I don't want to reload the playground site every time. Any ideas?
The same situation exists in the TS Playground; they don't handle it (press run multiple times).
I tried an approach, but it does not make sense if the function's body is unpredictable.
Another way, which seems the most inefficient: put a check on every line of the function's body, and if a flag is false, break out of a label declared at the top of the body for a 'return' effect:
// `new Function` does not close over local scope, so the flag must be global
globalThis.isRunning = true

const guardedCode = code
    .split('\n')
    .map(line => 'if (!globalThis.isRunning) break functionLabel;' + line)
    .join('\n')
const functionBody = `functionLabel: { ${guardedCode} }`
const func = new Function(functionBody)
Create a web worker and pass the code to it as a message. In theory it runs the code in the background without affecting the main thread; terminate the worker if you need to stop execution. For multiple 'reruns', reinitialise the worker.
worker.js
self.addEventListener('message', event => {
    const { code } = event.data
    const func = new Function(code)
    try {
        func()
        self.postMessage('code ran successfully')
    } catch (error) {
        // handle possible error
        self.postMessage('code ran with errors')
    }
})
main.js
let worker = null

function run() {
    // (re)create the worker if it is not currently running
    if (worker == null) {
        worker = new Worker(pathToWorker)
        worker.addEventListener('message', event => {
            console.log(event.data)
        })
    }
    const code = '...'
    worker.postMessage({ code })
}

function stop() {
    if (worker != null) {
        worker.terminate()
        worker = null
    }
}
Web Workers are supported in roughly 98% of browsers.

What are ways to run a script only after another script has finished?

Let's say this is my code (just a sample I wrote up to show the idea):
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");

RunFunc();

function RunFunc() {
    extract.Start();
    console.log("Extraction complete");
    rescore.Start();
    console.log("Scoring complete");
}
And I want rescore.Start() not to run until the entire extract.Start() has finished. Both scripts contain a spiderweb of functions inside them, so putting a callback directly into the Start() function does not appear viable, as the final function won't return it, and I am having a lot of trouble understanding how to use Promises. What are ways I can make this work?
These are the scripts that extract.Start() begins and ends with. OpenWriter() is reached through multiple other functions and streams, with the actual fileWrite.write() being in another script attached to this one (although that is not needed to detect the end of the run). Currently, fileWrite.on('finish') is where I want the script to be considered done:
module.exports = {
    Start: function CodeFileRead() {
        //this.country = countryIn;
        //Read stream of the address components
        fs.createReadStream("Reference\\" + postValid.country + " ADDRESS REF DATA.csv")
            //Change separator based on file
            .pipe(csv({ escape: null, headers: false, separator: delim }))
            //Indicate start of reading
            .on('resume', (data) => console.log("Reading complete postal code file..."))
            //Process lines of data into a storage array for comparison
            .on('data', (data) => {
                postValid.addProper[data[1]] = JSON.stringify(Object.values(data)).replace(/"/g, '').split(',').join('*');
            })
            //End of reading file
            .on('end', () => {
                postValid.complete = true;
                console.log("Done reading");
                //Launch main script, delayed to here in order to not read ahead of this stream
                ThisFunc();
            });
    },
    extractDone
}

function OpenWriter() {
    //File stream for writing the processed chunks into a new file
    fileWrite = fs.createWriteStream("Processed\\" + fileName.split('.')[0] + "_processed." + fileName.split('.')[1]);
    fileWrite.on('open', () => console.log("File write is open"));
    fileWrite.on('finish', () => {
        console.log("File write is closed");
    });
}
EDIT: I do not want to simply append the next script onto the end of the previous one and forego the master file, as I don't know how long it will become, and it is supposed to be designed to take additional scripts past our development period. I cannot just use a package as it stands, because approval in the company takes up to two weeks and I need this sooner.
DOUBLE EDIT: This is all my own code; every script and function was written by me, so I can make the called scripts do what's needed.
You can just wrap your function in a Promise and return that:
module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            fs.createReadStream(
                'Reference\\' + postValid.country + ' ADDRESS REF DATA.csv'
            )
                // .......some code...
                .on('end', () => {
                    postValid.complete = true;
                    console.log('Done reading');
                    resolve('success');
                });
        });
    }
};
And run RunFunc like this:
async function RunFunc() {
    await extract.Start();
    console.log("Extraction complete");
    await rescore.Start();
    console.log("Scoring complete");
}

// observe overall completion:
RunFunc().then(() => {
    console.log("All Complete");
});
Note: You can/should also handle errors by calling reject(someError) when an error occurs.
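For instance, here is a minimal sketch of wiring the stream's 'error' event to reject (the 'error' handler is my addition, not part of the answer above):
Start: function CodeFileRead() {
    return new Promise((resolve, reject) => {
        fs.createReadStream('Reference\\' + postValid.country + ' ADDRESS REF DATA.csv')
            // reject the promise if the stream fails, so the caller's await can catch it
            .on('error', (err) => reject(err))
            .on('end', () => resolve('success'));
    });
}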
EDIT, after learning about ThisFunc():
Making a new EventEmitter is probably the easiest solution:
eventEmitter.js
const EventEmitter = require('events').EventEmitter
module.exports = new EventEmitter()
const eventEmitter = require('./eventEmitter');

module.exports = {
    Start: function CodeFileRead() {
        return new Promise((resolve, reject) => {
            //after all of your code
            eventEmitter.once('WORK_DONE', () => {
                resolve("Done");
            })
        });
    }
};

function OpenWriter() {
    ...
    fileWrite.on('finish', () => {
        console.log("File write is closed");
        eventEmitter.emit("WORK_DONE");
    });
}
And run RunFunc as before.
There's no generic way to determine when everything a function call does has finished.
It might accept a callback. It might return a promise. It might not provide any kind of method to determine when it is done. It might have side effects that you could monitor by polling.
You need to read the documentation and/or source code for that particular function.
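That said, when a script exposes a flag like the postValid.complete from the question, the polling idea can be sketched roughly like this (the 100 ms interval and the waitForExtract name are my own assumptions):
function waitForExtract() {
    return new Promise(resolve => {
        // poll the side effect the extract script happens to set
        const timer = setInterval(() => {
            if (postValid.complete) {
                clearInterval(timer);
                resolve();
            }
        }, 100);
    });
}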
Use async/await (promises). Example:
var extract = require("./postextract.js");
var rescore = require("./standardaddress.js");

RunFunc();

async function extract_start() {
    try {
        // Start() must return a Promise for this await to actually wait
        await extract.Start();
    }
    catch (e) {
        console.log(e);
    }
}

async function rescore_start() {
    try {
        await rescore.Start();
    }
    catch (e) {
        console.log(e);
    }
}

async function RunFunc() {
    await extract_start();
    console.log("Extraction complete");
    await rescore_start();
    console.log("Scoring complete");
}

How to write a generic rollback or halt in a function

These examples are going to be super simplified but hopefully you get the gist.
Say I have a function like so, which runs a series of tasks and can take a long time:
async function doSomeStuff() {
    await aLongTask();
    await anotherBigOldTask();
    await bigNestedTaskThatTakesForever();
    return Promise.resolve('Done');
}
Now I have a chokidar watch which looks for file changes and runs said function:
const watcher = chokidar.watch(watchURLs, {
    ignored: /\.git/,
    ignoreInitial: true,
    persistent: true,
    ignorePermissionErrors: true,
    atomic: 500,
});

watcher.on('all', async (event, filePath) => {
    await doSomeStuff();
});
The issue is that when the files are changed many times whilst doSomeStuff() is running, it tries to run the build multiple times. This causes a whole wealth of errors, which I've crudely fixed with a timeout lock:
if (!locked) {
    await doSomeStuff().finally(() => {
        locked = false;
        info('Waiting for file changes in', ...watchURLs);
    });
}
else {
    debug('Attempting to trigger a locked build');
}
locked = true;
clearTimeout(lockTimeout);
lockTimeout = setTimeout(() => {
    locked = false;
    debug('Unlocked due to timeout');
}, 10000);
This stops stuff from completely imploding, but it means that everything will always be out of date. It's only when saving a file after the lock grace period is over that it will be picked up. And if a load of files are saved, it could build with just half of the updated ones included.
So, how can the doSomeStuff() function be completely cancelled/halted/rolled back immediately before a new build is triggered? I don't want a solution where I set a variable that makes doSomeStuff() return and have to wrap it around every task, because that's not immediate and doesn't work for any chuggy functions nested inside. It's almost as if I want to throw an error into a function, rather than that function controlling what to throw for itself.
Can anyone think of a way of immediately forcing a function to stop executing without killing the whole script (a la process.exit())? Or if there's just a simple way of doing this kind of thing? TIA.
Throwing in an error actually sounds good; that can be done quite elegantly:
const cancellable = fn => (...args) => {
    let cancel = false;
    const breakpoint = () => {
        if (cancel) throw new Error("Cancelation");
    };
    return {
        result: fn(breakpoint, ...args).catch(console.log),
        stop() { cancel = true; },
    };
};
That can be used as:
const doSomeStuff = cancellable(async function doSomeStuff(breakpoint) {
    await aLongTask();
    breakpoint();
    await anotherBigOldTask();
    breakpoint();
    await bigNestedTaskThatTakesForever();
    return "Done";
});

let task = doSomeStuff();
task.stop();
No, what you are looking for is not possible. There is no "task" abstraction that implicitly wraps something, and you cannot interrupt arbitrary functions; they need to explicitly opt in to being cancellable (e.g. by checking a breakpoint that is passed to them, as in Jonas' answer). You could use a child process, which you can kill, but that might leave inconsistent state behind.
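For completeness, a rough sketch of that child-process variant (build-worker.js is a hypothetical script that runs doSomeStuff() and exits; the fork-based wiring is my assumption, not part of the answer):
const { fork } = require('child_process');

let child = null;

function startBuild() {
    // kill any build still in flight before starting a new one
    if (child) child.kill('SIGKILL');
    child = fork('./build-worker.js'); // hypothetical worker script
    child.on('exit', () => { child = null; });
}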
Let's break it down:
Promise Cancellation: This does exist, but you need a polyfill that adds support for .cancel, which when called will cancel the promise's execution. You can read more about this here. Once you install a polyfill, your promise will have a .cancel, so you can have:
const doSomeStuff = () => {
    return new Promise(resolve => {
        ...
    })
}

let promise = null;

const watcher = chokidar.watch(watchURLs, {
    ignored: /\.git/,
    ignoreInitial: true,
    persistent: true,
    ignorePermissionErrors: true,
    atomic: 500,
});

watcher.on('all', (event, filePath) => {
    promise = doSomeStuff();
});

watcher.on('change', () => {
    promise && promise.cancel()
});
Rollback: This does not exist; you'd have to implement it yourself. Maybe take a snapshot of the state at the very beginning of doSomeStuff and then, if .cancel was called, update the state to match the beginning state. But a "rollback" is not a predefined thing that can have native support, unlike in databases, where a rollback has a pretty good definition.
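A minimal sketch of that snapshot idea (the buildState object and the deep-copy-via-JSON shortcut are illustrative assumptions):
let buildState = { /* whatever doSomeStuff mutates */ };

async function doSomeStuffWithRollback() {
    // cheap deep copy; good enough for a snapshot of JSON-safe state
    const snapshot = JSON.parse(JSON.stringify(buildState));
    try {
        await doSomeStuff();
    } catch (err) {
        buildState = snapshot; // restore the starting state on cancellation/failure
        throw err;
    }
}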

Restart Async Function After Aborting API Fetch

I am creating a Node.js module that takes a list of movie titles and fetches their respective metadata from omdbapi.com.
These lists are often very large, and sometimes (with my current slow internet connection) the connection stalls due to too many concurrent connections, so I set up a timeout/abort method that restarts the process after 30 seconds.
The problem I'm having is that whenever I lose my internet connection or the connection stalls, the process just bails out and doesn't restart the connection.
Example:
async function getMetadata() {
    const remainingMovies = await getRemainingMovies();
    let data, didTimeout; // declared outside the loop so the final check can see them
    for (let i = 0; i < remainingMovies.length; i++) {
        ({ data, didTimeout } = await fetchMetadata(remainingMovies[i]));
        // Update "remainingMovies" Array...
        if (didTimeout) {
            getMetadata();
            break;
        }
    }
    if (!didTimeout) {
        return data;
    }
}
This is obviously a simplified version, but essentially:
The getMetadata function gets the remainingMovies array from the global scope.
It fetches the metadata from the server with the fetchMetadata function.
It checks whether the connection timed out or not.
If it did, it should restart the function and attempt to connect again.
If it didn't time out, the for loop finishes and execution continues.
(A sketch of what fetchMetadata might look like follows below.)
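For reference, fetchMetadata could look something like this sketch (the Promise.race approach, the fetch call, the URL shape, and the 30-second figure are illustrative assumptions; the asker's real implementation is not shown):
function fetchMetadata(movie) {
    // resolve with didTimeout = true if the request takes longer than 30 s
    const timeout = new Promise(resolve =>
        setTimeout(() => resolve({ data: null, didTimeout: true }), 30000));
    const request = fetch('http://www.omdbapi.com/?t=' + encodeURIComponent(movie)) // API key omitted
        .then(res => res.json())
        .then(data => ({ data, didTimeout: false }));
    return Promise.race([request, timeout]);
}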
I guess you want something similar to the script below: error handling using try/catch with async/await, which is probably the missing puzzle piece you are looking for.
async function getMetadata() {
    const remainingMovies = await getRemainingMovies();
    // the map callback must be async for await to be legal inside it;
    // Promise.all waits for every fetch to settle
    return Promise.all(remainingMovies.map(async movie => {
        try {
            return await fetchMetadata(movie);
        } catch (err) {
            return getMetadata();
        }
    }));
}

SignalR: Wait for connection to be re-established before invoking methods

I have a HubProxy with many client-triggered methods, as described below:
proxy.invoke('hub_Subscribe');
proxy.invoke('triggerOnServer');
proxy.invoke('dataToServer', someModel);
Now, if SignalR is not connected to the server and I try to invoke any of the above methods, I get a "Connection must be started before data can be sent." or "Connection was disconnected before invocation result was received." error.
I am aware that one can use connection.stateChanged to confirm whether SignalR is connected and invoke methods accordingly. But there is a need to record these invocations so that they can be replayed once the SignalR connection is up.
So, is there a simple way to record these method calls while the connection is down and, once the SignalR connection is up and running, invoke them?
Something like proxy.invoke('dataToServer', someModel).WaitForSignalRToBeConnected().
Note: I continuously reconnect to the server after the client gets disconnected, using THIS.
I currently have this problem solved with the following promise:
function GetOpenConnection() {
    return new Promise((resolve, reject) => {
        const msMax = 1000;
        const msInc = 10;
        var ms = 0;
        var idInterval = setInterval(() => {
            if (connection.connectionState == 1) {
                clearInterval(idInterval);
                resolve(connection);
                return; // don't fall through to the timeout check on this tick
            }
            ms += msInc;
            if (ms >= msMax) {
                clearInterval(idInterval);
                reject(connection);
            }
        }, msInc);
    });
}
Usage:
console.log('before GetOpenConnection()');

GetOpenConnection().then((connection) => {
    console.log('Connected: ', connection.connectionState);
    // invoke functions here
}).catch((connection) => {
    console.log('Not connected: ', connection.connectionState);
});

console.log('after GetOpenConnection()');
What you need are promises, using the $q service.
I'm assuming you have to initialize the hub connection first, then invoke the three other methods in that order.
Create a service, if you don't already have one, for managing communication with the SignalR server. Inject the $q service and SignalR into it.
Wrap each of the following commands in separate methods:
function doInitialize() {
    ... // connect to signalR
    return $q.when(connection.hub.start());
};

function doHubSubscribe() {
    return $q.when(proxy.invoke('hub_Subscribe'));
};

function doTriggerOnServer() {
    return $q.when(proxy.invoke('triggerOnServer'));
};

function doDataToServer() {
    return $q.when(proxy.invoke('dataToServer', someModel));
};
Now, from your controller, you can chain the service's methods so that each one must succeed before the next ones run:
function initSignalR() {
    return service.doInitialize().then(function () {
        return service.doHubSubscribe();
    }).then(function () {
        return service.doTriggerOnServer();
    }).then(function () {
        return service.doDataToServer();
    });
};
Edit:
If you want to add more handling to your invocations, you can use the jQuery .done() and .fail() methods like this:
function sendDataToServer() {
    var deferred = $q.defer();
    proxy.invoke('dataToServer', someModel)
        .done(function (result) {
            deferred.resolve(result);
            console.log('success!');
        })
        .fail(function () {
            deferred.reject();
            console.error('Error invoking server!');
        });
    return deferred.promise;
};
You can add more handling code in the .fail() method, for example using the $timeout service to set up retries.
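A rough sketch of that retry idea (the retry count and the one-second delay are my own illustrative choices):
function sendDataToServerWithRetry(retriesLeft) {
    var deferred = $q.defer();
    proxy.invoke('dataToServer', someModel)
        .done(function (result) {
            deferred.resolve(result);
        })
        .fail(function () {
            if (retriesLeft > 0) {
                // wait a second, then try again
                $timeout(function () {
                    sendDataToServerWithRetry(retriesLeft - 1)
                        .then(deferred.resolve, deferred.reject);
                }, 1000);
            } else {
                deferred.reject();
            }
        });
    return deferred.promise;
};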
Hope this helps.
