Stop nodemon programmatically after run - javascript

I'm working on an API that runs in the background. We have some helper methods that need to be launched like so: nodemon --exec babel-node commands/router.js, for instance to display all routes.
Note that node commands/router.js cannot work on its own, as we need Babel.
At the moment the method runs without problems, but I need to stop nodemon from running after execution. I understand that nodemon is supposed to keep running after execution, but our project is designed this way: I need to use nodemon for execution and then kill it.
How can I kill/stop nodemon after the run?
Code
package.json
{
  ...
  "scripts": {
    "start": "nodemon --exec babel-node index.js",
    "router": "nodemon --exec babel-node commands/router.js"
  },
  ...
}
router.js
const script = () => {
  // Fetch and display routes
}

script()

EDIT:
From the nodemon documentation, the correct way to handle this use case is to trap the SIGUSR2 signal, run your own graceful shutdown, and then kill the process manually, like so:
process.once('SIGUSR2', function () {
  gracefulShutdown(function () {
    process.kill(process.pid, 'SIGUSR2');
  });
});
You can read more in the nodemon documentation.
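Note that gracefulShutdown is not a nodemon API; it stands in for whatever cleanup your script needs before nodemon restarts it. A minimal sketch, assuming an Express-style HTTP server (express and the port are our assumptions):

const express = require('express');
const app = express();
const server = app.listen(3000);

// your own cleanup: close connections, flush logs, etc., then call done()
function gracefulShutdown(done) {
  server.close(done);
}

process.once('SIGUSR2', function () {
  gracefulShutdown(function () {
    // re-send the signal so nodemon can restart the process
    process.kill(process.pid, 'SIGUSR2');
  });
});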

We found a solution by removing nodemon from the helper scripts and manually ending them with process.exit().
Final code
package.json
{
  ...
  "scripts": {
    "start": "nodemon --exec babel-node index.js", // <= still called with nodemon
    "router": "babel-node commands/router.js"
  },
  ...
}
router.js
const script = () => {
  // Fetch and display routes
  process.exit()
}

script()
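If the helper script can fail, it may also be worth returning a non-zero exit code so the shell or CI can tell success from failure; a small sketch of the same script, with the try/catch being our addition:

const script = () => {
  try {
    // Fetch and display routes
    process.exit(0) // success
  } catch (err) {
    console.error(err)
    process.exit(1) // non-zero exit code signals failure to the caller
  }
}

script()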

Related

Catch exit signal from npm script package.json

Is it possible to hook the exit signal of an npm script?
"scripts": {
"serve": "docker-compose up && npm start"
}
I want to be able to call docker-compose down when exiting the script with ctrl+c.
With a shell script, this is possible by 'trapping' the SIGINT signal:
#!/bin/bash
trap 'docker-compose down ; echo Stopped ; exit 0' SIGINT
docker-compose up &
npm start
I would rather not use a shell script, so that it can run on OSes other than Unix-like systems.
I was able to use your initial script in an npm script (package.json) when running docker compose in the background (-d).
"scripts": {
...
"up": "trap 'docker-compose down ; echo Stopped ; exit 0' SIGINT; docker-compose up -d && npm start"
...
}
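Note that trap relies on npm's script shell being a POSIX shell such as sh or bash; on Windows, where npm uses cmd.exe by default, this one-liner will not work.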
I would rather not use a shell script, so that it can run on OSes other than Unix-like systems.
You can write your script in Node.js to make it compatible with any OS where npm start can be run.
#!/usr/bin/env node
'use strict';
const childProcess = require('child_process');

// bring the services up in the background (-d) so this call returns
childProcess.spawnSync('docker-compose', ['up', '-d'], { stdio: 'inherit' });

process.on('SIGINT', () => {
  console.log('SIGINT caught, running docker-compose down');
  childProcess.spawnSync('docker-compose', ['down'], { stdio: 'inherit' });
  process.exit(0);
});

console.log('go ahead, press control-c');
childProcess.spawnSync('npm', ['start'], { stdio: 'inherit' });
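To wire this into npm, save the file (the name up.js here is just an example) and point a script at it:

"scripts": {
  "up": "node up.js"
}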

What is the proper way to start multiple local node.js servers?

I'm creating an example project for an open source framework. For my demo to run, some of its dependencies must be running as local servers on other ports.
I'd rather just provide them a single command to run instead of telling them to open multiple terminals and run multiple commands in each.
What is the best/most proper/most elegant way to go about this?
This is how I accomplish this for two web servers. You should be able to play with more &'s and fg's to get more servers.
package.json:
{
  "scripts": {
    "start": "node node_modules/something/server.js & node server.js && fg"
  }
}
So the user only has to run npm install and then npm start to run two servers in one terminal and ctrl-c kills both.
Breakdown:
node node_modules/something/server.js & runs this server in the background
node server.js && runs my server in the foreground
fg moves the most recently backgrounded job into the foreground
If you use the npm package called 'concurrently', set up your package.json file as below; you can then use the following three commands:
run only the server:
npm run server
run only the client:
npm run client
run both:
npm run dev

"scripts": {
  "server": "nodemon server.js --ignore client",
  "client": "npm start --prefix client",
  "dev": "concurrently \"npm run server\" \"npm run client\""
},
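Note that concurrently has to be installed first, e.g. with npm install --save-dev concurrently.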
For those with this use case: if you want a single script that opens multiple terminals and runs a different Node.js server in each, you can do something like the following (this is written for Windows; on other OSes you can change the commands).
You can write a single Node.js file that starts all your other servers in separate terminal windows.
startAllServers.js:
const child_process = require('child_process');

// commands list
const commands = [
  {
    name: 'Ap1-1',
    command: 'cd ./api1 && start nodemon api1.js'
  },
  {
    name: 'Ap1-2',
    command: 'cd ./api2 && start nodemon api2.js'
  }
];

// run a single command in a child shell
function runCommand(command, name, callback) {
  child_process.exec(command, function (error, stdout, stderr) {
    if (error) {
      callback(error, null);
    } else {
      callback(null, `Successfully executed ${name} ...`);
    }
  });
}

// main calling function
function main() {
  commands.forEach(element => {
    runCommand(element.command, element.name, (err, res) => {
      if (err) {
        console.error(err);
      } else {
        console.log(res);
      }
    });
  });
}

// call main
main();
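Run it with node startAllServers.js. The Windows start command is what opens each server in its own terminal window; on other OSes you would substitute the platform's equivalent.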
Use the concurrently npm package.
concurrently "node server.js" "node client.js"
This allows you to run multiple commands with clean output in one go. And they don't have to be just node servers; you can combine any shell commands.
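concurrently also accepts flags worth knowing about, e.g. --kill-others, which stops the remaining commands as soon as one of them exits.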

run node js scripts from gulp task

I have many js scripts in one folder (scripts/*.js).
How can I execute them all from a gulp task (instead of running node script.js many times)?
something like
gulp.task('exec_all_scripts', function () {
  gulp.src(path.join(__dirname, './scripts/*.js'))
})
Gulp is a task runner, meaning it's meant to automate sequences of commands, not to run entire scripts. Instead, you can use npm for that. I don't think there's a way to glob scripts and run them all at once, but you can set each file up as its own npm script and use npm-run-all to run them:
{
  "name": "sample",
  "version": "0.0.1",
  "scripts": {
    "script:foo": "node foo.js",
    "script:bar": "node bar.js",
    "script:baz": "node baz.js",
    "start": "npm-run-all --parallel script:*"
  },
  "dependencies": {
    "npm-run-all": "^4.0.2"
  }
}
Then you can use npm start to run all your scripts at once.
If you really need to use gulp to run the scripts, you can use the same strategy, and then use gulp-run to run the npm script with gulp.
var run = require('gulp-run');

// use gulp-run to start a pipeline
gulp.task('exec_all_scripts', function() {
  return run('npm start').exec()    // run "npm start"
    .pipe(gulp.dest('output'));     // writes results to output/echo
})
You can also export functions from your scripts/*.js files, import them in gulpfile.js, and call them in the 'exec_all_scripts' task; it's easy.
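A minimal sketch of that approach, using a hypothetical scripts/foo.js (the file and function names are placeholders):

// scripts/foo.js
module.exports = function runFoo() {
  console.log('foo ran');
};

// gulpfile.js
var gulp = require('gulp');
var runFoo = require('./scripts/foo');

gulp.task('exec_all_scripts', function (done) {
  runFoo();
  done(); // signal task completion to gulp
});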
You could concatenate all of the scripts into a single script and then execute it from the same task, a different task, or a different process. See the following npm package: https://www.npmjs.com/package/gulp-concat
Here is an example:
var concat = require('gulp-concat'); // include package

gulp.task('exec_all_scripts', function() {
  return gulp.src(path.join(__dirname, './scripts/*.js'))
    .pipe(concat('all_scripts.js'))
    .pipe(gulp.dest('./dist/')); // assuming you had a dist folder, but this could be anything
});
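Keep in mind that gulp-concat only produces the bundle; nothing executes it. You would still run the result yourself, e.g. node ./dist/all_scripts.js.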

Using wildcard to run multiple scripts for npm run test

In my package.json I have
"scripts": {
  "test": "node tests/*-test.js"
}
And I have a-test.js and b-test.js in the tests folder, which I can verify by running ls tests/*-test.js.
However, npm run test only executes a-test.js: the shell expands the glob, but node runs only the first file passed to it. How can I execute all *-test.js scripts? Explicitly listing them is not an option, since I will have more than 2 to run in the future.
You could use a task manager such as grunt or gulp, or a simple script that executes those scripts:
test.js:
require('./tests/a-test.js')
require('./tests/b-test.js')
package.json
"scripts": {
"test": "node test.js"
}
You could also use the include-all module to automate this for you: https://www.npmjs.com/package/include-all
Example using includeAll:
const path = require('path');
const includeAll = require('include-all');

const controller = includeAll({
  dirname: path.join(__dirname, 'tests'),
  filter: /(.+test)\.js$/,
});
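If you would rather not add a dependency, here is a dependency-free sketch using Node's built-in fs module (assuming the files live in ./tests as in the question):

const fs = require('fs');
const path = require('path');

// require() runs each matching test file's top-level code
const dir = path.join(__dirname, 'tests');
fs.readdirSync(dir)
  .filter(function (file) { return file.endsWith('-test.js'); })
  .forEach(function (file) { require(path.join(dir, file)); });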

How to start Gulp watch task when I type npm start

I have a gulpfile.js that includes:
gulp.task('default', ['watch']);
Which starts up the watch task
gulp.task('watch', function(){
  gulp.watch(productionScripts, ['autoConcat']);
});
Then on any saved changes to files in productionScripts, the watch task will concat the files.
What I would like to do is spool up this watch from my package.json when I type npm start (which already starts my node server).
package.json
"start": "node server.js",
UPDATE:
Ben (b3nj4m.com), I tried what you stated. The watch and server start up. However, everything runs twice (probably due to the editor, not related), and I do lose my server log when I start it up with gulp.
[15:31:18] Starting 'autoConcat'...
[15:31:18] Finished 'autoConcat' after 147 ms
[15:31:19] Starting 'autoConcat'...
[15:31:19] Finished 'autoConcat' after 138 ms
[15:31:20] Starting 'autoConcat'...
[15:31:20] Finished 'autoConcat' after 127 ms
[15:31:23] Starting 'autoConcat'...
It's like there is a loop between the server restarting on a change, and the concatenated file changing.
You could run your server from your gulpfile:
var child = require('child_process');
var fs = require('fs');

gulp.task('default', ['server', 'watch']);

gulp.task('server', function() {
  var server = child.spawn('node', ['server.js']);
  var log = fs.createWriteStream('server.log', {flags: 'a'});
  server.stdout.pipe(log);
  server.stderr.pipe(log);
});

gulp.task('watch', function(){
  gulp.watch(productionScripts, ['autoConcat']);
});
Then change your npm start definition to look like:
"scripts": {
"start": "gulp"
}
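On the restart loop mentioned in the update: if the concatenated output file itself matches the productionScripts glob, every 'autoConcat' run retriggers the watcher. A sketch of one fix, assuming the bundle is written to dist/ (the paths here are placeholders):

var productionScripts = ['src/**/*.js', '!dist/**'];

gulp.task('watch', function () {
  // the '!dist/**' exclusion keeps autoConcat from retriggering itself
  gulp.watch(productionScripts, ['autoConcat']);
});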
You could combine multiple commands in your start script in package.json using the concurrently package, as such:
{
  "start": "concurrent \"node server.js\" \"gulp\" "
}
And run npm start from your terminal. This would execute all statements within start.
For references: https://www.npmjs.com/package/concurrently
EDIT:
As pointed out by @Josh in the comments, the CLI name now matches the package name. Hence, you could write the script as:
{
  "start": "concurrently \"node server.js\" \"gulp\" "
}
I have something like this in one of my projects. Note that it will background both processes - you can use ps to get the ID and stop it with kill <pid>.
"scripts": {
"start": "{ gulp watch & node server.js & }"
}
To disable logging, too:
"scripts": {
"start": "{ gulp watch --silent & node server.js & }"
}
One best practice to consider is to use nodemon and gulp-nodemon and then, like in the accepted answer, trigger the gulp script from npm with npm start. It's blazing fast, and you get the node server restarted on file changes. For example:
gulpfile.js
var gulp = require('gulp');
var nodemon = require('gulp-nodemon');
...

var nodemonOptions = {
  script: 'bin/www.js',
  ext: 'js',
  env: { 'NODE_ENV': 'development' },
  verbose: false,
  ignore: [],
  watch: ['bin/*', 'routes/*', 'app.js']
};

gulp.task('start', function () {
  nodemon(nodemonOptions)
    .on('restart', function () {
      console.log('restarted!')
    });
});
package.json
{
  ...
  "scripts": {
    "start": "gulp start"
  },
  "devDependencies": {
    "gulp": "^3.9.0",
    "gulp-nodemon": "^2.0.4"
  }
}
