My folder structure:
app
Model
user.js
post.js
My package.json file
"scripts": {
"migrate": "node ./app/Model/"
}
I want to run a JavaScript file from the command line dynamically.
Like:
npm run migrate user.js
npm run migrate post.js
Is there any way to achieve this?
You could write a script that dynamically requires the desired JS file.
"scripts": {
"migrate": "node model.js"
}
Then model.js like this:
// model.js — entry point for `npm run migrate <file>`.
// Dynamically requires the requested script from app/Model,
// e.g. `npm run migrate user.js` loads app/Model/user.js.
const path = require('path');

// npm forwards extra CLI arguments to the script, so the file name
// arrives as process.argv[2]. Guard against it being missing:
// path.join(..., undefined) would otherwise throw a confusing TypeError.
const target = process.argv[2];
if (!target) {
  console.error('Usage: npm run migrate <file>');
  process.exit(1);
}

require(path.join(__dirname, 'app', 'Model', target));
"scripts": {
"migration": "node Model.js"
}
// Model.js — CLI built with commander; invoked as `npm run migration f <file>`.
const program = require('commander');

// Action handler for the `f` subcommand; receives the file-name argument.
const handleMigration = async (file) => {
  console.log(file); // do something here
};

program
  .command('f <file>')
  .description('Database migration.')
  .action(handleMigration);

// Hand the raw argv to commander so it can dispatch the subcommand.
program.parse(process.argv);
npm run migration f post.js
Related
My file is:
import * as fs from 'fs'

// Intended to load model files from ./models and log them.
async function loadModels() {
console.log('here i am!')
// NOTE(review): fs.readFileSync on a *directory* throws EISDIR on most
// platforms — fs.readdirSync (list the directory) is presumably what was
// intended here. Confirm.
const modelFiles = fs.readFileSync(__dirname + '/models')
console.log(modelFiles)
}

// Fire-and-forget entry point: the returned promise is neither awaited
// nor given a rejection handler, so any error is silently unhandled.
(async () => {
loadModels()
})()
And in package.json, I have:
"fixtures": "tsc fixtures/index"
So when I run yarn fixtures, I get:
yarn fixtures
yarn run v1.22.4
$ tsc fixtures/index
✨ Done in 8.78s.
Why won't my loadModels run?
tsc is only compiling your TypeScript file, it's not executing it
to execute your TypeScript file you can use ts-node package
and then in your package.json:
"fixtures": "ts-node fixtures/index"
I'm having monorepo with lerna and demo web-site.
I want to publish changed packages and automatically update it in demo from 1 command like npm run release.
package.json:
...
"scripts": {
"release": "node ./release.js",
...
}
release.js
const { spawnSync } = require('child_process');

// Publish changed packages with lerna.
const cmd = `npx lerna publish`;
// NOTE(review): spawnSync returns a result object ({ status, stdout,
// stderr, ... }), not a list of packages. Moreover, with stdio: 'inherit'
// the child's output goes straight to the terminal and result.stdout is
// null — to parse the published packages, capture stdout instead.
const updatedPackages = spawnSync(cmd, { stdio: 'inherit', shell: true }); // goal: get the list of successfully published packages here.
udpdateDemo(updatedPackages); // custom function (defined elsewhere) that takes the package list and updates the demo. NOTE(review): name looks misspelled ("udpdate") — confirm it matches its definition.
I have many js scripts in one folder (scripts/*.js).
How to execute them all from the gulp task (instead of using 'node script.js' many times)?
something like
// NOTE(review): gulp.src only creates a vinyl file stream; it does not
// execute the matched files, and the stream here is neither returned nor
// piped anywhere, so this task matches the scripts but runs nothing.
gulp.task('exec_all_scripts', function () {
gulp.src(path.join(__dirname, './scripts/*.js'))
})
Gulp is a task runner, meaning it's meant to automate sequences of commands, not run entire scripts. Instead, you can use NPM for that. I don't think there's a way to glob scripts and run them all at once, but you can set each file as its own npm script and use npm-run-all to run them:
{
"name": "sample",
"version": "0.0.1",
"scripts": {
"script:foo": "node foo.js",
"script:bar": "node bar.js",
"script:baz": "node baz.js",
"start": "npm-run-all --parallel script:*",
},
"dependencies": {
"npm-run-all": "^4.0.2"
}
}
Then you can use npm start to run all your scripts at once.
If you really need to use gulp to run the scripts, you can use the same strategy, and then use gulp-run to run the npm script with gulp.
var run = require('gulp-run');

// Delegate to the npm "start" script (which fans out to every script via
// npm-run-all) and write whatever the command produces into output/.
gulp.task('exec_all_scripts', function () {
  var npmStart = run('npm start').exec(); // spawn "npm start" as a pipeline
  return npmStart.pipe(gulp.dest('output')); // results land in output/echo
});
You can export functions in your scripts/*.js files, import them in gulpfile.js, and call them from the 'exec_all_scripts' task — it's easy.
You could concatenate all of the scripts into a single script and then execute it from the same task, a different task, or using a different process. See the following NPM package: https://www.npmjs.com/package/gulp-concat
Here is an example:
var concat = require('gulp-concat'); // include package

// Concatenate every script under ./scripts into a single dist/all_scripts.js.
gulp.task('exec_all_scripts', function () {
  // Fix: the original snippet was missing the closing parenthesis on
  // gulp.src(...), which made the example a syntax error.
  return gulp.src(path.join(__dirname, './scripts/*.js'))
    .pipe(concat('all_scripts.js'))
    .pipe(gulp.dest('./dist/')); // assuming you had a dist folder but this could be anything
});
In my package.json I have
"scripts": {
"test": "node tests/*-test.js"
}
And I have a-test.js and b-test.js in the tests folder, which I can verify by running ls tests/*-test.js.
However, npm run test is only executing a-test.js. How can I execute all *-test.js scripts? Explicitly listing them is not an option, since I will have more than 2 to run in the future.
You could use a task manager such as grunt or gulp, or a simple script that executes those scripts:
test.js:
// test.js — entry point that requires each test file in turn, so that
// `node test.js` executes all of them.
// NOTE(review): the question's folder is named "tests/" while these paths
// use "./test/" — adjust the paths to match the actual folder name.
require('./test/a-test.js')
require('./test/b-test.js')
package.json
"scripts": {
"test": "node test.js"
}
You could also use the include-all module for automating these for you https://www.npmjs.com/package/include-all
Example using includeAll:
const path = require('path');
const includeAll = require('include-all');

// Require every file under ./test whose name ends in "test.js" in one call.
const testDir = path.join(__dirname, 'test');
const controller = includeAll({
  dirname: testDir,
  filter: /(.+test)\.js$/,
});
I have a gulp.js file that includes:
// Default task: running plain `gulp` triggers the "watch" task below.
gulp.task('default', ['watch']);
Which starts up the watch task
gulp.task('watch', function(){
gulp.watch(productionScripts, ['autoConcat']);
});
Then on any saved changes to files in productionScripts, the watch task will concat the files.
What I would like to do, is in my package.json, I would like to spool up this watch when I type npm start (this already starts my node server).
package.json
"start": "node server.js",
UPDATE--------
Ben(b3nj4m.com), I tried what you stated. The watch and server start up. However, everything runs twice (probably due to the editor, not related), but I do lose my server log when I start it up with gulp.
[15:31:18] Starting 'autoConcat'...
[15:31:18] Finished 'autoConcat' after 147 ms
[15:31:19] Starting 'autoConcat'...
[15:31:19] Finished 'autoConcat' after 138 ms
[15:31:20] Starting 'autoConcat'...
[15:31:20] Finished 'autoConcat' after 127 ms
[15:31:23] Starting 'autoConcat'...
It's like there is a loop between the server restarting on a change, and the concatenated file changing.
You could run your server from your gulpfile:
var child = require('child_process');
var fs = require('fs');

// Running plain `gulp` starts both the node server and the file watcher.
gulp.task('default', ['server', 'watch']);

// Spawn server.js as a child process and append its stdout/stderr to
// server.log (flags: 'a' opens the log in append mode).
gulp.task('server', function() {
  var serverProcess = child.spawn('node', ['server.js']);
  var logStream = fs.createWriteStream('server.log', {flags: 'a'});
  serverProcess.stdout.pipe(logStream);
  serverProcess.stderr.pipe(logStream);
});

// Re-run "autoConcat" whenever a file in productionScripts changes.
gulp.task('watch', function(){
  gulp.watch(productionScripts, ['autoConcat']);
});
Then change your npm start definition to look like:
"scripts": {
"start": "gulp"
}
You could concatenate multiple tasks in your start in package.json using the package concurrently as such:
{
"start": "concurrent \"node server.js\" \"gulp\" "
}
And run npm start from your terminal. This would execute all statements within start.
For references: https://www.npmjs.com/package/concurrently
EDIT:
As pointed out by #Josh in the comments, the CLI name now matches the package name. Hence, you could write the script as:
{
"start": "concurrently \"node server.js\" \"gulp\" "
}
I have something like this in one of my projects. Note that it will background both processes - you can use ps to get the ID and stop it with kill <pid>.
"scripts": {
"start": "{ gulp watch & node server.js & }"
}
To disable logging, too:
"scripts": {
"start": "{ gulp watch --silent & node server.js & }"
}
One best practice to consider is to use nodemon and gulp-nodemon and then like the accepted answer, trigger the gulp script from npm with npm start. It's blazing fast and you get the node server restarted on file changes. For example:
gulpfile.js
var gulp = require('gulp');
var nodemon = require('gulp-nodemon');
...
var nodemonOptions = {
script: 'bin/www.js',
ext: 'js',
env: { 'NODE_ENV': 'development' },
verbose: false,
ignore: [],
watch: ['bin/*', 'routes/*', 'app.js']
};
gulp.task('start', function () {
nodemon(nodemonOptions)
.on('restart', function () {
console.log('restarted!')
});
});
package.json
{
...
"scripts": {
"start": "gulp start"
},
"devDependencies": {
"gulp": "^3.9.0",
"gulp-nodemon": "^2.0.4"
}
}