I have a couple of Grunt tasks and I am running into issues trying to share global variables across them.
I have written some custom tasks that set the proper output path depending on the build type, and this part seems to be working correctly.
// Set Mode (local or build)
grunt.registerTask("setBuildType", "Set the build type. Either build or local", function (val) {
// grunt.log.writeln(val + " :setBuildType val");
global.buildType = val;
});
// SetOutput location
grunt.registerTask("setOutput", "Set the output folder for the build.", function () {
if (global.buildType === "tfs") {
global.outputPath = MACHINE_PATH;
}
if (global.buildType === "local") {
global.outputPath = LOCAL_PATH;
}
if (global.buildType === "release") {
global.outputPath = RELEASE_PATH;
}
if (grunt.option("target")) {
global.outputPath = grunt.option("target");
}
grunt.log.writeln("Output folder: " + global.outputPath);
});
grunt.registerTask("globalReadout", function () {
grunt.log.writeln(global.outputPath);
});
So, I'm then trying to reference global.outputPath in a subsequent task, and I'm running into errors.
If I call grunt test from the command line, it outputs the correct path no problem.
However, if I have a task like this:
clean: {
release: {
src: global.outputPath
}
}
It throws the following error:
Warning: Cannot call method 'indexOf' of undefined Use --force to continue.
Also, the constants used in the setOutput task are defined at the top of my Gruntfile.js.
Any thoughts? Am I doing something wrong here?
So, I was on the right track. The issue is that the module exports before those global variables get set, so they are all undefined in anything referenced within initConfig().
The solution I came up with (there may be a better one) is to overwrite a grunt.option value.
My task already accepts an optional --target option, so the working solution looks like this:
grunt.registerTask("setOutput", "Set the output folder for the build.", function () {
if (global.buildType === "tfs") {
global.outputPath = MACHINE_PATH;
}
if (global.buildType === "local") {
global.outputPath = LOCAL_PATH;
}
if (global.buildType === "release") {
global.outputPath = RELEASE_PATH;
}
if (grunt.option("target")) {
global.outputPath = grunt.option("target");
}
grunt.option("target", global.outputPath);
grunt.log.writeln("Output path: " + grunt.option("target"));
});
And the task defined in initConfig() looked like this:
clean: {
build: {
src: ["<%= grunt.option(\"target\") %>"]
}
}
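For completeness, the tasks then need to run in order so the option is set before clean reads it; a simple alias can do that (the alias name build here is just an example):
grunt.registerTask("build", [
  "setBuildType:local", // or setBuildType:tfs / setBuildType:release
  "setOutput",
  "clean:build"
]);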
Feel free to chime in if you have a better solution. Otherwise, perhaps this may help someone else.
I have a way to do this that allows you to specify the output path with flags like --dev. So far it's working very well and I quite like it, so I thought I'd share it in case someone else finds it useful.
# Enum for target switching behavior
TARGETS =
dev: 'dev'
dist: 'dist'
# Configurable paths and globs
buildConfig =
dist: "dist"
dev: '.devServer'
timestamp: grunt.template.today('mm-dd_HHMM')
grunt.initConfig
cfg: buildConfig
cssmin:
crunch:
options: report: 'min'
files: "<%= grunt.option('target') %>/all-min.css": "/**/*.css"
# Set the output path for built files.
# Most tasks will key off this so it is a prerequisite
setPath = ->
if grunt.option 'dev'
grunt.option 'target', buildConfig.dev
else if grunt.option 'dist'
grunt.option 'target', "#{buildConfig.dist}/#{buildConfig.timestamp}"
else # Default path
grunt.option 'target', buildConfig.dev
grunt.log.writeln "Output path set to: `#{grunt.option 'target'}`"
grunt.log.writeln "Possible targets:"
grunt.log.writeln target for target of TARGETS
setPath()
With this setup, you can run commands like:
grunt cssmin --dist   # sent to the dist target
grunt cssmin --dev    # sent to the dev target
grunt cssmin          # sent to the default target (dev)
This is an older question, but I thought I'd throw in my 5 cents.
If you need a config variable to be accessible from any task, just define it in your main config file (the one you'll always load) like this:
module.exports = function(grunt)
{
//
// Common project configuration
//
var config =
{
pkg: grunt.file.readJSON('package.json'),
options: // for 'project'
{
dist:
{
outputPath: '<%= process.cwd() %>/lib',
},
dev:
{
outputPath: '<%= process.cwd() %>/build',
},
},
}
grunt.config.merge( config )
}
Then you can simply access the value like this:
In config file(s):
...
my_thingie:
{
    ends_up_here: '<%= options.dev.outputPath %>/bundle',
},
...
In tasks:
// as raw value
grunt.config.data.options.dist.outputPath
// after (eventual) templates have been processed
grunt.config('options.dist.outputPath')
I used the key options here just to be in line with convention, but you can use anything, as long as you remember not to register a task named 'options' (or whatever you used for the key) :)
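For instance, a small task that reads those values might look like this (the task name whereAmI is just illustrative):
grunt.registerTask('whereAmI', 'Print the configured output paths', function () {
  // grunt.config() runs the <%= %> templates before returning the value
  grunt.log.writeln('dist -> ' + grunt.config('options.dist.outputPath'));
  grunt.log.writeln('dev  -> ' + grunt.config('options.dev.outputPath'));
});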
I have a gulp file set up to watch for changes. I'm developing an application in ReactJS using the Redux architecture. What I've noticed is that gulp does not watch for any changes in the SCSS files.
/*eslint-disable */
var path = require('path');
var runSequence = require('run-sequence');
var install = require('gulp-install');
var spawn = require('child_process').spawn;
var $ = require('gulp-load-plugins')({
pattern: [
'gulp',
'gulp-*',
'gulp.*',
'merge-stream',
'del',
'browserify',
'watchify',
'vinyl-source-stream',
'vinyl-transform',
'vinyl-buffer',
'glob',
'lodash',
'less-plugin-*',
'mochify'
],
replaceString: /^gulp(-|\.)/,
rename: {
'merge-stream': 'mergeStream',
'del': 'delete'
}
});
var env = require('env-manager')({
argv: process.argv,
dir: path.join(__dirname, 'environments'),
base: 'base.js',
pattern: '{env}.js',
defaults: {
'env': 'development'
}
});
$.util.log($.util.colors.magenta('Running in ' + env.name + ' environment'));
require('gulp-tasks-registrator')({
gulp: $.gulp,
dir: path.join(__dirname, 'tasks'),
args: [$, env],
verbose: true,
panic: true,
group: true
});
$.gulp.task('clean', ['clean:server', 'clean:client'], function task(done) {
done();
});
$.gulp.task('install', function () {
return $.gulp.src([ './package.json']).pipe(install());
});
$.gulp.task('build', function task(done) {
return runSequence(
//'lint',
// 'install',
'clean',
'build:server',
'build:client:images',
'build:client:fonts',
[
'build:client:scripts',
'build:client:styles'
],
'build:client:html',
done
);
});
$.gulp.task('run-wrapper', function(done) {
var server = spawn('node', ['serviceWrapper.js'], {stdio: ['inherit']});
server.stderr.on('data', function(data){
process.stderr.write(data);
});
server.stdout.on('data', function(data) {
process.stdout.write(data);
});
server.unref();
});
$.gulp.task('default', function task(done) {
runSequence('build', ['serve', 'run-wrapper','watch'], done);
});
$.gulp.task('run', function task(done) {
runSequence('serve', done);
});
/*eslint-enable */
In what you've provided, there's no watch task or Sass task (though you do call a task named watch, so if running gulp, the default task, isn't giving you an error, you must have defined watch somewhere, most likely in the ./tasks directory that gulp-tasks-registrator loads).
There are two Sass plugins for gulp, one using Ruby Sass (gulp-ruby-sass) and one using LibSass (gulp-sass). You can read about the difference here, but in short gulp-sass will probably be faster. The best way to find out is to try one and then the other and compare gulp's console logs (where it says "finished task after x ms").
Here's a Sass-watching example, edited very slightly from the example in the gulp-sass readme (it assumes gulp-sass is in your package.json, in which case it will have been loaded by your gulp-load-plugins call). The $. prefixes have been added to match the code you provided:
$.gulp.task('sass', function () {
  return $.gulp.src('yourstylespath/*.scss') // grab the .scss files
    .pipe($.sass().on('error', $.sass.logError)) // compile them into CSS, logging any errors
    .pipe($.gulp.dest('yourcompiledcsspath')); // save them in yourcompiledcsspath
});
$.gulp.task('sass:watch', function () {
  $.gulp.watch('yourstylespath/*.scss', ['sass']); // run the task 'sass' when any .scss file in yourstylespath changes
});
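As for why nothing recompiles right now: your gulpfile registers its tasks from the ./tasks directory, so the watch task itself isn't shown here, but the likely fix is to make sure that task also watches an SCSS glob and triggers build:client:styles, along these lines (the path is a placeholder for wherever your SCSS lives):
$.gulp.task('watch', function () {
  // ...existing watchers for scripts, html, etc. ...
  $.gulp.watch('app/**/*.scss', ['build:client:styles']); // the piece that is probably missing
});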
Side notes:
Considering all the packages you're using that don't follow the "gulp-packagename" naming scheme, it might be cleaner to just require them individually, like this (it of course depends on how many packages you're using):
var del = require('del'), // "delete" is a reserved word in JS, so keep the name "del"
mergeStream = require('merge-stream'),
...;
Looks like your run task could just be this? $.gulp.task('run', ['serve']);
I wonder if there is an easy way to detect if two tasks write to the same file.
In this example there is a /js directory alongside a /ts directory. The /ts files get transpiled into the same output directory as the /js files. There shouldn't be any collisions; if there are, the ts output should win, but I would like a warning that a collision occurred.
gulp.task('js', function() {
return es.concat(
gulp.src(config.src.path('js', '**', '*.js'))
.pipe(gulp.dest(config.build.path(app, 'js')))
//, ....
);
});
gulp.task('ts', ['js'], function() {
var tsResult = gulp.src(config.src.path('ts', '**', '*.ts'))
.pipe(ts({
declaration: true,
noExternalResolve: true
}));
return es.concat([
tsResult.dts.pipe(gulp.dest(
config.build.path(app, 'definitions'))),
tsResult.js.pipe(gulp.dest(
config.build.path(app, 'js'))) // <--- same dest as js task
]);
})
Can I detect that the ts task is overwriting a file that the js task just put in place?
Just an idea. You can pass a callback to gulp.dest like this:
var fs = require('fs'); // needed for the existsSync check below
gulp.src('lib/*.js')
.pipe(uglify())
.pipe(gulp.src('styles/*.css'))
.pipe(gulp.dest(function(file) {
if (fs.existsSync('something here')) { // it's a deprecated call, use a newer one
console.warn("File exists", file);
}
// I don't know, you can do something cool here
return 'build/whatever';
}));
The feature is available since Gulp 3.8: https://github.com/gulpjs/gulp/blob/master/CHANGELOG.md#380
Other resources:
https://stackoverflow.com/a/29437418/99256
https://stackoverflow.com/a/29817916/99256
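Building on that idea, here is a rough sketch (untested; the helper name collisionDest and the shared lookup are made up for illustration, and it assumes gulp and the config helpers from the question are in scope). Both the js and ts tasks would pipe to collisionDest(...) instead of gulp.dest(...), and the second write of any path triggers a warning:
var path = require('path');
var written = {}; // output paths already written by an earlier task in this run

function collisionDest(destDir) {
  return gulp.dest(function (file) {
    var target = path.join(destDir, file.relative);
    if (written[target]) {
      console.warn('Collision: ' + target + ' is being overwritten');
    }
    written[target] = true;
    return destDir; // gulp.dest still writes to the normal folder
  });
}
Since the ts task already depends on the js task, the js files will have been recorded by the time the TypeScript output is written.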
I need to run React in production mode, which presumably entails defining the following somewhere in the environment:
process.env.NODE_ENV = 'production';
The issue is that I'm running this behind Tornado (a Python web server), not Node.js. I also use Supervisord to manage the Tornado instances, so it's not abundantly clear how to set this in the running environment.
I do, however, use Gulp to build my JSX files to JavaScript.
Is it possible to somehow set this inside Gulp? And if so, how do I check that React is running in production mode?
Here is my Gulpfile.js:
'use strict';
var gulp = require('gulp'),
babelify = require('babelify'),
browserify = require('browserify'),
browserSync = require('browser-sync'),
source = require('vinyl-source-stream'),
uglify = require('gulp-uglify'),
buffer = require('vinyl-buffer');
var vendors = [
'react',
'react-bootstrap',
'jquery',
];
gulp.task('vendors', function () {
var stream = browserify({
debug: false,
require: vendors
});
stream.bundle()
.pipe(source('vendors.min.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(gulp.dest('build/js'));
return stream;
});
gulp.task('app', function () {
var stream = browserify({
entries: ['./app/app.jsx'],
transform: [babelify],
debug: false,
extensions: ['.jsx'],
fullPaths: false
});
vendors.forEach(function(vendor) {
stream.external(vendor);
});
return stream.bundle()
.pipe(source('build.min.js'))
.pipe(buffer())
.pipe(uglify())
.pipe(gulp.dest('build/js'));
});
gulp.task('watch', [], function () {
// gulp.watch(['./app/**/*.jsx'], ['app', browserSync.reload]);
gulp.watch(['./app/**/*.jsx'], ['app']);
});
gulp.task('browsersync',['vendors','app'], function () {
browserSync({
server: {
baseDir: './',
},
notify: false,
browser: ["google chrome"]
});
});
gulp.task('default',['browsersync','watch'], function() {});
2017 - Edit: anyone trying to set up React in Gulp for a new project: Just use create-react-app
Step I: Add the following to your gulpfile.js somewhere
gulp.task('apply-prod-environment', function() {
process.env.NODE_ENV = 'production';
});
Step II: Add it to your default task (or whichever task you use to serve/build your app)
// before:
// gulp.task('default',['browsersync','watch'], function() {});
// after:
gulp.task('default',['apply-prod-environment', 'browsersync','watch'], function() {});
OPTIONAL: If you want to be ABSOLUTELY CERTAIN that you are in prod mode, you can create the following slightly enhanced task instead of the one in Step I:
gulp.task('apply-prod-environment', function() {
process.stdout.write("Setting NODE_ENV to 'production'" + "\n");
process.env.NODE_ENV = 'production';
if (process.env.NODE_ENV != 'production') {
throw new Error("Failed to set NODE_ENV to production!!!!");
} else {
process.stdout.write("Successfully set NODE_ENV to production" + "\n");
}
});
Which will throw the following error if NODE_ENV is ever not set to 'production'
[13:55:24] Starting 'apply-prod-environment'...
[13:55:24] 'apply-prod-environment' errored after 77 μs
[13:55:24] Error: Failed to set NODE_ENV to production!!!!
Similar to the other answers, but hopefully gives someone a starting point:
var vendorList = ['react', 'react-dom'];
gulp.task('vendor-dev', function() {
browserify()
.require(vendorList)
.bundle()
.on('error', handleErrors)
.pipe(source('vendor.js'))
.pipe(gulp.dest('./build/dev/js'));
});
gulp.task('vendor-production', function() {
process.env.NODE_ENV = 'production';
browserify()
.require(vendorList)
.bundle()
.on('error', handleErrors)
.pipe(source('vendor.js'))
.pipe(buffer())
.pipe(uglify({ mangle: false }))
.pipe(gulp.dest('./build/production/js'));
});
The main difference is that I am explicitly setting NODE_ENV prior to bundling the vendor libraries, since gulp tasks aren't guaranteed to run in order.
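If ordering matters for you, run-sequence (which the question's gulpfile already uses) can force it; for example, with task names borrowed from the snippets above (the production alias is hypothetical):
gulp.task('production', function (done) {
  // set NODE_ENV and bundle the vendors first, then build the app bundle
  runSequence('vendor-production', 'app', done);
});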
Am I running in production mode?
If you remove the uglify line (and the prior buffer) you will notice that the dev and production builds are nearly identical in size and match in line count.
The difference is that the production version will be littered with:
"production" !== "production" ? [show dev error] : [do nothing]
Most reputable minifiers (I believe) will strip out dead code such as the above, since the condition always evaluates to false.
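To make that concrete, this is the kind of check that gets removed (illustrative only):
// before minification the bundle still contains the statically false check:
if ("production" !== "production") {
  console.error('This is a dev-only warning');
}
// after dead-code elimination the whole block is dropped,
// because the condition can never be true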
But really how do I tell?
The easiest way to be sure is to go to the console of your running application and type:
React.createClass.toString();
The output should be:
"function (e){var t=function(e,t,n){this.__reactAutoBindMap&&c(this),"[....and more and more]
If you find createClass in the React source, you will see:
createClass: function (spec) {
var Constructor = function (props, context, updater) {
// This constructor is overridden by mocks. The argument is used
// by mocks to assert on what gets mounted.
if ("production" !== 'production') {
"production" !== 'production' ? warning(this instanceof Constructor, 'Something is calling a React component directly. Use a factory or ' + 'JSX instead. See: react-legacyfactory') : undefined;
}
// Wire up auto-binding
if (this.__reactAutoBindMap) {
bindAutoBindMethods(this);
}
Notice how the console output skips straight through to this.__reactAutoBindMap: because you are running in production mode and using a minifier, all the !== 'production' warnings and checks have been stripped out.
Unfortunately none of the above answers work, because setting process.env.NODE_ENV has no effect in Browserify. The resulting bundle still has process.env.NODE_ENV references in it and hence
Browserify will not require() the React production version modules,
the minifier will not be able to remove dead code, and
the application will still be running in debug mode.
This is unfortunately not the only place where this approach is offered as the correct answer :-(
The correct approach can be found in e.g.
https://github.com/hughsk/envify/issues/15#issuecomment-62229101
https://reactjs.org/docs/optimizing-performance.html#browserify
You need to switch the envify transform to be a global one, e.g.
# note the "-g" instead of the usual "-t"
$ browserify ... -g [ envify --NODE_ENV production ] ....
or in gulpfile.js
browserify(...)
...
.transform('envify', {
global: true, // also apply to node_modules
NODE_ENV: debug ? 'development' : 'production',
})
...
.bundle()
...
.pipe(gulpif(!debug, babelMinify())) // my example uses gulp-babel-minify
...
To set React in production mode you need to set your NODE_ENV variable to production and uglify your JS as an extra step.
You're already taking care of the uglification; as for setting your NODE_ENV variable:
Set the variable while running the gulp task:
NODE_ENV='production' gulp
Or set it from inside your gulpfile by doing something like this:
gulp.task('set-production-env', function() {
return process.env.NODE_ENV = 'production';
});
You can also use the handy gulp-environments plugin:
var environments = require('gulp-environments');
var production = environments.production;
gulp.src(paths.js)
.pipe(concat("app.js"))
// only minify the compiled JS in production mode
.pipe(production(uglify()))
.pipe(gulp.dest("./public/app/js/"));
To run gulp in production mode:
gulp build --env=production
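The same wrapper works the other way round for development-only steps; a small sketch (assuming gulp-sourcemaps is installed, purely illustrative):
var development = environments.development;
var sourcemaps = require('gulp-sourcemaps');

gulp.src(paths.js)
  .pipe(development(sourcemaps.init())) // only generate sourcemaps in development mode
  .pipe(concat("app.js"))
  .pipe(production(uglify()))
  .pipe(development(sourcemaps.write()))
  .pipe(gulp.dest("./public/app/js/"));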
I would like to configure and execute a task that copies a specified file into each directory recursively. Specifically, I would like "index.php" copied into each directory and subdirectory of my web project, if it doesn't already exist. I've tried to use multidest and copy, but multidest doesn't seem to allow specifying paths by means of a wildcard (too bad!). How could I solve this? Thank you.
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
multidest: {
tst: {
tasks: ['copy:indexphp'],
dest: '**/' //THIS DOESN'T WORK, I SHOULD SPECIFY ANY DIR/SUBDIR/ETC..
},
},
copy: {
indexphp: {
expand: true,
cwd: 'classes',
src: 'index.php',
filter: function (filepath) {
// NPM load file path module.
var path = require('path');
// Construct the destination file path.
var dest = path.join(
grunt.task.current.data.dest,
path.basename(filepath)
);
// Return false if the file exists.
return !(grunt.file.exists(dest));
}
},
[...]
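One way to sidestep multidest entirely is to register a small task that walks the directories with grunt's file API; a rough sketch (the webroot glob and the spreadIndex name are placeholders, and classes/index.php matches the cwd/src from the config above):
grunt.registerTask('spreadIndex', 'Copy index.php into every subdirectory that lacks one', function () {
  var path = require('path');
  // expand every directory under the web root (adjust the glob to your project layout)
  grunt.file.expand({ filter: 'isDirectory' }, 'webroot/**/').forEach(function (dir) {
    var dest = path.join(dir, 'index.php');
    if (!grunt.file.exists(dest)) {
      grunt.file.copy('classes/index.php', dest);
    }
  });
});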
The following code reads the contents of every js subdirectory inside app/modules/ (e.g. app/modules/module1/js/, app/modules/module2/js/, and so on).
This script used to work WITHOUT the last line, grunt.task.run('concat:' + dir);.
A while ago it stopped working, so I had to add a call to the concat task inside the forEach loop.
Normally I would just have saved the new targets into the concat configuration and run the resulting concat task later.
grunt.registerTask('preparemodulejs', 'iterates over all module directories and compiles modules js files', function() {
// read all subdirectories from your modules folder
grunt.file.expand('./app/modules/*').forEach(function(dir){
// get the current concat config
var concat = grunt.config.get('concat') || {};
// set the config for this modulename-directory
concat[dir] = {
src: [dir + '/js/*.js', '!' + dir + '/js/compiled.js'],
dest: dir + '/js/compiled.js'
};
// save the new concat config
grunt.config.set('concat', concat);
grunt.task.run('concat:' + dir); // this line is new
});
});
What exactly changed in recent versions so that I have to add an explicit task.run line?
And is there any way to merge these settings into an existing concat configuration so that other, manually added targets won't run for each directory scanned?
Thanks for the help.
grunt.task.run(), despite its name, does not run tasks immediately. Grunt is always synchronous, so grunt.task.run() will queue tasks to run after the current task has finished.
So I would avoid calling grunt.task.run() inside the loop and instead build a list of tasks/targets to run afterward:
grunt.registerTask('preparemodulejs', 'iterates over all module directories and compiles modules js files', function() {
var tasks = [];
// read all subdirectories from your modules folder
grunt.file.expand('./app/modules/*').forEach(function(dir){
// get the current concat config
var concat = grunt.config.get('concat') || {};
// set the config for this modulename-directory
concat[dir] = {
src: [dir + '/js/*.js', '!' + dir + '/js/compiled.js'],
dest: dir + '/js/compiled.js'
};
// save the new concat config
grunt.config.set('concat', concat);
tasks.push('concat:' + dir);
});
// queues the tasks and run when this current task is done
grunt.task.run(tasks);
});
For bigger projects with multiple modules, we can also build the config for other tasks on the fly in the same way, even if we need to process files outside of the project's root directory:
grunt.registerTask('publishapp', 'uglify ivapp.js and upload to server', function (){
var tasks = [];
grunt.file.expand('../outerdirectory/').forEach(function(dir) {
// config for uglify that needs to execute before uploading on server
var uglify = {
options: {
compress: {
drop_console: true,
},
banner: '/* Banner you want to put above js minified code. */\n'
},
all: {
files: [{
expand: true,
cwd: '../',
src: ['somedir/some.js'],
dest: 'build',
ext: '.js',
extDot: 'last'
}]
}
};
// set grunt config : uglify
grunt.config.set('uglify', uglify);
});
// prepare a tasks list
tasks.push('uglify:all');
tasks.push('exec:publish');
// execute a tasks to perform
grunt.task.run(tasks);
});