I am learning browserify and I am trying to do two basic things with it:
Transform (via shim) non-CommonJS modules for ease-of-use and dependency tracking
Bundle the libraries that are project-specific
I've found a working process for how to do all of this and automate it with Gulp. This works and produces the right output, but I am curious whether it could be made simpler. It seems like I have to duplicate a lot of configuration in the project-based bundles. Here is the working example:
package.json
(comments added for clarification; they are not valid JSON)
{
//project info and dependencies omitted
//https://github.com/substack/node-browserify#browser-field
"browser": { //tell browserify about some of my libraries and where they reside
"jquery": "./bower_components/jquery/dist/jquery.js",
"bootstrap": "./bower_components/bootstrap/dist/js/bootstrap.js"
},
"browserify": {
//https://github.com/substack/node-browserify#browserifytransform
"transform": [
"browserify-shim"
]
},
"browserify-shim": {
//shim the modules defined above as needed
"jquery": {
"exports": "$"
},
"bootstrap": {
"depends": "jquery:$"
}
}
}
config.js
contains all task-runner related configuration settings
module.exports = {
browserify: {
// Enable source maps and leave un-uglified
debug: true,
extensions: [],
//represents a separate bundle per item
bundleConfigs: [
{
//I really want to refer to the bundles here made in the package.json but
//if I do, the shim is never applied and the dependencies aren't included
entries: ['/bundles/shared-bundle.js'],
dest: '/dist/js',
outputName: 'shared.js'
}
]
},
//...
};
shared-bundle.js
acts as the bundle's entry file; node loads the dependencies from here, and at this point the shim has been applied
require('bootstrap');
browserify-task.js
contains the browserify bundling gulp task
//module requires omitted; here, config refers to the browserify section of config.js
gulp.task('browserify', function (callback) {
var bundleQueue = config.bundleConfigs.length;
var browserifyBundle = function (bundleConfig) {
var bundler = browserify({
entries: bundleConfig.entries,
extensions: config.extensions,
debug: config.debug,
});
var bundle = function () {
return bundler.bundle()
// Use vinyl-source-stream to make the stream gulp compatible
.pipe(source(bundleConfig.outputName))
// Specify the output destination
.pipe(gulp.dest(bundleConfig.dest))
.on('end', reportFinished);
};
var reportFinished = function () {
if (bundleQueue) {
bundleQueue--;
if (bundleQueue === 0) {
// If queue is empty, tell gulp the task is complete
callback();
}
}
};
return bundle();
};
config.bundleConfigs.forEach(browserifyBundle);
});
In config.js, where the first bundleConfig item's entries is a path to a file containing the require() calls, I'd like to replace those with the names of the modules defined under the package.json browser key.
In the config.js, if I change the bundle configuration to:
bundleConfigs: [
{
entries: ['bootstrap'],
dest: '/dist/js',
outputName: 'shared.js'
}
]
and run the gulp task, it will include bootstrap.js, but it doesn't run the shim transformation, and jQuery is not included at all.
This leaves me with a few questions:
Is there a better way to be bundling my js for use in a non-SPA application (i.e. am I going about this the wrong way)?
If not, is there a way to ensure the shim transformation is run prior to the bundling so that I can have my bundle configuration in one place?
Certainly, you just have to tell your gulpfile that it should shim first. It looks like you can apply the shim yourself when calling browserify from your gulpfile; see the sketch below.
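As a minimal sketch (assuming browserify and browserify-shim are installed, and reusing the bundleConfig object from the task above), you could apply the shim transform explicitly, so it runs regardless of whether the package.json browserify.transform field is picked up:
var browserify = require('browserify');
var shim = require('browserify-shim');

var bundler = browserify({
  entries: bundleConfig.entries,
  debug: true
});
// Apply browserify-shim explicitly as a transform; it still reads the
// "browser" and "browserify-shim" configuration from package.json.
bundler.transform(shim);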
If you want to ensure everything is shimmed before you bundle them, use the deps array: "An array of tasks to be executed and completed before your task will run."
It would look something like this:
gulp.task('shim', function() {
// ...
});
gulp.task('browserify', ['shim'], function(){
// ...
});
Related
I'm trying to move from Gulp to Webpack. In Gulp I have a task which copies all files and folders from the /static/ folder to the /build/ folder. How do I do the same with Webpack? Do I need some plugin?
Requiring assets using the file-loader module is the way webpack is intended to be used (source). However, if you need greater flexibility or want a cleaner interface, you can also copy static files directly using my copy-webpack-plugin (npm, Github). For your static to build example:
const path = require('path');
const CopyWebpackPlugin = require('copy-webpack-plugin');
module.exports = {
context: path.join(__dirname, 'your-app'),
plugins: [
new CopyWebpackPlugin({
patterns: [
{ from: 'static' }
]
})
]
};
Compatibility note: If you're using an old version of webpack like webpack#4.x.x, use copy-webpack-plugin#6.x.x. Otherwise use latest.
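If you don't have the plugin yet, it installs as a regular dev dependency:
npm install --save-dev copy-webpack-plugin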
You don't need to copy things around; webpack works differently than gulp. Webpack is a module bundler and everything you reference in your files will be included. You just need to specify a loader for that.
So if you write:
var myImage = require("./static/myImage.jpg");
Webpack will first try to parse the referenced file as JavaScript (because that's the default). Of course, that will fail. That's why you need to specify a loader for that file type. The file-loader or url-loader, for instance, takes the referenced file, puts it into webpack's output folder (which should be build in your case) and returns the hashed url for that file.
var myImage = require("./static/myImage.jpg");
console.log(myImage); // '/build/12as7f9asfasgasg.jpg'
Usually loaders are applied via the webpack config:
// webpack.config.js
module.exports = {
...
module: {
loaders: [
{ test: /\.(jpe?g|gif|png|svg|woff|ttf|wav|mp3)$/, loader: "file" }
]
}
};
Of course you need to install the file-loader first to make this work.
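For reference, that would be:
npm install --save-dev file-loader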
If you want to copy your static files, you can use the file-loader in this way:
For html files:
In webpack.config.js:
module.exports = {
...
module: {
loaders: [
{ test: /\.(html)$/,
loader: "file?name=[path][name].[ext]&context=./app/static"
}
]
}
};
in your js file :
require.context("./static/", true, /^\.\/.*\.html/);
./static/ is relative to where your js file is.
You can do the same with images or whatever.
The context is a powerful method to explore!
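For example, the call also returns a context function that you can use to look up what each matched file resolved to; a small sketch reusing the pattern above:
// Each key is a matched file; calling the context function on a key returns
// whatever the configured loader produced for it (with file-loader, the
// emitted file name).
var ctx = require.context("./static/", true, /^\.\/.*\.html/);
ctx.keys().forEach(function(key) {
  console.log(key, '->', ctx(key));
});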
One advantage that the aforementioned copy-webpack-plugin brings that hasn't been explained before is that all the other methods mentioned here still bundle the resources into your bundle files (and require you to "require" or "import" them somewhere). If I just want to move some images around or some template partials, I don't want to clutter up my javascript bundle file with useless references to them, I just want the files emitted in the right place. I haven't found any other way to do this in webpack. Admittedly it's not what webpack originally was designed for, but it's definitely a current use case.
(#BreakDS I hope this answers your question - it's only a benefit if you want it)
Webpack 5 adds Asset Modules which are essentially replacements for common file loaders. I've copied a relevant portion of the documentation below:
asset/resource emits a separate file and exports the URL. Previously achievable by using file-loader.
asset/inline exports a data URI of the asset. Previously achievable by using url-loader.
asset/source exports the source code of the asset. Previously achievable by using raw-loader.
asset automatically chooses between exporting a data URI and emitting a separate file. Previously achievable by using url-loader with asset size limit.
To add one in you can make your config look like so:
// webpack.config.js
module.exports = {
...
module: {
rules: [
{
test: /\.(jpe?g|gif|png|svg|woff|ttf|wav|mp3)$/,
type: "asset/resource"
}
]
}
};
To control how the files get output, you can use templated paths.
In the config you can set the global template here:
// webpack.config.js
module.exports = {
...
output: {
...
assetModuleFilename: '[path][name].[hash][ext][query]'
}
}
To override for a specific set of assets, you can do this:
// webpack.config.js
module.exports = {
...
module: {
rules: [
{
test: /\.(jpe?g|gif|png|svg|woff|ttf|wav|mp3)$/,
type: "asset/resource"
generator: {
filename: '[path][name].[hash][ext][query]'
}
}
]
}
};
The provided templating will result in filenames that look like build/images/img.151cfcfa1bd74779aadb.png. The hash can be useful for cache busting, etc. You should modify it to your needs.
The above suggestions are good, but to answer your question directly I'd suggest using cpy-cli in a script defined in your package.json.
This example expects node to be somewhere on your path. Install cpy-cli (plus shelljs and chalk, which the helper scripts below use) as development dependencies:
npm install --save-dev cpy-cli shelljs chalk
Then create a couple of nodejs files. One to do the copy and the other to display a checkmark and message.
copy.js
#!/usr/bin/env node
var shelljs = require('shelljs');
var addCheckMark = require('./helpers/checkmark');
var path = require('path');
var cpy = path.join(__dirname, '../node_modules/cpy-cli/cli.js');
shelljs.exec(cpy + ' /static/* /build/', addCheckMark.bind(null, callback));
function callback() {
process.stdout.write(' Copied /static/* to the /build/ directory\n\n');
}
checkmark.js
var chalk = require('chalk');
/**
* Adds mark check symbol
*/
function addCheckMark(callback) {
process.stdout.write(chalk.green(' ✓'));
callback();
}
module.exports = addCheckMark;
Add the script to package.json, assuming the scripts are in <project-root>/scripts/:
...
"scripts": {
"copy": "node scripts/copy.js",
...
To run the script:
npm run copy
The way I load static images and fonts:
module: {
rules: [
....
{
test: /\.(jpe?g|png|gif|svg)$/i,
/* Exclude fonts while working with images, e.g. .svg can be both image or font. */
exclude: path.resolve(__dirname, '../src/assets/fonts'),
use: [{
loader: 'file-loader',
options: {
name: '[name].[ext]',
outputPath: 'images/'
}
}]
},
{
test: /\.(woff(2)?|ttf|eot|svg|otf)(\?v=\d+\.\d+\.\d+)?$/,
/* Exclude images while working with fonts, e.g. .svg can be both image or font. */
exclude: path.resolve(__dirname, '../src/assets/images'),
use: [{
loader: 'file-loader',
options: {
name: '[name].[ext]',
outputPath: 'fonts/'
},
}
]
}
]
}
Don't forget to install file-loader to have that working.
You can write bash in your package.json:
# package.json
{
"name": ...,
"version": ...,
"scripts": {
"build": "NODE_ENV=production npm run webpack && cp -v <this> <that> && echo ok",
...
}
}
Most likely you should use CopyWebpackPlugin, which was mentioned in kevlened's answer. Alternatively, for some kinds of files, like .html or .json, you can also use raw-loader or json-loader. Install it via npm install -D raw-loader, and then all you need to do is add another loader to your webpack.config.js file.
Like:
{
test: /\.html/,
loader: 'raw'
}
Note: Restart the webpack-dev-server for any config changes to take effect.
Now you can require html files using relative paths; this makes it much easier to move folders around.
template: require('./nav.html')
I was stuck here too. copy-webpack-plugin worked for me.
However, copy-webpack-plugin was not necessary in my case (as I learned later).
webpack ignores root paths. For example:
<img src="/images/logo.png">
Hence, to make this work without copy-webpack-plugin, use '~' in paths:
<img src="~images/logo.png">
'~' tells webpack to consider 'images' as a module
Note: you might have to add the parent directory of the images directory in
resolve: {
modules: [
'parent-directory of images',
'node_modules'
]
}
Visit https://vuejs-templates.github.io/webpack/static.html
The webpack config file (in webpack 2) allows you to export a promise chain, so long as the last step returns a webpack config object. See promise configuration docs. From there:
webpack now supports returning a Promise from the configuration file. This allows you to do async processing in your configuration file.
You could create a simple recursive copy function that copies your file, and only after that triggers webpack. E.g.:
module.exports = function() {
  return copyTheFiles(inpath, outpath).then(result => {
    return { entry: "..." }; // etc.
  });
};
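copyTheFiles is left undefined above; as a hedged sketch, on Node 16.7+ (where the built-in recursive fs.cp exists) it could be as simple as:
const fs = require('fs');

// Hypothetical helper: recursively copy inpath to outpath, returning a promise.
function copyTheFiles(inpath, outpath) {
  return fs.promises.cp(inpath, outpath, { recursive: true });
}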
Let's say all your static assets are in a folder "static" at the root level and you want to copy them to the build folder, maintaining the subfolder structure. Then, in your entry file, just put:
//index.js or index.jsx
require.context("!!file?name=[path][name].[ext]&context=./static!../static/", true, /^\.\/.*\.*/);
In my case I used webpack for a WordPress plugin to compress js files, where some plugin files were already compressed and needed to be skipped from the process.
// webpack.config.js (requires assumed at the top)
const path = require('path');
const glob = require('glob');

module.exports = {
optimization: {
minimize: false,
},
externals: {
"jquery": "jQuery",
},
entry: glob.sync('./js/plugin/**.js').reduce(function (obj, el) {
obj[path.parse(el).name] = el;
return obj
}, {}),
output: {
path: path.resolve(__dirname, './js/dist/plugin'),
filename: "[name].js",
clean: true,
},
};
That copies the js files as they are to the build folder. Using any other method, like file-loader or copy-webpack-plugin, created issues with that.
Hope it will help someone.
I need to execute one JavaScript function before Webpack starts its building process. The function just takes .scss files and concatenates them into one.
After that Webpack should take the result file. Is there an option to do that?
At the moment I run the function before module.exports in webpack.config.js, but it seems it's not a synchronous operation. module.exports executes before the concat() function ends, and Webpack can't find the .scss file.
function concat(opts) {
(...)
}
concat({ src : styles, dest : './css/style.scss' });
module.exports = [
(...)
]
It seems a little bit odd to concat scss files before running Webpack, as those kinds of operations are usually handled by Webpack itself.
That being said, there are a few ways of solving this.
The most obvious way would be to extract the concat parts to a separate file (e.g. prepare.js) and then start the build process by running something along these lines: node prepare.js && webpack. That'll first run prepare.js and, if it exits without error, webpack will run. Usually that'll be added to the scripts part of your package.json, e.g.
"scripts": {
"build": "node prepare.js && webpack"
}
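prepare.js itself could then be a sketch along these lines (the file list and output path here are hypothetical; substitute your own):
// prepare.js - concatenate the .scss sources into one file before webpack runs
const fs = require('fs');

const styles = ['./scss/base.scss', './scss/layout.scss']; // hypothetical list
const dest = './css/style.scss';

fs.writeFileSync(dest, styles.map(f => fs.readFileSync(f, 'utf8')).join('\n'));
console.log('Wrote ' + dest);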
To achieve the same in a more Webpack-integrated way, you could extract the concat part to a separate file and let Webpack execute that file before the build starts, with the help of Webpack Shell Plugin, e.g.
const WebpackShellPlugin = require('webpack-shell-plugin');
module.exports = {
...
plugins: [
new WebpackShellPlugin({
onBuildStart:['node prepare.js']
})
],
...
}
You can add any code at any phase of the building, using the Compiler Hooks.
The compile hook is called before (and every time) the compilation begins, so you probably want to use that:
module.exports = {
//...
plugins: [
{
apply: (compiler) => {
compiler.hooks.compile.tap("MyPlugin_compile", () => {
console.log("This code is executed before the compilation begins.");
});
},
},
],
//...
};
I've been using gulp for a while now and know how to import another node module, e.g.
var sass = require('gulp-sass');
That's fine, but my gulpfile is filling up with code that I'd like to move into a separate file and "require". Specifically I am writing a postcss plugin, which I already have working when declared as a function inside of the gulpfile. My question is how to put my function in an external file and require it like I do a node module. Do I need to "export" the function in the file being required? Do I need to use ES6 modules or something like that?
As an aside, I realise that if I was doing this properly I would either (A) turn this into a proper node module and put it on a private NPM repository, but that seems unnecessary, or (B) turn it into a proper gulp plugin, but that would require learning how to author a gulp plugin and learning about streams and stuff. Both of these are probably better but would take more time, so I've decided to just keep the function simple and local for now.
First create a new js file (here ./lib/myModule.js):
//./lib/myModule.js
module.exports = {
fn1: function() { /**/ },
fn2: function() { /**/ },
}
You could also pass some arguments to your module:
// ./lib/myAwesomeModule.js
var fn1 = function() {
}
module.exports = function(args) {
  // Return an object here; without the "return" the fn1/fn2 lines would be
  // parsed as labels inside the function body, not as object properties.
  return {
    fn1: fn1,
    fn2: function() {
      // do something with the args variable
    }
  };
};
Then require it in your gulpfile:
//gulpfile.js
var myModule = require('./lib/myModule')
// Note: here you require the module and call it with some parameters
var myAwesomeModule = require('./lib/myAwesomeModule')({
super: "duper",
env: "development"
});
// you could also have done
/*
var myAwesomeModuleRequire = require('./lib/myAwesomeModule')
var myAwesomeModule = myAwesomeModuleRequire({
super: "duper",
env: "development"
});
*/
gulp.task('test', function() {
  return gulp.src()
    .pipe(myModule.fn1)
    .pipe(myAwesomeModule.fn1)
    .pipe(gulp.dest());
});
First, you have to add export default <nameOfYourFile> at the end of your file
Then to use it, write import gulp from 'gulp'
If you have an error message, install babel-core and babel-preset-es2015 with NPM, and add a preset "presets": ["es2015"] in your .babelrc config file.
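A minimal sketch of that setup, assuming a Babel-transpiled gulpfile (gulpfile.babel.js) and a hypothetical ./lib/myModule.js:
// ./lib/myModule.js
export default function myPostcssPlugin() {
  // ...
}

// gulpfile.babel.js
import gulp from 'gulp';
import myPostcssPlugin from './lib/myModule';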
I fixed my problem by installing:
npm i babel-plugin-add-module-exports
Then I added "plugins": [["add-module-exports"]] to the .babelrc.
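The resulting .babelrc would look something like this (the es2015 preset is shown as an assumption; keep whatever presets you already use):
{
  "presets": ["es2015"],
  "plugins": [["add-module-exports"]]
}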
I tried to write this code:
gulp.task('script', function() {
'use strict'
return gulp.src(['app.js', 'components/**/*.jsx'])
.pipe(babel())
.pipe(browserify())
.pipe(gulp.dest("dist"));
});
but it showed this error:
SyntaxError:
/Users/Zizy/Programming/learn-react-js/components/CommentBox.jsx:58
<div className="commentBox">
^
ParseError: Unexpected token
at wrapWithPluginError (/Users/Zizy/Programming/learn-react-js/node_modules/gulp-browserify/index.js:44:10)
It seems that gulp didn't transform the JSX code before .pipe(browserify()). But if I just remove .pipe(browserify()), I find that it did transform it; I just cannot get babel and browserify to work together.
I know I could probably use babelify or a browserify plugin for babel instead; I just want to figure out the reason.
gulp-browserify doesn't quite work like that. You don't give it a bunch of buffers to collect and bundle.
You give it one file—the entry file—which it passes into Browserify. Browserify checks to see what other files the entry file references, then loads those files directly from the file system, meaning that you can't modify them with gulp plugins beforehand.
So, really, if we pretend you don't want to use Babel on your source files, your gulpfile should look like this, only passing in the entry file:
gulp.task('script', function() {
'use strict'
return gulp.src('app.js')
.pipe(browserify())
.pipe(gulp.dest("dist"));
});
However, note that gulp-browserify is no longer maintained, and this is exactly why. gulp plugins aren't supposed to read directly from the file system. That's why you're supposed to use Browserify (or, in your case, Babelify) directly with vinyl-source-stream as recommended in the gulp recipes. It's more idiomatic and less confusing.
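For reference, a minimal sketch of that recommended setup (assuming browserify, babelify and vinyl-source-stream are installed):
var gulp = require('gulp');
var browserify = require('browserify');
var babelify = require('babelify');
var source = require('vinyl-source-stream');

gulp.task('script', function() {
  return browserify({ entries: 'app.js' })
    .transform(babelify) // run Babel on each file as Browserify loads it
    .bundle()
    .pipe(source('app.js')) // make the bundle stream gulp-compatible
    .pipe(gulp.dest('dist'));
});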
That wraps up my answer to your question, but I'd like to add: if you're using the ES2015 module syntax (and you probably should be), there's a better way to do this. Browserify wraps all your modules separately in a bunch of code to make the programmatic CommonJS API work properly, but ES2015 modules have a declarative syntax, which makes it much easier for tools to operate on them statically. There's a tool called Rollup that takes advantage of this, allowing it to produce bundles that are smaller, faster, and more minification-friendly than Browserify's.
Here's how you might use it with gulp:
var gulp = require('gulp'),
rollup = require('rollup-stream'),
babel = require('gulp-babel'),
source = require('vinyl-source-stream'),
buffer = require('vinyl-buffer');
gulp.task('script', function() {
return rollup({entry: 'app.js'})
.pipe(source('app.js'))
.pipe(buffer())
.pipe(babel())
.pipe(gulp.dest('dist'));
});
Starting from Babel 6 you need to declare the presets manually.
Basically, in the root of your project you need a .babelrc with the following content:
{
"presets": [ "es2015", "react" ]
}
And the corresponding npm modules in package.json:
// package.json
{
"devDependencies": {
...
"babel-preset-es2015": "^6.1.18",
"babel-preset-react": "^6.1.18",
...
}
}
Here is a sample repository with gulp, babel and browserify
Following is the code snippet
gulp.task("js", (done) => {
const bundler = browserify({ entries: paths.js.source }, { debug: true }).transform(babel);
bundler.bundle()
.on("error", function (err) { console.error(err); this.emit("end"); })
.pipe(source(paths.build.destMinJSFileName))
.pipe(buffer())
.pipe(sourcemaps.init({ loadMaps: true }))
.pipe(uglify())
.pipe(sourcemaps.write(paths.js.destMapFolder))
.pipe(gulp.dest(paths.build.destBuildFolder))
.on("end", done); // signal completion only after the stream has finished
});
I'm trying to use grunt-newer to watch files from a folder and if any is changed, trigger a custom task.
I have something like this in my Gruntfile.js:
grunt.initConfig({
watch: {
widgets: {
files: "/somepath/*.js",
tasks: ['newer:mycustomtask']
}
}
});
grunt.registerTask("mycustomtask", ["description of my task"], function() {
console.log("me has been triggered");
});
Whenever I run "grunt watch", I have this output:
Running "watch" task
Waiting...
File "/somepath/WidgetA.js" changed.
Running "newer:mycustomtask" (newer) task
Fatal error: The "newer" prefix is not supported for aliases
I googled but didn't find anything about this. Does anyone know how I could implement this? I need to know in my "customtask" which files have been changed.
If you reference a task (inside watch or concurrent, e.g.) that is either not installed or not configured, you get this error output.
This happens often when you copy-paste a watch config from a different project.
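As a quick sketch of the shape grunt-newer expects (a multi-task with files configured; the names here are illustrative):
grunt.initConfig({
  mycustomtask: {
    all: { src: ['/somepath/*.js'] }
  }
});

// grunt-newer works with (multi)tasks that have src files configured,
// not with plain aliases registered via grunt.registerTask.
grunt.registerMultiTask('mycustomtask', function() {
  this.files[0].src.forEach(function(f) {
    grunt.log.writeln(f);
  });
});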
I came across a similar requirement and the solution I ended up with is roughly as follows. Let's assume that the project structure is:
Gruntfile.js
package.json
src/
config.js
data.js
tasks/
customtask.js
Here, the src directory contains data which will be monitored by watch, while the definition of the custom task is stored in tasks/customtask.js. For the purpose of this example, this task will only print the file names of the changed files:
var fs = require('fs');
var path = require('path');
module.exports = function(grunt) {
grunt.registerMultiTask('customtask', function() {
var done = this.async();
if(!this.files){ done(); return; }
this.files[0].src.forEach(file_name => {
console.log(file_name);
});
done();
});
};
Now, Gruntfile.js looks like:
module.exports = function(grunt) {
const files = ['src/config.js', 'src/data.js'];
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
customtask: {
release: {
src: files
}
},
watch: {
data: {
files: files,
tasks: ['customtask:release']
},
options: {
spawn: false
}
}
});
grunt.loadTasks('tasks');
grunt.loadNpmTasks('grunt-contrib-watch');
var changedFiles = Object.create(null);
var onChange = grunt.util._.debounce(function() {
grunt.config('customtask.release.src', Object.keys(changedFiles));
changedFiles = Object.create(null);
}, 200);
grunt.event.on('watch', function(action, filepath) {
changedFiles[filepath] = action;
onChange();
});
grunt.registerTask('build', ['watch:data']);
};
Here, it specifies that:
the files of interest are ['src/config.js', 'src/data.js']
that our customtask operates in principle on these files (in case it would be invoked directly)
that watch is supposed to observe these files and launch customtask:release whenever something changes
grunt.loadTasks('tasks') loads all "tasks definitions" from the directory tasks, i.e., here only the customtask
grunt.registerTask('build', ['watch:data']) defines a "shortcut" for watch:data
Finally, in order to invoke customtask only for the changed files, this example uses the strategy employed in the documentation in the section "Compiling files as needed". In loose terms, it assembles all changed files in an object the keys of which are then used to modify the src property of the customtask on-the-fly.
Running grunt build then initiates the "watch". If one then runs, for example, touch src/*.js in another terminal window, the output is:
Running "watch:data" (watch) task
Waiting...
>> File "src/config.js" changed.
>> File "src/data.js" changed.
Running "customtask:release" (customtask) task
src/config.js
src/data.js
where the last two lines come from customtask...
You just need to have a config entry (even an empty one) for your task:
grunt.initConfig({
mycustomtask: {
},
watch: {
widgets: {
files: "/somepath/*.js",
tasks: ['newer:mycustomtask']
}
}
});