By default, browserify does not run transforms on modules included from node_modules, i.e. modules required by name rather than by a relative path.
I made a quick github repo that illustrates it here. The index.js file that gets browserified looks like this:
var fs = require('fs');
var testmodule = require('testmodule');
var trg1 = document.getElementById("target1");
var trg2 = document.getElementById("target2");
trg1.innerHTML = fs.readFileSync(__dirname+"/something.txt");
trg2.innerHTML = testmodule();
testmodule looks like this:
var fs = require('fs');
exports = module.exports = function() {
  return fs.readFileSync(__dirname+'/data.txt');
}
Using the brfs transform module, I want to be able to inline both calls to fs.readFileSync, but when I run browserify index.js -t brfs -o bundle.js, only the call in my main project gets inlined. Here is the bundle.js result:
;(function(e,t,n){function r(n,i){if(!t[n]){if(!e[n]){var s=typeof require=="function"&&require;if(!i&&s)return s(n,!0);throw new Error("Cannot find module '"+n+"'")}var o=t[n]={exports:{}};e[n][0](function(t){var i=e[n][1][t];return r(i?i:t)},o,o.exports)}return t[n].exports}for(var i=0;i<n.length;i++)r(n[i]);return r})({1:[function(require,module,exports){
// nothing to see here... no file methods for the browser
},{}],2:[function(require,module,exports){
var fs = require('fs');
var testmodule = require('testmodule');
var trg1 = document.getElementById("target1");
var trg2 = document.getElementById("target2");
trg1.innerHTML = "This is data from a file in the main project folder"; // TRANSFORMED
trg2.innerHTML = testmodule();
},{"fs":1,"testmodule":3}],3:[function(require,module,exports){
(function(__dirname){var fs = require('fs');
exports = module.exports = function() {
return fs.readFileSync(__dirname+'/data.txt'); // NO TRANSFORM
}
})("/node_modules/testmodule")
},{"fs":1}]},{},[2])
;
Got some help from substack (the author of browserify) on this one. To tell browserify that a module outside of your project needs transforms, you declare a browserify.transform array in that module's package.json. So for the example I gave above, the package.json file in the testmodule directory looks like this:
{
  "name": "testmodule",
  "version": "0.0.0",
  "browserify": {
    "transform": ["brfs"]
  },
  "main": "index.js"
}
You can also use browserify -g brfs instead of browserify -t brfs: -g registers a global transform, which is also applied to dependencies.
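If you drive browserify through its node API instead of the CLI, the equivalent is roughly this (a minimal sketch, assuming brfs is installed in the project):
var fs = require('fs');
var browserify = require('browserify');

browserify('./index.js')
  // { global: true } mirrors the -g flag: the transform also runs on
  // files pulled in from node_modules
  .transform('brfs', { global: true })
  .bundle()
  .pipe(fs.createWriteStream('./bundle.js'));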
I am using the gulp and hercule packages on node.js to transclude some plain text files. On Unix, everything seems to work fine. However, some coworkers are having issues running it on Windows; they get the following error message, but only on Windows:
[13:02:01] TypeError: Cannot read property 'toString' of null at Object.transcludeStringSync (D:\project\node_modules\hercule\lib\hercule.js:136:36)
I have tried the above with hercule#3.0.5 as well as hercule#2.0.5, and both versions give the above error. However, given that this occurs only on Windows and across multiple versions of the package, I suspect this issue has something to do with the Node.js installation or path.
The code that is using the hercule package:
var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var drakov = require('drakov');
var hercule = require('hercule');

gulp.task('mock', ['i18n','build_minify_no_tests'], function() {
  var mockSpecificationTemplate = fs.readFileSync('test/mock/mock-template.apib', 'utf8');
  var transcludedMockSpecification = hercule.transcludeStringSync(mockSpecificationTemplate, {
    relativePath: path.resolve('../../../')
  });
  fs.writeFileSync('test/mock/mock.apib', transcludedMockSpecification, 'utf-8');

  // Running mock server
  var drakovArgv = {
    sourceFiles: 'test/mock/mock.apib',
    serverPort: 9000,
    staticPaths: [
      '../../'
    ],
    discover: true,
    watch: true
  };
  drakov.run(drakovArgv);
});
node and npm version information:
$ node -v
v6.3.0
$ npm -v
3.10.3
hercule.transcludeStringSync simply runs another hercule process and sends input to it:
const result = childProcess.spawnSync('../bin/hercule', syncArgs, syncOptions);
with the script ../bin/hercule:
#!/usr/bin/env node
"use strict";
require('../lib/main.js');
...which obviously doesn't work on Windows, since Windows cannot run a shebang script directly as a spawned child process.
If that call really must stay synchronous, you can use the following function instead:
function transcludeStringSync(input, options) {
  const {dirname, join} = require('path')
  // resolve hercule's CLI entry point from the installed package
  const hercule = join(dirname(require.resolve('hercule')), 'main')
  const args = [hercule, '--reporter', 'json-err']
  for (let name in options) {
    // pass each option as "--name value"
    args.push(`--${name}`, `${options[name]}`)
  }
  // spawn node itself, so the shebang line is never needed
  const result = require('child_process').spawnSync('node', args, {input})
  const err = result.stderr.toString()
  if (err) throw new Error('Could not transclude input')
  return result.stdout.toString()
}
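Usage then mirrors the original gulp task (a sketch, reusing the paths from the question; it assumes hercule's CLI accepts the same option names as flags):
var fs = require('fs');
var path = require('path');

var template = fs.readFileSync('test/mock/mock-template.apib', 'utf8');
var output = transcludeStringSync(template, {
  relativePath: path.resolve('../../../') // forwarded to the CLI as --relativePath <value>
});
fs.writeFileSync('test/mock/mock.apib', output, 'utf-8');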
I'm pretty new to Gulp, but by following this tutorial I set up a Gulp task that is meant to browserify JavaScript files in a particular directory and pipe them to a different directory - pretty simple. I've looked at a few other tutorials, but this method seemed to be the most concise. Here is my code:
var gulp = require('gulp');
var browserify = require('browserify');
var transform = require('vinyl-transform');

gulp.task('js', function() {
  var browserified = transform(function(filename) {
    return browserify(filename).bundle();
  });

  return gulp.src(['./public/js/src/**/*.js'])
    .pipe(browserified)
    .pipe(gulp.dest('./public/js/dist'));
});
The above code is very similar to many other implementations of this sort I've seen, but when I try running it with gulp js, it produces the following error:
[15:47:13] Using gulpfile ~/development/launchpad/workshop/gulpfile.js
[15:47:13] Starting 'js'...
_stream_readable.js:540
var ret = dest.write(chunk);
^
TypeError: undefined is not a function
at Producer.ondata (_stream_readable.js:540:20)
at Producer.emit (events.js:107:17)
at Producer.Readable.read (_stream_readable.js:373:10)
at flow (_stream_readable.js:750:26)
at resume_ (_stream_readable.js:730:3)
at _stream_readable.js:717:7
at process._tickCallback (node.js:355:11)
Does anyone know what might cause this error?
(As a side note, I'd like to look at the files from the stack trace to try to figure out what is going on here, but searching for _stream_readable.js in Spotlight yields about 20 files of that name, all seemingly Node modules. Is there a way to determine the full path of a file in a stack trace?)
var browserify = require('browserify');
var gulp = require('gulp');
var source = require('vinyl-source-stream');

gulp.task('browserify', function() {
  return browserify('lib/front/app.js')
    .bundle()
    // Pass desired output filename to vinyl-source-stream
    .pipe(source('bundle.js'))
    // Start piping stream to tasks!
    .pipe(gulp.dest('public/build/'));
});
If you want browserify to work with gulp.dest and write a file wherever you specify via .pipe(gulp.dest('src/js')), then you need to install vinyl-source-stream and add .pipe(source('bundle.js')) to the chain.
But browserify's bundle method itself accepts a callback, in which case neither gulp.dest nor vinyl-source-stream is needed:
// assumed requires for this snippet: fs, babelify, a minifier exposing
// minify(code, opts).code (e.g. uglify-js), events, and a browser-sync instance
const fs = require('fs');
const babelify = require('babelify');
const { minify } = require('uglify-js');
const EventEmitter = require('events');
const browserify = require('browserify');
const bs = require('browser-sync').create();

browserify({
  entries: jsFile, // jsFile: path to your entry script
  basedir: "src/js/dev",
  debug: true,
})
  .transform(babelify, {
    presets: ['@babel/preset-env'],
  })
  .bundle((err, buffer) => {
    let event = new EventEmitter();
    if (err) {
      event.emit('error', err);
    } else {
      let data = minify(buffer.toString(), {}).code;
      fs.createWriteStream('./src/js/bundle.js').write(data);
      console.dir(222);
      bs.reload();
    }
  });
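A sketch of how that callback-style bundle can sit inside a gulp task so gulp knows when the file has been written (the task name and the done callback are illustrative additions, reusing the requires and jsFile from the snippet above):
gulp.task('js', (done) => {
  browserify({ entries: jsFile, basedir: 'src/js/dev', debug: true })
    .transform(babelify, { presets: ['@babel/preset-env'] })
    .bundle((err, buffer) => {
      if (err) return done(err);
      // write synchronously here so done() is only called once the file exists
      fs.writeFileSync('./src/js/bundle.js', minify(buffer.toString(), {}).code);
      done();
    });
});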
Unfortunately, this is an issue with browserify/gulp, and there's nothing that vinyl-transform can do. The solution is to use vinyl-source-stream and vinyl-buffer:
var gulp = require('gulp');
var browserify = require('browserify');
var source = require('vinyl-source-stream');
var glob = require('glob');

gulp.task('browserify', function (cb) {
  glob('./src/**/*.js', {}, function (err, files) {
    var b = browserify();
    files.forEach(function (file) {
      b.add(file);
    });
    b.bundle()
      .pipe(source('output.js'))
      .pipe(gulp.dest('./dist'));
    cb();
  });
});
More information here.
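vinyl-buffer comes into play when the bundle has to pass through plugins that only accept buffers (gulp-uglify, gulp-sourcemaps, and so on). A minimal sketch, assuming gulp-uglify is installed, building on the b.bundle() call above:
var buffer = require('vinyl-buffer');
var uglify = require('gulp-uglify');

b.bundle()
  .pipe(source('output.js'))
  // turn the streaming vinyl file from vinyl-source-stream into a
  // buffered one so buffer-only plugins can work with it
  .pipe(buffer())
  .pipe(uglify())
  .pipe(gulp.dest('./dist'));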
I'm trying to add livereload to broccoli.
Unfortunately the live-reload plugin documentation is a bit short and I cannot get it to work. The docs say to do the following:
var injectLivereload = require('broccoli-inject-livereload');
var public = injectLivereload('public');
I figured that this should be placed inside the Brocfile.js (right?), but whatever I do, nothing gets reloaded (I have to hit reload to refresh). I've also tried changing the 'public' part, which I think represents a directory. Any help would be appreciated.
I'm using BrowserSync instead of "only" LiveReload. It also supports LiveReload (and LiveInject for CSS), but it has tons of other features as well (like ghosting).
Let's create a file called server.js and a folder called app next to it, where you put your index.html, .css and .js files. server.js contains:
var broccoli = require("broccoli");
var brocware = require("broccoli/lib/middleware");
var mergeTrees = require("broccoli-merge-trees");
var Watcher = require("broccoli-sane-watcher");
var browserSync = require("browser-sync");

var tree = mergeTrees(["app"]); // your public directory
var builder = new broccoli.Builder(tree);
var watcher = new Watcher(builder);

watcher.on("change", function(results) {
  if (!results.filePath) return;

  // Enable CSS live inject
  if (results.filePath.indexOf("css") > -1) {
    return browserSync.reload("*.css");
  }

  browserSync.reload();
});

browserSync({
  server: {
    baseDir: "./",
    middleware: brocware(watcher)
  }
});
Now fire the server (which will open the browser automatically):
node server.js
I know this isn't as straightforward as Gulp or Grunt at first sight, but it offers fine-grained control over everything and it stays blazing fast even as your app grows.
Instead of LiveReload I opted to use Browsersync via the Broccoli Browser Sync plugin.
My final Brocfile.js was very similar to this (pulled from the plugin's npm page):
var fastBrowserify = require('broccoli-fast-browserify');
var babelify = require('babelify');
var mergeTrees = require('broccoli-merge-trees');
var compileSass = require('broccoli-sass-source-maps');
var funnel = require('broccoli-funnel');
var BrowserSync = require('broccoli-browser-sync');

var optionalTransforms = [
  'regenerator'
  // 'minification.deadCodeElimination',
  // 'minification.inlineExpressions'
];

var babelOptions = {stage: 0, optional: optionalTransforms, compact: true};
// var browserifyOpts = {deps: true, entries: files, noParse: noParse, ignoreMissing: true};

var transformedBabelify = fastBrowserify('app', {
  browserify: {
    extensions: [".js"]
  },
  bundles: {
    'js/app.js': {
      entryPoints: ['app.js'],
      transform: {
        tr: babelify,
        options: {
          stage: 0
        }
      }
    }
  }
});

var appCss = compileSass(['piggy/frontend/app'], 'main.scss', 'css/app.css');
var staticFiles = funnel('frontend', {
  srcDir: 'static'
});

var browserSync = new BrowserSync([staticFiles, transformedBabelify, appCss]);

module.exports = mergeTrees([staticFiles, transformedBabelify, appCss, browserSync]);
With this solution I was able to keep serving my assets via broccoli serve, and everything, including my CSS, was rebuilt and then reloaded in the browser.
I have a project with a few relatively disjoint pages, each including their own entry point script. These scripts require a number of others using commonjs syntax, and need to be transformed by 6to5 and bundled by browserify.
I would like to set up a gulp task that captures all the files matching a pattern and passes them on to the bundler, but I'm not sure how to pass files from gulp.src to browserify(filename).
My gulpfile looks like:
var gulp = require("gulp");
var browserify = require("browserify");
var to5browserify = require("6to5-browserify");
var source = require("vinyl-source-stream");

var BUNDLES = [
  "build.js",
  "export.js",
  "main.js"
];

gulp.task("bundle", function () {
  /* Old version, using glob:
  return gulp.src("src/** /*.js")
    .pipe(sixto5())
    .pipe(gulp.dest("dist"));
  */

  // New version, using array:
  return BUNDLES.map(function (bundle) {
    return browserify("./src/" + bundle, {debug: true})
      .transform(to5browserify)
      .bundle()
      .pipe(source(bundle))
      .pipe(gulp.dest("./dist"));
  });
});

gulp.task("scripts", ["bundle"]);

gulp.task("html", function () {
  return gulp.src("src/**/*.html")
    .pipe(gulp.dest("dist"));
});

gulp.task("styles", function () {
  return gulp.src("src/**/*.css")
    .pipe(gulp.dest("dist"));
});

gulp.task("default", ["scripts", "html", "styles"]);
This seems to work, but isn't maintainable: I'll be adding more scripts relatively soon, and don't want to add them to the array every time.
I've tried using gulp.src(glob).pipe within the browserify call and piping after calling (shown here), and gulp.src(glob).map (method doesn't exist).
How can you chain gulp.src with a name-based transformer like browserify?
Use through2 to make a one-off custom plugin stream that does all of the dirty work.
Unfortunately, vinyl-transform and vinyl-source-stream (and the solutions that go along with them) have flaws, so we have to go for something custom.
var gulp = require('gulp');
var through = require('through2');
var browserify = require('browserify');

gulp.task('bundle', function() {
  var browserified = function() {
    return through.obj(function(chunk, enc, callback) {
      if (chunk.isBuffer()) {
        var b = browserify(chunk.path);
        // Any custom browserify stuff should go here
        //.transform(to5browserify);
        chunk.contents = b.bundle();
        this.push(chunk);
      }
      callback();
    });
  };

  return gulp.src(['./src/**/*.js'])
    .pipe(browserified())
    .pipe(gulp.dest('dest'));
});
You can specify globs in your BUNDLES array as well as exclude any files:
var BUNDLES = [
  "app/**/*.js",
  "export.js",
  "app/modules/**/*.js",
  "!app/modules/excluded/*.js"
];
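Wired into the through2-based task above, that array simply replaces the literal glob (a sketch, reusing the browserified() helper):
return gulp.src(BUNDLES)
  .pipe(browserified())
  .pipe(gulp.dest('dest'));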
I'm new to gulp, but I'm wondering if its possible to iterate through directories in a gulp task.
Here's what I mean: I know a lot of the tutorials/demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't only have 2 directories, but I have many (maybe 10-20) so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');
var scriptsPath = 'src/scripts';
function getFolders(dir) {
  return fs.readdirSync(dir)
    .filter(function(file) {
      return fs.statSync(path.join(dir, file)).isDirectory();
    });
}

gulp.task('scripts', function() {
  var folders = getFolders(scriptsPath);

  var tasks = folders.map(function(folder) {
    return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
      // concat into foldername.js
      .pipe(concat(folder + '.js'))
      // write to output
      .pipe(gulp.dest(scriptsPath))
      // minify
      .pipe(uglify())
      // rename to folder.min.js
      .pipe(rename(folder + '.min.js'))
      // write to output again
      .pipe(gulp.dest(scriptsPath));
  });

  // process all remaining files in scriptsPath root into main.js and main.min.js files
  var root = gulp.src(path.join(scriptsPath, '/*.js'))
    .pipe(concat('main.js'))
    .pipe(gulp.dest(scriptsPath))
    .pipe(uglify())
    .pipe(rename('main.min.js'))
    .pipe(gulp.dest(scriptsPath));

  return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

gulp.task('minify-features', function() {
  var promises = [];

  glob.sync('/js/features/*').forEach(function(filePath) {
    if (fs.statSync(filePath).isDirectory()) {
      var defer = Q.defer();
      var pipeline = gulp.src(filePath + '/**/*.js')
        .pipe(uglify())
        .pipe(concat(path.basename(filePath) + '.min.js'))
        .pipe(gulp.dest(filePath));
      pipeline.on('end', function() {
        defer.resolve();
      });
      promises.push(defer.promise);
    }
  });

  return Q.all(promises);
});
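The same pattern works without Q if your Node version ships native Promises (a sketch, using the same glob, gulp-uglify and gulp-concat requires as above):
gulp.task('minify-features', function() {
  var promises = glob.sync('/js/features/*')
    .filter(function(filePath) {
      return fs.statSync(filePath).isDirectory();
    })
    .map(function(filePath) {
      return new Promise(function(resolve, reject) {
        gulp.src(filePath + '/**/*.js')
          .pipe(uglify())
          .pipe(concat(path.basename(filePath) + '.min.js'))
          .pipe(gulp.dest(filePath))
          .on('end', resolve)
          .on('error', reject);
      });
    });
  return Promise.all(promises);
});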
I'm still trying to work out how streams work in node myself.
I made a simple example for you of how to make a stream that filters folders and starts a new, given stream for each of them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function(){
  return es.map(function(data, cb){
    log.data(data.path);
    cb(null, data);
  });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// it pipelines it with the stream given
var forEachFolder = function(stream){
  return es.map(function(data, cb){
    if (data.isDirectory()) {
      var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files for example
      log.info('Piping files found in ' + pathToPass);
      if (stream !== undefined) {
        gulp.src([pathToPass])
          .pipe(stream());
      }
    }
    cb(null, data);
  });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function(){
  // load some folder with some subfolders inside
  gulp.src('js/*')
    .pipe(forEachFolder(printFileNames));
  // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minify and concatenate them) and then pass an instance of this stream to the forEachFolder stream I made, like I do with the printFileNames custom stream.
Give it a try and let me know if it works for you.
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
//assumed requires at the top of the gulpfile
var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

//task for building feature1
gulp.task('minify-feature1', function() {
  return gulp.src('/js/feature1/*')
    .pipe(uglify())                   //minify feature1 stuff
    .pipe(concat('feature1.min.js'))  //concat into single file
    .pipe(gulp.dest('/js/feature1')); //output to dir
});

//task for building feature2
gulp.task('minify-feature2', function() { //do the same for feature2
  return gulp.src('/js/feature2/*')
    .pipe(uglify())
    .pipe(concat('feature2.min.js'))
    .pipe(gulp.dest('/js/feature2'));
});

//generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions so they can be processed by gulp.parallel - in a way where the number of generated functions can vary. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
  //Get list of folders as per recipe above - in my case an array named modules
  //For each module return a function or composition (gulp.series in this case).
  return modules.map(function (m) {
    var moduleDest = env.folder + 'modules/' + m;

    return gulp.series(
      //Illustrative functions... all must return a stream or call callback but you can
      //have as many functions or compositions (gulp.series or gulp.parallel) as desired
      function () {
        return gulp.src('modules/' + m + '/img/*', { buffer: false })
          .pipe(gulp.dest(moduleDest + '/img'));
      },
      function (done) {
        console.log('In my function');
        done();
      }
    );
  });
}
//Illustrative build task, running two named tasks then processing all modules generated above in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
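On Node versions with spread syntax, the apply() dance can be written more directly (a sketch):
gulp.task('build', gulp.series('clean', 'test', gulp.parallel(...getModuleFunctions())));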