package files by folder with gulp - javascript

I want this:
|-- guides
| |-- _guide.hbs
| |-- guide.hbs
| `-- index.hbs
|-- index.hbs
`-- noroute.hbs
turn into this:
|-- common.js
`-- guides.js
As you can see, the guides folder is squashed into guides.js, and the root (.) folder is squashed into common.js.
Below is my ugly solution, inspired by this post:
function getFolderMap(dir) {
  var fs = require('fs');
  var path = require('path');
  var result = {};
  // collect every sub-directory of dir into a { name: name } map
  fs.readdirSync(dir).forEach(function(file) {
    if (fs.statSync(path.join(dir, file)).isDirectory()) {
      result[file] = file;
    }
  });
  return result;
}
gulp.task('build-dev-templates3', function() {
  var mergeStream = require('merge-stream')();
  var templatePaths = getFolderMap(paths.src + '/templates');
  templatePaths['./'] = 'common';
  for (var src in templatePaths) {
    var dst = templatePaths[src];
    var stream = gulp.src([paths.src + '/templates/' + src + '**/*.hbs'])
      .pipe($.process())
      .pipe($.concat(dst + '.js'))
      .pipe(gulp.dest(paths.dest + '/templates'));
    mergeStream.add(stream);
  }
  return mergeStream;
});
What is the gulp way to achieve this? Please at least point me to some helpful tools.
Edit 2
I need to get rid of the getFolderMap function and solve this with pure streams. The closest I can get is:
var directoryFilter = $.filter(function (file) {
  return file.isDirectory();
});

gulp.src([paths.src + '/templates/**'])
  .pipe(directoryFilter)
  .pipe(
    // here I have a list of directories
    // I need a gulp plugin to get their contents
    // so I can process them.
  );
This issue is related: https://github.com/gulpjs/gulp/issues/386.
Final Solution
This is my final solution based on spiralx's answer.
https://gist.github.com/eguneys/2fdbe7ac83dfab04748a

Something like this is my first guess. I use the forEachModule function at work to execute a sub-task for every module and then combine the results - it helps with some plugins that have issues with paths and includes (gulp-stylus IIRC):
var gulp = require('gulp');
var plugins = require('gulp-load-plugins')();
var es = require('event-stream');
var path = require('path');
var config = require('./build.config');

// ------------------------------------

/*
 * For each module execute subtaskFunc and get the result stream, then
 * combine all of the streams and return that.
 *
 * @param {Object[]} modules array of module objects
 * @param {Function} subtaskFunc function(module) -> stream
 * @return {Object} a stream
 */
function forEachModule(modules, subtaskFunc) {
  var subtaskStreams = modules.map(subtaskFunc);
  return es.concat.apply(null, subtaskStreams);
}
// ------------------------------------
gulp.task('templates', function() {
  var dest = path.join(config.dest, 'templates');

  return forEachModule(config.modules, function(module) {
    var src = path.join(config.src, 'templates', module, '**/*.hbs');

    // Compile .hbs files, wrap in a CommonJS module, combine into `<module>.js` and write to dest.
    return gulp.src(src)
      .pipe(plugins.handlebars().on('error', plugins.util.log))
      .pipe(plugins.defineModule('plain'))
      .pipe(plugins.declare({
        namespace: 'app.templates.' + module
      }))
      .pipe(plugins.concat(module + '.js'))
      .pipe(gulp.dest(dest));
  })
  // Combine all module template files into one?
  .pipe(plugins.concat('templates.js'))
  .pipe(gulp.dest(config.dest));
});
Edit:
This code is almost identical to what was there, but it looks for directories under 'src/app' to find modules instead of pre-defining the list, and doesn't generate any configuration outside of the gulp task function itself.
The reason it's like this to begin with was issues using gulp-stylus on a stream such as src/app/**/*.styl when a) using @import or anything referring to relative paths, or b) passing options to either Stylus itself, or a plugin such as Nib.
var fs = require('fs');

var modules = fs.readdirSync('src/app').filter(function(name) {
  return fs.statSync('src/app/' + name).isDirectory();
});

// ------------------------------------

gulp.task('styles', function() {
  var streams = modules.map(function(name) {
    return gulp.src('src/app/' + name + '/**/*.styl')
      .pipe(plugins.stylus({
        use: [
          require('nib')(),
          require('bootstrap3-stylus')()
        ],
        paths: [
          'src/common'
        ]
      }))
      .pipe(plugins.concat(name + '.css'))
      .pipe(gulp.dest('dist/css'))
      .pipe(plugins.minifyCss())
      .pipe(plugins.rename({
        suffix: '.min'
      }))
      .pipe(gulp.dest('dist/css'));
  });

  return plugins.mergeStream(streams)
    .pipe(plugins.concat('app.css'))
    .pipe(gulp.dest('dist/css'));
});

Related

Gulp watch() not triggering the less process

The following gulp watch task isn't getting triggered when I change any LESS file in the project. Can anyone spot what I'm doing wrong? Most of the answers here say NOT to use the watch-less module, which I'm not. It's supposed to listen for changes to any LESS file in the project and, when one changes, go to the app.less file to regenerate the CSS file (app.less has @imports for all the files).
var watch = require("gulp-watch");
var less = require("gulp-less");

gulp.watch(paths.source + "**/*.less", function(event) {
  gulp.src(paths.source + paths.assets + paths.less + "app.less")
    .pipe(less().on("error", console.log))
    .pipe(gulp.dest(paths.dev + paths.css));
});
Here are some issues:
require("gulp-watch") is useless here - gulp.watch is a core gulp API, so that plugin isn't needed.
The gulpfile.js should consist of several gulp tasks.
Then run gulp watch in your terminal.
For example:
var gulp = require('gulp');
var path = require('path');
var less = require('gulp-less');

var paths = {
  // your paths
};

gulp.task('styles', function () {
  return gulp.src(paths.source + paths.assets + paths.less + "app.less")
    .pipe(less({
      // paths to be used for @import directives
      paths: [ path.join(__dirname, 'less', 'includes') ]
    }))
    .pipe(gulp.dest('./'));
});

gulp.task('watch', function() {
  gulp.watch('less/**/*.less', ['styles']);
});

Gulp inject not working one way but works the other - no difference is there?

I have a gulp task to inject bower components into my index.html file. I have two different versions of it; one works and one does not, and I can't tell why the one doesn't work, because they look exactly the same to me.
Does NOT work
var gulp = require('gulp');
var inject = require('gulp-inject');
var mainBowerFiles = require('main-bower-files');

/**
 * Injects all the bower dependencies into index.html
 */
gulp.task('inject-bower-files', function() {
  return
  gulp.src('./index.html')
    .pipe(inject(gulp.src(mainBowerFiles(), {read: false})))
    .pipe(gulp.dest('./'));
});
Works
var gulp = require('gulp');
var inject = require('gulp-inject');
var mainBowerFiles = require('main-bower-files');

/**
 * Injects all the bower dependencies into index.html
 */
gulp.task('inject-bower-files', function() {
  var target = gulp.src('./index.html');
  var sources = gulp.src(mainBowerFiles(), {read: false});
  return target.pipe(inject(sources))
    .pipe(gulp.dest('./'));
});
What is the difference between these two?
Your first example is failing due to JavaScript's automatic semicolon insertion.
It is equivalent to
var gulp = require('gulp');
var inject = require('gulp-inject');
var mainBowerFiles = require('main-bower-files');

/**
 * Injects all the bower dependencies into index.html
 */
gulp.task('inject-bower-files', function() {
  return; // <-- semicolon inserted here, nothing below this runs
  gulp.src('./index.html')
    .pipe(inject(gulp.src(mainBowerFiles(), {read: false})))
    .pipe(gulp.dest('./'));
});
To make it work, change it to
var gulp = require('gulp');
var inject = require('gulp-inject');
var mainBowerFiles = require('main-bower-files');

/**
 * Injects all the bower dependencies into index.html
 */
gulp.task('inject-bower-files', function() {
  // return is no longer on a line by itself
  return gulp.src('./index.html')
    .pipe(inject(gulp.src(mainBowerFiles(), {read: false})))
    .pipe(gulp.dest('./'));
});

Node.js, require all modules in folder and use loaded module directly

In the MyModule folder, I have these two JS files.
SayHello.js
module.exports.SayHello = function() {
  return('Hello !');
};
SayByeBye.js
module.exports.SayByeBye = function() {
  return('Bye Bye!');
};
In Node.js, I want to require all files in the MyModule folder and call the SayHello and SayByeBye functions directly, something like:
require('./MyModule');
console.log(SayHello());
console.log(SayByeBye());
EDIT:
With the answer of @Yanick Rochon, I did this:
> ./app/my-module/index.js
global.SayHello = require('./say-hello').SayHello;
global.SayByeBye = require('./say-byebye').SayByeBye;
> ./app/my-module/say-hello.js
module.exports.SayHello = function() {
  return('Hello !');
};
> ./app/my-module/say-byebye.js
module.exports.SayByeBye = function() {
  return('Bye Bye !');
};
> ./app/main.js
require('./my-module');
console.log(SayHello());
console.log(SayByeBye());
There's a section about global objects in the Node documentation.
However, globals should be used with care: by adding modules to the global scope I reduce testability and encapsulation. But in this case, I think using this method is acceptable.
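For reference, a minimal non-global sketch (hypothetical, using the same files) would have index.js export an object and main.js assign it to a variable:
> ./app/my-module/index.js
module.exports = {
  SayHello: require('./say-hello').SayHello,
  SayByeBye: require('./say-byebye').SayByeBye
};
> ./app/main.js
var myModule = require('./my-module');
console.log(myModule.SayHello());
console.log(myModule.SayByeBye());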
First thing first...
I believe you are confusing Node.js with PHP or .NET, in the sense that you don't "import" into the current module what is exported in other modules - not unless you manually do it, anyway. For example, when you call
require('./my-module');
(Note that I renamed your MyModule to follow the Node.js naming convention.)
You don't load things into the current context; you just load the script without assigning it to anything. To access what my-module exposes, you need to assign it to something, or use it directly. For example:
require('./my-module').someFunction();
or
var myModule = require('./my-module');
myModule.someFunction();
Modules are not namespaces, but JavaScript objects that expose public properties outside of their own context (i.e. using module.exports = ...).
Answer
Here are the two most popular ways to accomplish this:
Solution 1
Create an index.json file inside the folder from which you want to load all of your scripts. The returned JSON array should list all the modules to load automatically:
> ./app/index.json
[
  "say-hello.js",
  "say-goodbye.js"
]
You should also consider having all your files API-compatible:
> ./app/say-hello.js
module.exports = function sayHello() {
  return 'Hello !';
};
> ./app/say-goodbye.js
module.exports.sayGoodbye = function () {
  return 'Goodbye !';
};
Then load and execute everything like this:
var path = require('path');

var basePath = './app/';
var files = require(basePath);  // loads ./app/index.json

// build a map of everything the listed files export
var mods = files.reduce(function (loaded, file) {
  var mod = require(path.join(basePath, file));
  // mod is a function with a name, so use it!
  if (mod instanceof Function) {
    loaded[mod.name] = mod;
  } else {
    Object.keys(mod).forEach(function (property) {
      loaded[property] = mod[property];
    });
  }
  return loaded;
}, {});

mods.sayHello();
mods.sayGoodbye();
Solution 2
Read all .js files inside your folder and import them. I highly recommend you use glob for this.
var glob = require("glob")
var path = require('path');
var basePath = './app/';
var mods = glob.sync(path.join(basePath, '*.js')).reduce(function (loaded, file) {
var mod = require(file);
// mod is a function with a name, so use it!
if (mod instanceof Function) {
loaded[mod.name] = mod;
} else {
Object.keys(mod).forEach(function (property) {
loaded[property] = mod.property;
});
}
return loaded;
}, {});
mods.sayHello();
mods.sayGoodbye();
Note on the difference between module.exports and exports
Typically module.exports === exports; exports starts out as a reference to the same object as module.exports, so reassigning the local exports variable breaks that link, while assigning properties works through either. That is why module.exports is the safer choice:
exports = function Foo() { }          // will not do anything
module.exports = function Foo() { }   // but this will do what you expect

// however these two lines produce the same result
exports.foo = 'Bar';
module.exports.foo = 'Bar';
For this reason, module.exports is recommended in all cases.
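A minimal sketch of what Node effectively does with each module (simplified, my own illustration) explains the behavior above:
// simplified view of Node's module wrapper
var module = { exports: {} };
var exports = module.exports;  // just a local alias to the same object
// ... your module code runs here ...
// require() ultimately returns module.exports, never the local `exports` variable,
// so `exports = ...` only rebinds the alias and is lost, while `exports.foo = ...`
// mutates the object that actually gets returned.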
It's not perfect, but something like this should help you accomplish this:
var fs = require('fs');
var path = require('path');

var files = fs.readdirSync(__dirname);
// path.basename gives just this script's file name (path.delimiter is the PATH separator and would be wrong here)
var ownFilename = path.basename(__filename);
var modules = {};

for (var i = 0; i < files.length; i++) {
  var filename = files[i];
  if (filename.substr(-3) === '.js' && filename !== ownFilename) {
    modules[filename.slice(0, -3)] = require('./' + filename);
  }
}

// each entry is that file's exports object, keyed by file name
console.log(modules.SayByeBye.SayByeBye());
console.log(modules.SayHello.SayHello());

Chain Gulp glob to browserify transform

I have a project with a few relatively disjoint pages, each including their own entry point script. These scripts require a number of others using commonjs syntax, and need to be transformed by 6to5 and bundled by browserify.
I would like to set up a gulp task that captures all the files matching a pattern and passes them on to the bundler, but I'm not sure how to pass files from gulp.src to browserify(filename).
My gulpfile looks like:
var gulp = require("gulp");
var browserify = require("browserify");
var to5browserify = require("6to5-browserify");
var source = require("vinyl-source-stream");
var BUNDLES = [
"build.js",
"export.js",
"main.js"
];
gulp.task("bundle", function () {
/* Old version, using glob:
return gulp.src("src/** /*.js")
.pipe(sixto5())
.pipe(gulp.dest("dist"));
*/
// New version, using array:
return BUNDLES.map(function (bundle) {
return browserify("./src/" + bundle, {debug: true})
.transform(to5browserify)
.bundle()
.pipe(source(bundle))
.pipe(gulp.dest("./dist"));
});
});
gulp.task("scripts", ["bundle"]);
gulp.task("html", function () {
return gulp.src("src/**/*.html")
.pipe(gulp.dest("dist"));
});
gulp.task("styles", function () {
return gulp.src("src/**/*.css")
.pipe(gulp.dest("dist"));
});
gulp.task("default", ["scripts", "html", "styles"]);
This seems to work, but isn't maintainable: I'll be adding more scripts relatively soon, and don't want to add them to the array every time.
I've tried using gulp.src(glob).pipe within the browserify call and piping after calling (shown here), and gulp.src(glob).map (method doesn't exist).
How can you chain gulp.src with a name-based transformer like browserify?
Use through2 to make a one-off custom plugin stream that does all of the dirty work.
Unfortunately vinyl-transform and vinyl-source-stream (and the solutions that go along with them) have flaws, so we have to go for something custom.
var gulp = require('gulp');
var through = require('through2');
var browserify = require('browserify');

gulp.task('bundle', function() {
  var browserified = function() {
    return through.obj(function(chunk, enc, callback) {
      if (chunk.isBuffer()) {
        var b = browserify(chunk.path);
        // Any custom browserify stuff should go here
        //.transform(to5browserify);
        chunk.contents = b.bundle();
        this.push(chunk);
      }
      callback();
    });
  };

  return gulp.src(['./src/**/*.js'])
    .pipe(browserified())
    .pipe(gulp.dest('dest'));
});
You can specify globs in your BUNDLES array as well as exclude any files:
var BUNDLES = [
  "app/**/*.js",
  "export.js",
  "app/modules/**/*.js",
  "!app/modules/excluded/*.js"
];
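Presumably (my assumption - the answer doesn't spell it out) that array would then replace the hard-coded glob in the task above:
// hypothetical wiring of the BUNDLES globs into the 'bundle' task
return gulp.src(BUNDLES)
  .pipe(browserified())
  .pipe(gulp.dest('dest'));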

Iterating over directories with Gulp?

I'm new to gulp, but I'm wondering if its possible to iterate through directories in a gulp task.
Here's what I mean: I know a lot of the tutorials/demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't have only 2 directories - I have many (maybe 10-20), so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
  return fs.readdirSync(dir)
    .filter(function(file) {
      return fs.statSync(path.join(dir, file)).isDirectory();
    });
}

gulp.task('scripts', function() {
  var folders = getFolders(scriptsPath);

  var tasks = folders.map(function(folder) {
    return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
      // concat into foldername.js
      .pipe(concat(folder + '.js'))
      // write to output
      .pipe(gulp.dest(scriptsPath))
      // minify
      .pipe(uglify())
      // rename to folder.min.js
      .pipe(rename(folder + '.min.js'))
      // write to output again
      .pipe(gulp.dest(scriptsPath));
  });

  // process all remaining files in scriptsPath root into main.js and main.min.js files
  var root = gulp.src(path.join(scriptsPath, '/*.js'))
    .pipe(concat('main.js'))
    .pipe(gulp.dest(scriptsPath))
    .pipe(uglify())
    .pipe(rename('main.min.js'))
    .pipe(gulp.dest(scriptsPath));

  return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var uglify = require('gulp-uglify');  // added: used below
var concat = require('gulp-concat'); // added: used below

gulp.task('minify-features', function() {
  var promises = [];

  glob.sync('/js/features/*').forEach(function(filePath) {
    if (fs.statSync(filePath).isDirectory()) {
      var defer = Q.defer();

      var pipeline = gulp.src(filePath + '/**/*.js')
        .pipe(uglify())
        .pipe(concat(path.basename(filePath) + '.min.js'))
        .pipe(gulp.dest(filePath));

      pipeline.on('end', function() {
        defer.resolve();
      });

      promises.push(defer.promise);
    }
  });

  return Q.all(promises);
});
I'm still trying to get my head around how streams work in Node myself.
I made a simple example for you of how to make a stream that filters folders and starts a new, given stream for each of them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function() {
  return es.map(function(data, cb) {
    log.data(data.path);
    cb(null, data);
  });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// it pipelines it with the stream given
var forEachFolder = function(stream) {
  return es.map(function(data, cb) {
    if (data.isDirectory()) {
      var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files for example
      log.info('Piping files found in ' + pathToPass);
      if (stream !== undefined) {
        gulp.src([pathToPass])
          .pipe(stream());
      }
    }
    cb(null, data);
  });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function() {
  // load some folder with some subfolders inside
  gulp.src('js/*')
    .pipe(forEachFolder(printFileNames));
  // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minify and concatenate them) and then pass an instance of that stream to the forEachFolder stream I made, just like I do with the printFileNames custom stream.
Give it a try and let me know if it works for you.
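For instance, a rough sketch (my own guess, assuming gulp-uglify and gulp-concat are installed) of a minify-and-concat stream factory you could pass in place of printFileNames:
var uglify = require('gulp-uglify');
var concat = require('gulp-concat');

// hypothetical stream factory, same shape as printFileNames above
var minifyFolder = function() {
  // es.pipeline chains the steps into a single stream we can return
  return es.pipeline(
    uglify(),
    concat('bundle.min.js'), // forEachFolder doesn't pass the folder name in, so the output name is fixed here
    gulp.dest('dist/js')
  );
};

// used exactly like printFileNames:
// gulp.src('js/*').pipe(forEachFolder(minifyFolder));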
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

// task for building feature1
gulp.task('minify-feature1', function() {
  return gulp.src('/js/feature1/*')
    .pipe(uglify())                   // minify feature1 stuff
    .pipe(concat('feature1.min.js'))  // concat into single file
    .pipe(gulp.dest('/js/feature1')); // output to dir
});

// task for building feature2 - do the same for feature2
gulp.task('minify-feature2', function() {
  return gulp.src('/js/feature2/*')
    .pipe(uglify())
    .pipe(concat('feature2.min.js'))
    .pipe(gulp.dest('/js/feature2'));
});

// generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions so they can be processed by gulp.parallel, in a way where the number of functions I generate can vary. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition, because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to become an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
  // Get a list of folders as per the recipe above - in my case an array named modules.
  // For each module return a function or composition (gulp.series in this case).
  return modules.map(function (m) {
    var moduleDest = env.folder + 'modules/' + m;

    return gulp.series(
      // Illustrative functions... all must return a stream or call the callback, but you can have
      // as many functions or compositions (gulp.series or gulp.parallel) as desired
      function () {
        return gulp.src('modules/' + m + '/img/*', { buffer: false })
          .pipe(gulp.dest(moduleDest + '/img'));
      },
      function (done) {
        console.log('In my function');
        done();
      }
    );
  });
}

// Illustrative build task, running two named tasks then processing all modules generated above
// in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
