Iterating over directories with Gulp? - javascript

I'm new to gulp, but I'm wondering if it's possible to iterate through directories in a gulp task.
Here's what I mean: I know a lot of the tutorials / demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't only have 2 directories, but I have many (maybe 10-20) so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.

There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');
var scriptsPath = 'src/scripts';
function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('scripts', function() {
    var folders = getFolders(scriptsPath);

    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
            // concat into foldername.js
            .pipe(concat(folder + '.js'))
            // write to output
            .pipe(gulp.dest(scriptsPath))
            // minify
            .pipe(uglify())
            // rename to folder.min.js
            .pipe(rename(folder + '.min.js'))
            // write to output again
            .pipe(gulp.dest(scriptsPath));
    });

    // process all remaining files in scriptsPath root into main.js and main.min.js files
    var root = gulp.src(path.join(scriptsPath, '/*.js'))
        .pipe(concat('main.js'))
        .pipe(gulp.dest(scriptsPath))
        .pipe(uglify())
        .pipe(rename('main.min.js'))
        .pipe(gulp.dest(scriptsPath));

    return merge(tasks, root);
});

You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

gulp.task('minify-features', function() {
    var promises = [];
    glob.sync('/js/features/*').forEach(function(filePath) {
        if (fs.statSync(filePath).isDirectory()) {
            var defer = Q.defer();
            var pipeline = gulp.src(filePath + '/**/*.js')
                .pipe(uglify())
                .pipe(concat(path.basename(filePath) + '.min.js'))
                .pipe(gulp.dest(filePath));
            pipeline.on('end', function() {
                defer.resolve();
            });
            promises.push(defer.promise);
        }
    });
    return Q.all(promises);
});

I'm still trying to understand how streams work in Node myself.
I made a simple example for you of how to make a stream that filters folders and starts a new, given stream for each of them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function(){
    return es.map(function(data, cb){
        log.data(data.path);
        cb(null, data);
    });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// it pipelines it with the stream given
var forEachFolder = function(stream){
    return es.map(function(data, cb){
        if(data.isDirectory()){
            var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files for example
            log.info('Piping files found in ' + pathToPass);
            if(stream !== undefined){
                gulp.src([pathToPass])
                    .pipe(stream());
            }
        }
        cb(null, data);
    });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function(){
    // load some folder with some subfolders inside
    gulp.src('js/*')
        .pipe(forEachFolder(printFileNames));
    // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (such as minifying and concatenating them) and then pass an instance of that stream to the forEachFolder stream I made, just like I do with the printFileNames custom stream.
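For illustration, here is a minimal sketch of such a factory (my own addition, not part of the original answer). It assumes gulp-concat and gulp-uglify are installed and uses es.pipeline from event-stream to combine the plugin streams into a single stream that forEachFolder can invoke; the output name and destination are placeholders, since the factory has no access to the folder it is called for.
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

// Sketch only: concatenate whatever files are piped in, minify the result and write it out.
// 'bundle.min.js' and 'dist' are hypothetical values; adjust them to your project.
var minifyAndConcat = function(){
    return es.pipeline(
        concat('bundle.min.js'),
        uglify(),
        gulp.dest('dist')
    );
};

gulp.task('minify-folders', function(){
    gulp.src('js/*')
        .pipe(forEachFolder(minifyAndConcat));
});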
Give it a try and let me know if it works for you.

First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

// task for building feature1
gulp.task('minify-feature1', function() {
    return gulp.src('/js/feature1/*')
        .pipe(uglify())                   // minify feature1 stuff
        .pipe(concat('feature1.min.js'))  // concat into single file
        .pipe(gulp.dest('/js/feature1')); // output to dir
});

// task for building feature2
gulp.task('minify-feature2', function() { // do the same for feature2
    return gulp.src('/js/feature2/*')
        .pipe(uglify())
        .pipe(concat('feature2.min.js'))
        .pipe(gulp.dest('/js/feature2'));
});

// generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features

I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and to return the array of functions so they can be processed by gulp.parallel, since the number of functions I generate is variable. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition, because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
    // Get list of folders as per recipe above - in my case an array named modules
    // For each module return a function or composition (gulp.series in this case).
    return modules.map(function (m) {
        var moduleDest = env.folder + 'modules/' + m;
        return gulp.series(
            // Illustrative functions... all must return a stream or call the callback, but you can
            // have as many functions or compositions (gulp.series or gulp.parallel) as desired
            function () {
                return gulp.src('modules/' + m + '/img/*', { buffer: false })
                    .pipe(gulp.dest(moduleDest + '/img'));
            },
            function (done) {
                console.log('In my function');
                done();
            }
        );
    });
}
//Illustrative build task, running two named tasks then processing all modules generated above in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
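As an aside (my addition, not part of the original answer), on Node versions that support the spread operator the apply() call can be written equivalently by spreading the array directly into gulp.parallel:
gulp.task('build', gulp.series('clean', 'test', gulp.parallel(...getModuleFunctions())));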

Related

How to rename a zip file after an HTML file in a Gulp task

I am attempting to zip up all files in my dist folder using gulp-zip and would like to dynamically name the zip file after the only HTML file in that directory. Here's my attempt using gulp-filenames, but I've had no luck.
const gulp = require('gulp');
const rename = require('gulp-rename');
const zip = require('gulp-zip');
var filenames = require('gulp-filenames');

gulp.task('zipp', function(){
    gulp.src('dist/*')
        .pipe(zip('_final.zip'))
        .pipe(rename({prefix: getHtmlName()}))
        .pipe(gulp.dest('./dist'))
})

function getHtmlName(){
    gulp.src("./src/*.html")
        .pipe(filenames("html"))
        .pipe(gulp.dest("./dist"));
    return filenames.get("html");
}
Here are two ways:
const gulp = require('gulp');
const rename = require('gulp-rename');
const zip = require('gulp-zip');
const filenames = require('gulp-filenames');

// run the getHTMLname task first; filenames will then have the data stored in an array
gulp.task('zipp', ['getHTMLname'], function () {
    return gulp.src('dist/*')
        .pipe(zip('_final.zip'))
        // rename gets the file passing through it, in this case '_final.zip'
        .pipe(rename(function (path) {
            // retrieve the array of string filenames, use the first and only one in the array
            // and get the basename via split
            path.basename = filenames.get("html")[0].split('.')[0] + path.basename;
        }))
        .pipe(gulp.dest('./dist'))
})

gulp.task('getHTMLname', function () {
    return gulp.src("./dist/*.html")
        .pipe(filenames("html"))
});

// **************************************************************

const gulp = require('gulp');
const rename = require('gulp-rename');
const zip = require('gulp-zip');
const glob = require("glob");
const path = require('path');

gulp.task('zipp', function () {
    return gulp.src('dist/*')
        .pipe(zip('_final.zip'))
        .pipe(rename(function (file) {
            // glob.sync returns an array; get the first member of that array
            // path.basename('string', '.html') returns the name of the file without the extension
            var temp = path.basename(glob.sync("./dist/*.html")[0], '.html');
            file.basename = temp + file.basename;
        }))
        .pipe(gulp.dest('./dist'))
});
If you really want to use gulp-filenames, use the code above the ********'s. The 'getHTMLname' task must run first - that is when gulp-filenames builds its array of file names from whatever source directory you feed it. That array can later be used anywhere in a subsequent task.
However, I recommend the second method, using the glob and path modules, which you should learn anyway. The second method does not require a separate task; it simply globs the HTML filename you want right in the same task, so it is simpler. Also, when I installed gulp-filenames it pulled in a lot of deprecated modules, so it badly needs updating; it works now but may stop working as you update Node, etc.
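For reference, a tiny standalone sketch of the two calls that second method relies on (the paths here are hypothetical):
const glob = require('glob');
const path = require('path');

// glob.sync returns an array of paths matching the pattern, synchronously
const htmlFiles = glob.sync('./dist/*.html'); // e.g. ['./dist/index.html']
// path.basename strips the directory; the second argument also strips the extension
const name = path.basename(htmlFiles[0], '.html'); // e.g. 'index'
console.log(name);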

gulp-load-plugins.sourcemaps.init() TypeError: Cannot read property 'init' of undefined

I'm trying to adapt a gulp file to my purposes and I'm running into issues. I only care about one task:
gulp.task('js:browser', function () {
    return mergeStream.apply(null,
        Object.keys(jsBundles).map(function(key) {
            return bundle(jsBundles[key], key);
        })
    );
});
It uses browserify to condense my bundle into a single usable file, relying on these two functions and this object:
function createBundle(src) {
    // if the source is not an array, make it one
    if (!src.push) {
        src = [src];
    }
    var customOpts = {
        entries: src,
        debug: true
    };
    var opts = assign({}, watchify.args, customOpts);
    var b = watchify(browserify(opts));
    b.transform(babelify.configure({
        stage: 1
    }));
    b.transform(hbsfy);
    b.on('log', plugins.util.log);
    return b;
}

function bundle(b, outputPath) {
    var splitPath = outputPath.split('/');
    var outputFile = splitPath[splitPath.length - 1];
    var outputDir = splitPath.slice(0, -1).join('/');
    console.log(outputFile);
    console.log(plugins);
    return b.bundle()
        // log errors if they happen
        .on('error', plugins.util.log.bind(plugins.util, 'Browserify Error'))
        .pipe(source(outputFile))
        // optional, remove if you don't need to buffer file contents
        .pipe(buffer())
        // optional, remove if you don't want sourcemaps
        .pipe(plugins.sourcemaps.init({loadMaps: true})) // loads map from browserify file
        // Add transformation tasks to the pipeline here.
        .pipe(plugins.sourcemaps.write('./')) // writes .map file
        .pipe(gulp.dest('build/public/' + outputDir));
}

var jsBundles = {
    'js/polyfills/promise.js': createBundle('./public/js/polyfills/promise.js'),
    'js/polyfills/url.js': createBundle('./public/js/polyfills/url.js'),
    'js/settings.js': createBundle('./public/js/settings/index.js'),
    'js/main.js': createBundle('./public/js/main/index.js'),
    'js/remote-executor.js': createBundle('./public/js/remote-executor/index.js'),
    'js/idb-test.js': createBundle('./public/js/idb-test/index.js'),
    'sw.js': createBundle(['./public/js/sw/index.js', './public/js/sw/preroll/index.js'])
};
When I run the gulp task js:browser, I get the following error, coming from the .pipe(plugins.sourcemaps.init({loadMaps: true})) expression:
TypeError: Cannot read property 'init' of undefined
I know that the lines are optional and I can just comment them out, but I do want them. When I run the code in the example file it works properly, when I run it in my gulp file it gives me the error. Any suggestions on what I might be missing? Thanks!
gulp-load-plugins analyzes the contents of your package.json file to find out which Gulp plugins you have installed. Make sure that gulp-sourcemaps is among the "devDependencies" defined there. If not, run:
npm install --save-dev gulp-sourcemaps
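As a reference point (a sketch of my own, not taken from your gulpfile): gulp-load-plugins strips the "gulp-" prefix and camel-cases the rest of the package name, so once gulp-sourcemaps is installed it becomes available as plugins.sourcemaps:
var gulp = require('gulp');
var plugins = require('gulp-load-plugins')();

// Minimal, hypothetical task; the src/build paths are placeholders.
gulp.task('js', function () {
    return gulp.src('src/**/*.js')
        .pipe(plugins.sourcemaps.init())
        .pipe(plugins.sourcemaps.write('./'))
        .pipe(gulp.dest('build'));
});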
There's a small chance that your problem is related to lazy loading of the sourcemaps plugin. If the above doesn't help, try requiring gulp-load-plugins like this:
var plugins = require('gulp-load-plugins')({lazy:false});

Gulp to generate a file per folder

I'm trying to write a gulp task to build JavaScript files and concatenate them into a single file per folder, including the root folder.
I have found this solution: https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md
If you have a set of folders, and wish to perform a set of tasks on
each, for instance...
/scripts
/scripts/jquery/*.js
/scripts/angularjs/*.js
...and want to end up with...
/scripts
/scripts/jquery.min.js
/scripts/angularjs.min.js
However, this only builds a *.js file for each of the subfolders inside the scripts folder. I also want to build the *.js files sitting directly in the root scripts folder, i.e. my expected output would be:
/scripts.min.js
/scripts/jquery.min.js
/scripts/angularjs.min.js
I'm new to node, so I'm confused about how to achieve that. I'd really appreciate your help on this. Thanks a lot.
You can create a separate task (baseScripts) for creating the minified scripts for the base directory. Then create another task (allScripts) that runs both the baseScripts and subScripts tasks.
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('allScripts', ['baseScripts', 'subScripts']);

gulp.task('subScripts', function() {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/*.js'))
            .pipe(uglify())
            .pipe(rename(folder + '.min.js'))
            .pipe(gulp.dest(scriptsPath));
    });
    return merge(tasks);
});

gulp.task('baseScripts', function(){
    return gulp.src(scriptsPath + '/*.js')
        .pipe(uglify())
        .pipe(concat('scripts.min.js'))
        .pipe(gulp.dest('src'));
});
After a day, I have come up with a modification of my own, shown below.
var paths = {
    js: {
        folder: 'dev/assets/js',
        dest: 'dev/assets/js' // output directory; assumed here, since the original snippet used paths.js.dest without defining it
    }
};

gulp.task('js', function() {
    var folders = getFolders(paths.js.folder);
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(paths.js.folder, folder.path, '/*.js'))
            .pipe(uglify())
            .pipe(concat(folder.name + '.min.js'))
            .pipe(gulp.dest(paths.js.dest));
    });
    merge(tasks);
    browserSync.reload();
});

var getFolders = function(dir) {
    var folders = [{path: '', name: 'app'}];
    var folder = fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
    for (var i = 0; i < folder.length; i++) {
        folders.push({path: folder[i], name: folder[i]});
    }
    return folders;
};
I have separated the directory paths and the names of the combined scripts into two properties of an object, so I don't need different tasks for the root folder and the subfolders.
Please feel free to give your comments on my approach :)

Chain Gulp glob to browserify transform

I have a project with a few relatively disjoint pages, each including their own entry point script. These scripts require a number of others using commonjs syntax, and need to be transformed by 6to5 and bundled by browserify.
I would like to set up a gulp task that captures all the files matching a pattern and passes them on to the bundler, but I'm not sure how to pass files from gulp.src to browserify(filename).
My gulpfile looks like:
var gulp = require("gulp");
var browserify = require("browserify");
var to5browserify = require("6to5-browserify");
var source = require("vinyl-source-stream");
var BUNDLES = [
"build.js",
"export.js",
"main.js"
];
gulp.task("bundle", function () {
/* Old version, using glob:
return gulp.src("src/** /*.js")
.pipe(sixto5())
.pipe(gulp.dest("dist"));
*/
// New version, using array:
return BUNDLES.map(function (bundle) {
return browserify("./src/" + bundle, {debug: true})
.transform(to5browserify)
.bundle()
.pipe(source(bundle))
.pipe(gulp.dest("./dist"));
});
});
gulp.task("scripts", ["bundle"]);
gulp.task("html", function () {
return gulp.src("src/**/*.html")
.pipe(gulp.dest("dist"));
});
gulp.task("styles", function () {
return gulp.src("src/**/*.css")
.pipe(gulp.dest("dist"));
});
gulp.task("default", ["scripts", "html", "styles"]);
This seems to work, but isn't maintainable: I'll be adding more scripts relatively soon, and don't want to add them to the array every time.
I've tried using gulp.src(glob).pipe within the browserify call and piping after calling (shown here), and gulp.src(glob).map (method doesn't exist).
How can you chain gulp.src with a name-based transformer like browserify?
Use through2 to make a one-off custom plugin stream that does all of the dirty work.
Unfortunately, vinyl-transform and vinyl-source-stream, and the solutions that go along with them, have flaws, so we have to go for something custom.
var gulp = require('gulp');
var through = require('through2');
var browserify = require('browserify');

gulp.task('bundle', function() {
    var browserified = function() {
        return through.obj(function(chunk, enc, callback) {
            if (chunk.isBuffer()) {
                var b = browserify(chunk.path);
                // Any custom browserify stuff should go here
                //.transform(to5browserify);
                chunk.contents = b.bundle();
                this.push(chunk);
            }
            callback();
        });
    };

    return gulp.src(['./src/**/*.js'])
        .pipe(browserified())
        .pipe(gulp.dest('dest'));
});
You can specify globs in your BUNDLES array as well as exclude any files:
var BUNDLES = [
    "app/**/*.js",
    "export.js",
    "app/modules/**/*.js",
    "!app/modules/excluded/*.js"
];

Merge js files into one with phpStorm

Using PhpStorm, I would like to merge multiple JavaScript files into one.
I installed the Closure Compiler and configured a file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the architecture of my project (a test project for merging js files):
index.php
js (folder):
    first.js (+ first.min.js)
    second.js (+ second.min.js)
    third.js (+ third.min.js)
cache (folder):
    main.js
I would like to merge first.min.js, second.min.js and third.min.js into the cache folder as main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each js file manually.
Can someone explain the arguments I must use to configure my filewatcher?
I used the npm packages concat, minifier and walk.
Here is the script I made:
var walk = require('walk'),
    concat = require('concat'),
    minifier = require('minifier'),
    files = [];

var JS_SOURCES_DIR = 'app/components',
    JS_LAST_FILE = 'app/app.module.js',
    JS_DIR = 'app/',
    JS_FULL_FILE = JS_DIR + 'app.js',
    JS_MINIFIED_FILE = 'app.min.js',
    JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;

var walker = walk.walk(JS_SOURCES_DIR, {followLinks: false});

walker.on('file', (root, stat, next) => {
    var fullpath = root.replace(/\\/g, '/');
    var regex = new RegExp(/.+\.js$/);
    if (stat.name.match(regex)) {
        files.push(fullpath + '/' + stat.name);
    }
    next();
});

walker.on('end', function () {
    files.push(JS_LAST_FILE);
    files.forEach(function (item) {
        console.log(item);
    });
    concat(files, JS_FULL_FILE).then((result) => {
        minifier.minify(JS_FULL_FILE, {output: JS_MINIFIED_FILE_PATH});
        console.log('\n[OK] ' + JS_MINIFIED_FILE + ' successfully updated');
    }, function (error) {
        console.log('[ERROR] JS concat failure: ' + error.message);
    });
});

minifier.on('error', function (error) {
    console.log('\n[ERROR] JS minify error: ' + error);
});
First, the walker adds files to the files array. I use JS_LAST_FILE for AngularJS reasons, as that is where I create the module and register all of its dependencies. The files are then concatenated into JS_FULL_FILE, which is finally minified into JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated.
Instead, when I work locally I don't concatenate the files at all; I simply add them to the head of the page using a homemade PHP function based on scandir().
