Gulp - Run tasks once per folder - javascript

I'm using Gulp to automate the production of banner ads. I have a list of folders (project.banners) and a set of tasks that should be run on each folder.
With my limited knowledge of Gulp I can not figure out how to do this automatically. For now I have to manually update the var banner to point to one particular folder.
I know there are several similar questions on StackOverflow, but I could not find any that fit or solved my problem, and my trials with a forEach loop have only resulted in errors.
Example:
// Settings
var project = {
title : "Client - Project",
devPath : "dev/client/banner/",
banners : [
"180x500",
"580x400",
"468x468"
]
}
// Choose which banner/folder to Gulp (should be set by a loop!!?)
var banner = project.banners[0];
// Run tasks
gulp.task('default', function(callback) {
runSequence(
'css',
'html',
'zip',
callback
);
});
// Example task
gulp.task('css', function() {
// BUG FIX: the original read the bare identifier `devPath`, which is
// undefined here — the path lives on the settings object as
// `project.devPath`, so src() would have resolved to "undefined<size>/...".
return gulp.src(project.devPath + banner + '/style.css')
.pipe(minifycss())
.pipe(gulp.dest('dist/temp/' + banner + '/'))
});

Following the indications found on the official gulp recipe I think something like this should work, but I have yet to test it.
// ...
var fs = require('fs');
var path = require('path');
// ...
// List the names of the immediate sub-directories of `dir`; plain files
// (and anything else that is not a directory) are skipped.
function getFolders(dir) {
var entries = fs.readdirSync(dir);
return entries.filter(function (entry) {
var stats = fs.statSync(path.join(dir, entry));
return stats.isDirectory();
});
}
gulp.task('banners', function() {
// get all folders inside project.devPath, if you need specific
// folders maybe you should wrap them inside another
var folders = getFolders(project.devPath);
// NOTE(review): returning an array of streams does not signal completion to
// gulp by itself — the official recipe combines them with merge-stream;
// confirm before relying on task-completion semantics here.
return folders.map(function(folder) {
// .map runs this action on each folder and returns the stream
// BUG FIX: "debPath" was a typo for "devPath", and the glob pointed at
// style.js although the pipeline minifies CSS — every other snippet on
// this page reads style.css.
return gulp.src(project.devPath + folder + '/style.css')
.pipe(minifycss())
.pipe(gulp.dest('dist/temp/' + folder + '/'));
});
});

You could probably generate a new task for each banner dimension and call them separately in your default task.
// Register one gulp task per banner size, each task named after its folder
// (e.g. "180x500"), so the sizes can be invoked individually or chained
// through runSequence.
project.banners.forEach(function (size) {
gulp.task(size, function () {
var source = devPath + size + '/style.css';
var target = 'dist/temp/' + size + '/';
return gulp.src(source)
.pipe(minifycss())
.pipe(gulp.dest(target));
});
});
Then in your default task run:
runSequence.apply(this, project.banners.concat(['html', 'zip', callback]));
Using .apply() will pass the array elements as separate arguments.

For reference, I ended up doing this:
Loop through all banners and call a custom function ('createBanner') with banner as variable.
Create tasks with dynamic tasknames inside function.
Run tasks in sequence.
// Settings
var project = {
title : "Client - Project",
devPath : "dev/client/banner/",
banners : [
// Array with banners (= folder names)
"180x500",
"580x400",
"468x468"
]
}
// Place all tasks inside this function - receives banner/foldername
function createBanner(banner) {
// Example task - with dynamic taskname
gulp.task('css_' + banner, function() {
// BUG FIX: the bare identifier `devPath` is undefined — the setting
// lives on the project object.
return gulp.src(project.devPath + banner + '/style.css')
.pipe(minifycss())
.pipe(gulp.dest('dist/temp/' + banner + '/'))
});
// ... Other tasks
// Run tasks in sequence
runSequence(
'css_' + banner,
'html_' + banner,
'zip_' + banner
);
}
// Default task
gulp.task('default', function(callback) {
// Loop through all banners - call function with current banner as variable
for (var i = 0; i < project.banners.length; i++) {
createBanner(project.banners[i]);
}
// BUG FIX: the callback was never invoked, so the default task never
// signalled completion to gulp. NOTE(review): the per-banner runSequence
// calls above are fire-and-forget; pass each a completion callback if true
// end-to-end sequencing is required.
callback();
});

Related

Gulp watch() not triggering the less process

The following gulp watch task isn't getting triggered when I change any LESS file in the project. Can anyone spot what I'm doing wrong? Most the answers here say to NOT use the watch-less module, which I'm not. It's supposed to listen to changes in any LESS file in the project and when one changes, go to the app.less file to regenerate the CSS file (app.less has #includes to all the files).
// NOTE(review): gulp-watch is required but never used — gulp.watch below is
// gulp's built-in API, so this require is dead weight (as the answer under
// this question points out).
var watch = require("gulp-watch");
var less = require("gulp-less");
// Recompile app.less (which #includes the other LESS files) whenever any
// .less file under paths.source changes.
gulp.watch(paths.source + "**/*.less", function(event){
// NOTE(review): the stream is neither returned nor awaited, so gulp cannot
// observe errors or completion of this rebuild.
gulp.src(paths.source + paths.assets + paths.less + "app.less")
.pipe(less().on("error", console.log))
.pipe(gulp.dest(paths.dev + paths.css));
});
Here are some issues:
require("gulp-watch"); is useless here. Actually gulp.watch is a core API of gulp.
The gulpfile.js consists of several gulp tasks.
Run gulp watch in your terminal.
For example:
var gulp = require('gulp');
var path = require('path');
var less = require('gulp-less');

var paths = {
// your paths
};

// Compile the single entry file app.less (which @imports everything else)
// into CSS at the project root.
gulp.task('styles', function () {
var entry = paths.source + paths.assets + paths.less + "app.less";
var lessOptions = {
// directories searched when resolving @import directives
paths: [ path.join(__dirname, 'less', 'includes') ]
};
return gulp.src(entry)
.pipe(less(lessOptions))
.pipe(gulp.dest('./'));
});

// Re-run the styles task whenever any .less file changes.
gulp.task('watch', function() {
gulp.watch('less/**/*.less', ['styles']);
});

gulp - How to concatenate multiple files with watch?

I want to select a couple of files, and generate concatenated files from them. And also want to concatenate my changed files.
This is my assets tree:
assets
- css
- js
- sass
- _midia_queries.scss
- media-combine.scss
- query.scss
So, I want to generate maximum.css(./assets/css/media-combine.min.css) and medium.css(./assets/css/media-combine.min.css, ./assets/css/query.min.css).
Both .min files are being generated by my css task. See below:
// Compile Sass to CSS; production builds additionally combine media queries,
// autoprefix, minify, and add a ".min" suffix before writing to the CSS dest.
gulp.task('css', function (done) {
gulp.src(paths.sass.src)
// only rebuild files that changed relative to 'dist'
.pipe(plugins.changed('dist'))
.pipe(plugins.sass())
.pipe(plugins.combineMediaQueries())
.pipe(plugins.autoprefixer())
// gutil.noop() passes files through untouched outside production
.pipe( isProduction ? plugins.minifyCss() : gutil.noop() )
.pipe( isProduction ? plugins.rename({ suffix: ".min" }) : gutil.noop() )
.pipe(gulp.dest(paths.css.dest))
// signal task completion once the stream has finished writing
.on("end", done);
});
And my concat-css task is using my bad approach of multiple generation files:
gulp.task('concat-css', ['css'], function (done) {
concatType("css", done);
});
// Concatenate each configured group of compiled files for `type` (e.g. "css").
// Production-only; calls `done` when the last group's stream ends.
function concatType (type, done) {
if (isProduction) {
var task = gulp;
filesToConcat[type].forEach(function (value) {
// BUG FIX: the original rewrote value.files in place, so every run under
// watch prefixed the dest path again ("./assets/css/./assets/css/...").
// Build a fresh fully-qualified list instead of mutating the config.
var files = value.files.map(function (file) {
return paths[type].dest + file;
});
gutil.log('File', gutil.colors.green(value.compiled), 'is beign concatenated');
task = gulp.src(files)
.pipe(plugins.concat(value.compiled))
.pipe(gulp.dest(paths[type].dest));
gutil.log('Ready to go!');
});
// NOTE(review): only the last group's stream is awaited here.
task.on("end", done);
}
}
Update
When I type gulp on my console/terminal, it runs my css and concat-css tasks perfectly, but when the watch task is triggered by a file change, both run — yet only css works. My .min files are updated, but my concatenated files are not.
Any ideas to approach this? Thanks.
Sorry, it was a logic problem.
At my concatType function, when I was setting up the path for the targeted files, but, when it was running on watch the path was set a second time, like this:
./assets/css/./assets/css/query.min.css
It would never work. So, I've changed this:
value.files.forEach(function (file, fileIndex) {
value.files[fileIndex] = paths[type].dest + file;
});
task = gulp.src(value.files)
For this:
// BUG FIX: declare `files` with var — the bare assignment created an implicit
// global (and throws in strict mode). A fresh array per group also prevents
// dest-path prefixes accumulating across watch runs.
var files = [];
value.files.forEach(function (file) {
files.push(paths[type].dest + file);
});
task = gulp.src(files)

Merge js files into one with phpStorm

Using phpStorm, I would like to merge multiple JavaScript files into one.
I installed the closure compiler and configured the file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the architecture of my project (a test project to merge js files) :
index.php
js(folder) >
first.js (+first.min.js),
second.js (+second.min.js),
third.js (+third.min.js)
cache (folder)
main.js
I would like to merge (first.min.js, second.min.js, third.min.js) into folder cache > main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each js file manually.
Can someone explain the arguments I must use to configure my filewatcher?
I used npm plugins concat, minifier and walk.
Here is the script I made :
// Walk JS_SOURCES_DIR, collect every .js file, concatenate them (with
// JS_LAST_FILE appended last for AngularJS module wiring), then minify the
// result to JS_MINIFIED_FILE_PATH.
var walk = require('walk'),
concat = require('concat'),
minifier = require('minifier'),
files = [];
var JS_SOURCES_DIR = 'app/components',
JS_LAST_FILE = 'app/app.module.js',
JS_DIR = 'app/',
JS_FULL_FILE = JS_DIR + 'app.js',
JS_MINIFIED_FILE = 'app.min.js',
JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;
var walker = walk.walk(JS_SOURCES_DIR, {followLinks: false});
// Fired once per file found during the walk.
walker.on('file', (root, stat, next) => {
// normalise Windows backslashes so concatenated paths use forward slashes
var fullpath = root.replace(/\\/g, '/');
var regex = new RegExp(/.+\.js$/);
if (stat.name.match(regex)) {
files.push(fullpath + '/' + stat.name);
}
next();
});
// Fired after the whole tree has been visited.
walker.on('end', function () {
// append the module file last so its registrations run after dependencies
files.push(JS_LAST_FILE);
files.forEach(function (item) {
console.log(item);
})
concat(files, JS_FULL_FILE).then((result) => {
minifier.minify(JS_FULL_FILE, {output: JS_MINIFIED_FILE_PATH});
console.log('\n[OK] ' + JS_MINIFIED_FILE + ' sucessfully updated');
}, function (error) {
console.log('[ERROR] JS concat failure: ' + error.message);
});
});
minifier.on('error', function (error) {
console.log('\n[ERROR] JS minify error: ' + error);
});
First with walker, files are added to var "files". I used JS_LAST_FILE for angularjs concerns, as I build the module and add all the dependencies in that file. Then files are concatenated to JS_FULL_FILE. Finally JS_FULL_FILE is minified to JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated.
Instead when I work locally, I don't concatenate files but I simply add them in the head part of the page using a homemade function that uses php scandir().

Iterating over directories with Gulp?

I'm new to gulp, but I'm wondering if its possible to iterate through directories in a gulp task.
Here's what I mean, I know a lot of the tutorials / demos show processing a bunch of JavaScript files using something like "**/*.js" and then they compile it into a single JavaScript file. But I want to iterate over a set of directories, and compile each directory into it's own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't only have 2 directories, but I have many (maybe 10-20) so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');
var scriptsPath = 'src/scripts';
// Return the names of dir's immediate children that are directories.
function getFolders(dir) {
var folders = [];
fs.readdirSync(dir).forEach(function (name) {
if (fs.statSync(path.join(dir, name)).isDirectory()) {
folders.push(name);
}
});
return folders;
}
// Official "file per folder" recipe: build <folder>.js and <folder>.min.js for
// every sub-folder of scriptsPath, plus main.js/main.min.js from the loose
// files in the root, and merge all streams so gulp waits for everything.
gulp.task('scripts', function() {
var folders = getFolders(scriptsPath);
// one stream per folder; merge-stream combines them below
var tasks = folders.map(function(folder) {
return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
// concat into foldername.js
.pipe(concat(folder + '.js'))
// write to output
.pipe(gulp.dest(scriptsPath))
// minify
.pipe(uglify())
// rename to folder.min.js
.pipe(rename(folder + '.min.js'))
// write to output again
.pipe(gulp.dest(scriptsPath));
});
// process all remaining files in scriptsPath root into main.js and main.min.js files
var root = gulp.src(path.join(scriptsPath, '/*.js'))
.pipe(concat('main.js'))
.pipe(gulp.dest(scriptsPath))
.pipe(uglify())
.pipe(rename('main.min.js'))
.pipe(gulp.dest(scriptsPath));
return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
// BUG FIX: path, concat and uglify were used below but never required.
var path = require('path');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');
// Minify and concatenate each feature directory under /js/features into its
// own <feature>.min.js; the task resolves when every pipeline has ended.
gulp.task('minify-features', function() {
var promises = [];
glob.sync('/js/features/*').forEach(function(filePath) {
if (fs.statSync(filePath).isDirectory()) {
var defer = Q.defer();
// one pipeline per feature folder (includes its sub-directories)
var pipeline = gulp.src(filePath + '/**/*.js')
.pipe(uglify())
.pipe(concat(path.basename(filePath) + '.min.js'))
.pipe(gulp.dest(filePath));
// resolve this folder's promise once its stream has finished
pipeline.on('end', function() {
defer.resolve();
});
promises.push(defer.promise);
}
});
return Q.all(promises);
});
I am trying myself to get how streams work in node.
I made a simple example for you, on how to make a stream to filter folders and start a new given stream for them.
'use strict';
var gulp = require('gulp'),
es = require('event-stream'),
log = require('consologger');
// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function(){
return es.map(function(data, cb){
log.data(data.path);
// pass the file through unchanged
cb(null, data);
});
};
// make a stream that identifies if the given 'file' is a directory, and if so
// it pipelines it with the stream given
// NOTE(review): `stream` here is a stream *factory* (a function returning a
// stream), not a stream instance — it is invoked once per matched folder.
var forEachFolder = function(stream){
return es.map(function(data, cb){
// NOTE(review): assumes gulp.src emits directory entries whose vinyl
// objects expose isDirectory(); confirm against the gulp version in use.
if(data.isDirectory()){
var pathToPass = data.path+'/*.*'; // change it to *.js if you want only js files for example
log.info('Piping files found in '+pathToPass);
if(stream !== undefined){
gulp.src([pathToPass])
.pipe(stream());
}
}
cb(null, data);
});
};
// let's make a dummy task to test our streams
gulp.task('dummy', function(){
// load some folder with some subfolders inside
gulp.src('js/*')
.pipe(forEachFolder(printFileNames));
// we should see all the file paths printed in the terminal
});
So in your case, you can make a stream with whatever you want to make with the files in a folder ( like minify them and concatenate them ) and then pass an instance of this stream to the forEachFolder stream I made. Like I do with the printFileNames custom stream.
Give it a try and let me know if it works for you.
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
// task for building feature1: minify everything in its folder and bundle it
// into a single feature1.min.js
gulp.task('minify-feature1', function() {
var sources = '/js/feature1/*';
return gulp.src(sources)
.pipe(uglify())
.pipe(concat('feature1.min.js'))
.pipe(gulp.dest('/js/feature1'));
});
// task for building feature2 — identical pipeline, different folder
gulp.task('minify-feature2', function() {
var sources = '/js/feature2/*';
return gulp.src(sources)
.pipe(uglify())
.pipe(concat('feature2.min.js'))
.pipe(gulp.dest('/js/feature2'));
});
// umbrella task that runs both feature builds
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions to enable them to be processed by gulp.parallel - in a way where the number of functions I would generate would be variable. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition because I do lots of things when building each module folder.
The array of functions needs to passed into my build task using javascript apply() since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
// Build (but do not run) one gulp.series composition per module folder and
// return them as an array, ready to be spread into gulp.parallel via apply().
function getModuleFunctions() {
//Get list of folders as per recipe above - in my case an array named modules
//For each module return a function or composition (gulp.series in this case).
return modules.map(function (m) {
var moduleDest = env.folder + 'modules/' + m;
return gulp.series(
//Illustrative functions... all must return a stream or call callback but you can have as many functions or compositions (gulp.series or gulp.parallel) as desired
function () {
return gulp.src('modules/' + m + '/img/*', { buffer: false })
.pipe(gulp.dest(moduleDest + '/img'));
},
function (done) {
console.log('In my function');
done();
}
);
});
}
//Illustrative build task, running two named tasks then processing all modules generated above in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
`

Hot Code Push NodeJS

I've been trying to figure out this "Hot Code Push" on Node.js. Basically, my main file (that is run when you type node app.js) consists of some settings, configurations, and initializations. In that file I have a file watcher, using chokidar. When I file has been added, I simply require the file. If a file has been changed or updated I would delete the cache delete require.cache[path] and then re-require it. All these modules don't export anything, it just works with the single global Storm object.
// Watch the server/app/public trees with chokidar and dispatch add/change/
// unlink events to the fileCreated/fileUpdated/fileDeleted handlers.
Storm.watch = function() {
var chokidar, directories, self = this;
chokidar = require('chokidar');
directories = ['server/', 'app/server', 'app/server/config', 'public'];
// NOTE(review): clientPath and watcher are implicit globals in the original;
// kept as-is in case other modules read them, but consider declaring them.
clientPath = new RegExp(_.regexpEscape(path.join('app', 'client')));
watcher = chokidar.watch(directories, {
ignored: function(_path) {
// BUG FIX: both match results were computed and then discarded, so this
// callback always returned undefined and nothing was ever ignored.
if (_path.match(/\./)) {
// ignore files whose extension is not js/coffee/iced/styl
return !_path.match(/\.(js|coffee|iced|styl)$/);
} else {
// ignore directories outside app/config/public
return !_path.match(/(app|config|public)/);
}
},
persistent: true
});
watcher.on('add', function(_path){
self.fileCreated(path.resolve(Storm.root, _path));
//Storm.logger.log(Storm.cliColor.green("File Added: ", _path));
//_console.info("File Updated");
console.log(Storm.css.compile(' {name}: {file}', "" +
"name" +
"{" +
"color: white;" +
"font-weight:bold;" +
"}" +
"hr {" +
"background: grey" +
"}")({name: "File Added", file: _path.replace(Storm.root, ""), hr: "=================================================="}));
});
watcher.on('change', function(_path){
_path = path.resolve(Storm.root, _path);
// a change event for a path that no longer exists is treated as a delete
if (fs.existsSync(_path)) {
if (_path.match(/\.styl$/)) {
self.clientFileUpdated(_path);
} else {
self.fileUpdated(_path);
}
} else {
self.fileDeleted(_path);
}
//Storm.logger.log(Storm.cliColor.green("File Changed: ", _path));
console.log(Storm.css.compile(' {name}: {file}', "" +
"name" +
"{" +
"color: yellow;" +
"font-weight:bold;" +
"}" +
"hr {" +
"background: grey" +
"}")({name: "File Changed", file: _path.replace(Storm.root, ""), hr: "=================================================="}));
});
watcher.on('unlink', function(_path){
self.fileDeleted(path.resolve(Storm.root, _path));
//Storm.logger.log(Storm.cliColor.green("File Deleted: ", _path));
console.log(Storm.css.compile(' {name}: {file}', "" +
"name" +
"{" +
"color: red;" +
"font-weight:bold;" +
"}" +
"hr {" +
"background: grey" +
"}")({name: "File Deleted", file: _path.replace(Storm.root, ""), hr: "=================================================="}));
});
watcher.on('error', function(error){
console.log(error);
});
};
// Handle a newly added file; view files are managed elsewhere and skipped.
Storm.watch.prototype.fileCreated = function(_path) {
if (_path.match('views')) {
return;
}
// NOTE(review): this requires the file only when require.resolve() THROWS,
// i.e. when the path cannot be resolved — in which case require() would
// throw the same error. The intended condition (require only if not already
// loaded?) should be confirmed; a successful resolve does not mean cached.
try {
require.resolve(_path);
} catch (error) {
require(_path);
}
};
// Evict the module from require's cache so a later require() re-reads it.
// NOTE(review): require.resolve throws if the path no longer resolves (e.g.
// the file was just deleted) — a try/catch may be needed here.
Storm.watch.prototype.fileDeleted = function(_path) {
delete require.cache[require.resolve(_path)];
};
// Route a changed file to the appropriate reload strategy based on its path
// (templates, helpers, asset config, models/controllers, routes, or default).
Storm.watch.prototype.fileUpdated = function(_path) {
var self = this;
// NOTE(review): `pattern` is assigned without var — an implicit global.
pattern = function(string) {
return new RegExp(_.regexpEscape(string));
};
if (_path.match(pattern(path.join('app', 'templates')))) {
// templates only need their in-memory cache flushed
Storm.View.cache = {};
} else if (_path.match(pattern(path.join('app', 'helpers')))) {
// NOTE(review): passes the `path` MODULE, not `_path`, to reloadPath —
// likely a typo; confirm intent.
self.reloadPath(path, function(){
self.reloadPaths(path.join(Storm.root, 'app', 'controllers'));
});
} else if (_path.match(pattern(path.join('config', 'assets.coffee')))) {
self.reloadPath(_path, function(error, config) {
//Storm.config.assets = config || {};
});
} else if (_path.match(/app\/server\/(models|controllers)\/.+\.(?:coffee|js|iced)/)) {
var isController, directory, klassName, klass;
self.reloadPath(_path, function(error, config) {
if (error) {
throw new Error(error);
}
});
Storm.serverRefresh();
// relies on RegExp.$1 still holding the capture from the match above
isController = RegExp.$1 == 'controllers';
directory = 'app/' + RegExp.$1;
// derive a camelized class name from the file name (strip extension)
klassName = _path.split('/');
klassName = klassName[klassName.length - 1];
klassName = klassName.split('.');
klassName.pop();
klassName = klassName.join('.');
klassName = _.camelize(klassName);
// NOTE(review): `klass` is declared but never assigned, so this branch
// always takes the require() path; the else arm is dead code.
if (!klass) {
require(_path);
} else {
console.log(_path);
self.reloadPath(_path)
}
} else if (_path.match(/config\/routes\.(?:coffee|js|iced)/)) {
self.reloadPath(_path);
} else {
this.reloadPath(_path);
}
};
// Evict `_path` (and the main server module) from require's cache, re-require
// the server, then re-require the target on the next tick and hand the fresh
// exports to `cb(null, result)` if provided.
Storm.watch.prototype.reloadPath = function(_path, cb) {
// normalise to an absolute, resolved module id
_path = require.resolve(path.resolve(Storm.root, path.relative(Storm.root, _path)));
delete require.cache[_path];
// the server module is evicted too so it re-binds against the new code
delete require.cache[path.resolve(path.join(Storm.root, "server", "application", "server.js"))];
//console.log(require.cache[path.resolve(path.join(Storm.root, "server", "application", "server.js"))]);
require("./server.js");
Storm.App.use(Storm.router);
// defer the re-require so the freshly loaded server is in place first
process.nextTick(function(){
Storm.serverRefresh();
var result = require(_path);
if (cb) {
cb(null, result);
}
});
};
// Stub: intended to reload every module under `directory`; not implemented
// (the author notes some of this code is incomplete / experimental).
Storm.watch.prototype.reloadPaths = function(directory, cb) {
};
Some of the code is incomplete / not used as I'm trying a lot of different methods.
What's Working:
For code like the following:
// Demo function used in the discussion: simply logs the number 123.
function run() {
var value = 123;
console.log(value);
}
Works perfectly. But any asynchronous code fails to update.
Problem = Asynchronous Code
app.get('/', function(req, res){
// code here..
});
If I then update the file when the nodejs process is running, nothing happens, though it goes through the file watcher and the cache is deleted, then re-established. Another instance where it doesn't work is:
// middleware.js
function hello(req, res, next) {
// code here...
}
// another file:
app.use(hello);
As app.use would still be using the old version of that method.
Question:
How could I fix the problem? Is there something I'm missing?
Please don't throw suggestions to use 3rd party modules like forever. I'm trying to incorporate the functionality within the single instance.
EDIT:
After studying meteors codebase (there's surprisingly little resources on "Hot Code Push" in node.js or browser.) and tinkering around with my own implementation I've successfully made a working solution. https://github.com/TheHydroImpulse/Refresh.js . This is still at an early stage of development, but it seems solid right now. I'll be implementing a browser solution too, just for sake of completion.
Deleting require's cache doesn't actually "unload" your old code, nor does it undo what that code did.
Take for example the following function:
// Collects every callback handed to registerCallback below.
var callbacks=[];
// NOTE: deliberately assigned without var so it becomes a global callable from
// any module — this is what makes the hot-reload duplication in the
// surrounding discussion observable (each reload pushes another callback).
registerCallback = function(cb) {
callbacks.push(cb);
};
Now let's say you have a module that calls that global function.
registerCallback(function() { console.log('foo'); });
After your app starts up, callbacks will have one item. Now we'll modify the module.
registerCallback(function() { console.log('bar'); });
Your 'hot patching' code runs, deletes the require.cached version and re-loads the module.
What you must realize is that now callbacks has two items. First, it has a reference to the function that logs foo (which was added on app startup) and a reference to the function that logs bar (which was just added).
Even though you deleted the cached reference to the module's exports, you can't actually delete the module. As far as the JavaScript runtime is concerned, you simply removed one reference out of many. Any other part of your application can still be hanging on to a reference to something in the old module.
This is exactly what is happening with your HTTP app. When the app first starts up, your modules attach anonymous callbacks to routes. When you modify those modules, they attach a new callback to the same routes; the old callbacks are not deleted. I'm guessing that you're using Express, and it calls route handlers in the order they were added. Thus, the new callback never gets a chance to run.
To be honest, I wouldn't use this approach to reloading you app on modification. Most people write app initialization code under the assumption of a clean environment; you're violating that assumption by running initialization code in a dirty environment – that is, one which is already up and running.
Trying to clean up the environment to allow your initialization code to run is almost certainly more trouble than it's worth. I'd simply restart the entire app when your underlying files have changed.
Meteor solves this problem by allowing modules to "register" themselves as part of the hot code push process.
They implement this in their reload package:
https://github.com/meteor/meteor/blob/master/packages/reload/reload.js#L105-L109
I've seen that Meteor.reload API used in some plugins on GitHub, but they also use it in the session package:
https://github.com/meteor/meteor/blob/master/packages/session/session.js#L103-L115
// Meteor session package: when hot code push is available, register a
// migration that saves Session.keys, and on startup restore any keys that a
// previous incarnation of the page saved before reloading.
if (Meteor._reload) {
// save state: return [ready, data] for the 'session' migration slot
Meteor._reload.onMigrate('session', function () {
return [true, {keys: Session.keys}];
});
// restore state left by the pre-reload page, if any
(function () {
var migrationData = Meteor._reload.migrationData('session');
if (migrationData && migrationData.keys) {
Session.keys = migrationData.keys;
}
})();
}
So basically, when the page/window loads, meteor runs a "migration", and it's up to the package to define the data/methods/etc. that get recomputed when a hot code push is made.
It's also being used by their livedata package (search reload).
Between refreshes they're saving the "state" using window.sessionStorage.

Categories

Resources