I want to select a couple of files and generate concatenated files from them. I also want to re-concatenate whenever my files change.
This is my assets tree:
assets
- css
- js
- sass
  - _midia_queries.scss
  - media-combine.scss
  - query.scss
So, I want to generate maximum.css (from ./assets/css/media-combine.min.css) and medium.css (from ./assets/css/media-combine.min.css and ./assets/css/query.min.css).
Both .min files are being generated by my css task. See below:
gulp.task('css', function (done) {
    gulp.src(paths.sass.src)
        .pipe(plugins.changed('dist'))
        .pipe(plugins.sass())
        .pipe(plugins.combineMediaQueries())
        .pipe(plugins.autoprefixer())
        .pipe( isProduction ? plugins.minifyCss() : gutil.noop() )
        .pipe( isProduction ? plugins.rename({ suffix: ".min" }) : gutil.noop() )
        .pipe(gulp.dest(paths.css.dest))
        .on("end", done);
});
And my concat-css task uses my (admittedly bad) approach to generating multiple files:
gulp.task('concat-css', ['css'], function (done) {
    concatType("css", done);
});

function concatType (type, done) {
    if (isProduction) {
        var task = gulp;
        filesToConcat[type].forEach(function (value) {
            value.files.forEach(function (file, fileIndex) {
                value.files[fileIndex] = paths[type].dest + file;
            });
            gutil.log('File', gutil.colors.green(value.compiled), 'is being concatenated');
            task = gulp.src(value.files)
                .pipe(plugins.concat(value.compiled))
                .pipe(gulp.dest(paths[type].dest));
            gutil.log('Ready to go!');
        });
        task.on("end", done);
    }
}
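(For reference, filesToConcat isn't shown above; given the outputs described earlier, it is assumed to be shaped roughly like this - the entries are illustrative:)

var filesToConcat = {
    css: [
        // output file name + input files relative to paths.css.dest (assumed './assets/css/')
        { compiled: 'maximum.css', files: ['media-combine.min.css'] },
        { compiled: 'medium.css',  files: ['media-combine.min.css', 'query.min.css'] }
    ]
};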
Update
When I type gulp in my console/terminal, it runs my css and concat-css tasks perfectly. But when the watch task is triggered by a file change, both tasks run, yet only css works: my .min files are updated, but my concatenated files are not.
Any ideas on how to approach this? Thanks.
Sorry, it was a logic problem.
In my concatType function I was prepending the destination path to the targeted files, but when the task ran again under watch, the path was prepended a second time, like this:
./assets/css/./assets/css/query.min.css
That would never work. So, I've changed this:
value.files.forEach(function (file, fileIndex) {
    value.files[fileIndex] = paths[type].dest + file;
});
task = gulp.src(value.files)
To this:
var files = [];
value.files.forEach(function (file) {
    files.push(paths[type].dest + file);
});
task = gulp.src(files)
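For completeness, here is a sketch of the corrected concatType with the fresh local array in place (same assumed filesToConcat/paths config as above):

function concatType (type, done) {
    if (isProduction) {
        var task = gulp;
        filesToConcat[type].forEach(function (value) {
            // build the full paths into a fresh array so the shared
            // config is never mutated between watch runs
            var files = [];
            value.files.forEach(function (file) {
                files.push(paths[type].dest + file);
            });
            gutil.log('File', gutil.colors.green(value.compiled), 'is being concatenated');
            task = gulp.src(files)
                .pipe(plugins.concat(value.compiled))
                .pipe(gulp.dest(paths[type].dest));
        });
        task.on("end", done);
    }
}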
Related
I'm trying to write a node.js script that watches for changes in a directory of files and prints the names of the files as they are changed. How can I modify this script so that it watches a directory (instead of an individual file) and prints the names of the files in the directory as they change?
var fs = require('fs');

var file = '/home/anderson/Desktop/fractal.png'; // this watches a file, but I want to watch a directory instead
fs.watchFile(file, function (curr, prev) {
    console.log('File was modified.'); // is there some way to print the names of the files in the directory as they are modified?
});
Try Chokidar:
var chokidar = require('chokidar');

var watcher = chokidar.watch('file or dir', { ignored: /^\./, persistent: true });

watcher
    .on('add', function (path) { console.log('File', path, 'has been added'); })
    .on('change', function (path) { console.log('File', path, 'has been changed'); })
    .on('unlink', function (path) { console.log('File', path, 'has been removed'); })
    .on('error', function (error) { console.error('Error happened', error); });
Chokidar solves some of the cross-platform issues you hit when watching files with fs alone.
Why not just use the old fs.watch? It's pretty straightforward.
fs.watch('/path/to/folder', (eventType, filename) => {
    // eventType is either 'rename' or 'change'; new-file and delete
    // events also generally emit 'rename'
    console.log(eventType);
    console.log(filename);
});
For more info and details about the options param, see Node fs Docs
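For example, the options parameter accepts a recursive flag to watch subdirectories as well. A sketch (note that recursive watching is only supported on some platforms, e.g. macOS and Windows, and only on newer Node versions on Linux):

fs.watch('/path/to/folder', { recursive: true }, (eventType, filename) => {
    // filename is reported relative to the watched folder
    console.log(eventType, filename);
});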
Try hound:
hound = require('hound')

// Create a directory tree watcher.
watcher = hound.watch('/tmp')

// Create a file watcher.
watcher = hound.watch('/tmp/file.txt')

// Add callbacks for file and directory events. The change event only applies
// to files.
watcher.on('create', function (file, stats) {
    console.log(file + ' was created')
})
watcher.on('change', function (file, stats) {
    console.log(file + ' was changed')
})
watcher.on('delete', function (file) {
    console.log(file + ' was deleted')
})

// Unwatch specific files or directories.
watcher.unwatch('/tmp/another_file')

// Unwatch all watched files and directories.
watcher.clear()
The callback executes whenever a file is changed.
I'm trying to write a gulp task that builds the JavaScript and concatenates it into a single file for each folder, including the root folder.
I have found this solution: https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md
If you have a set of folders, and wish to perform a set of tasks on
each, for instance...
/scripts
/scripts/jquery/*.js
/scripts/angularjs/*.js
...and want to end up with...
/scripts
/scripts/jquery.min.js
/scripts/angularjs.min.js
However, this only builds a *.js file for each of the subfolders inside the scripts folder. I also need to build the *.js files sitting directly in the root scripts folder, i.e. my expected output would be:
/scripts.min.js
/scripts/jquery.min.js
/scripts/angularjs.min.js
I'm new to node, so I'm not sure how to achieve that. I'd really appreciate your help on this. Thanks a lot.
You can create a separate task (baseScripts) for creating the minified scripts for the base directory. Then create another task (allScripts) that runs both the baseScripts and subScripts tasks.
// imports assumed by this snippet
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('allScripts', ['baseScripts', 'subScripts']);

gulp.task('subScripts', function () {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/*.js'))
            .pipe(uglify())
            .pipe(rename(folder + '.min.js'))
            .pipe(gulp.dest(scriptsPath));
    });
    return merge(tasks);
});

gulp.task('baseScripts', function () {
    return gulp.src(scriptsPath + '/*.js')
        .pipe(uglify())
        .pipe(concat('scripts.min.js'))
        .pipe(gulp.dest('src'));
});
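(As an aside: if you are on gulp 4, the array-dependency syntax used for allScripts is gone; the rough equivalent would be the following.)

gulp.task('allScripts', gulp.parallel('baseScripts', 'subScripts'));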
After a day, I have come up with a modification of my own, below.
var paths = {
    js: {
        folder: 'dev/assets/js',
        dest: 'dev/assets/js' // output folder - assumed; the original snippet used paths.js.dest without defining it
    }
};

gulp.task('js', function () {
    var folders = getFolders(paths.js.folder);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(paths.js.folder, folder.path, '/*.js'))
            .pipe(uglify())
            .pipe(concat(folder.name + '.min.js'))
            .pipe(gulp.dest(paths.js.dest));
    });
    // return the merged stream so gulp knows when the task finishes,
    // and reload only after everything has been written
    return merge(tasks).on('end', browserSync.reload);
});
var getFolders = function (dir) {
    var folders = [{ path: '', name: 'app' }];
    var folder = fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
    for (var i = 0; i < folder.length; i++) {
        folders.push({ path: folder[i], name: folder[i] });
    }
    return folders;
};
I have separated the directories and the names for the combined scripts into two properties of an object, so I don't need different tasks for the root folder and the subfolders.
Please feel free to give your comments on my approach :)
Under my assets/ folder, I have numerous subfolders, each containing an arbitrary number of images, like so:
assets/article1/
assets/article2/
I'm trying to write a gulp task to locate all .jpg images within and generate their thumbnail versions, to be saved in a thumbs/ subfolder within the folder where each file resides:
assets/article1/ # original jpg images
assets/article1/thumbs/ # thumbnail versions of above..
assets/article2/
assets/article2/thumbs/
I've been trying various approaches but no luck. The closest I've come is:
gulp.task('thumbs', function () {
    return gulp.src('./assets/**/*.jpg')
        .pipe(imageResize({ width: 200 }))
        .pipe(gulp.dest(function (file) { return file.base + '/thumbs/'; }));
});
However, this creates a single thumbs/ folder at the root of assets/:
assets/article1/
assets/article2/
assets/thumbs/article1/
assets/thumbs/article2/
Is there good info on paths and wildcards anywhere? Clearly I'm not handling them well.
You could use path.dirname for that: http://nodejs.org/api/path.html#path_path_dirname_p
// require core module
var path = require('path');

gulp.task('thumbs', function () {
    return gulp.src('./assets/**/*.jpg')
        .pipe(imageResize({ width: 200 }))
        .pipe(gulp.dest(function (file) { return path.join(path.dirname(file.path), 'thumbs'); }));
});
Here's what worked for me, given the following directory structure (I simplified a bit to focus on just getting the files in the right place):
assets/article1/1.jpg
assets/article2/2.jpg
with a desired outcome of
assets/article1/1.jpg
assets/article1/thumbs/1.jpg
assets/article2/2.jpg
assets/article2/thumbs/2.jpg
Here's the task (modified from this recipe: https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md)
var gulp = require('gulp'),
    rename = require('gulp-rename'),
    path = require('path'),
    fs = require('fs');

var scriptsPath = 'assets'; // folder to process

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('thumbs', function () {
    var folders = getFolders(scriptsPath);
    return folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.jpg'))
            .pipe(rename({ dirname: folder + '/thumbs/' }))
            .pipe(gulp.dest(scriptsPath));
    });
});
I've found another approach that may be useful for anyone searching this post.
My original goal was very similar to your request, and this approach may be adjusted to almost any particular need.
// assumed imports for this snippet - the original did not show them
var { src, dest } = require('gulp'),
    sass = require('gulp-sass')(require('sass')),
    rename = require('gulp-rename'),
    tap = require('gulp-tap'),
    flatten = require('gulp-flatten'),
    sourcemaps = require('gulp-sourcemaps'),
    path = require('path');

function scss() {
    // returning the stream (rather than calling a callback early) lets the
    // task runner know when the work is actually done
    return src('./*/scss/*.scss')
        .pipe(sourcemaps.init())
        .pipe(sass())
        .pipe(rename({ extname: '.min.css' }))
        .pipe(tap(function (file) {
            // Enrich the file with crucial information about its original path;
            // you can save anything you may need. In my case the file name is
            // quite enough. It's important that this block comes before any
            // sourcemap write, if you have any.
            file.name = path.basename(file.path);
        }))
        .pipe(sourcemaps.write('.'))
        .pipe(flatten()) // remove the files' relative paths
        .pipe(dest(function (file) {
            // Reconstruct the final path using the insight we saved before,
            // plus the clean base path provided by flatten.
            return path.join(path.dirname(file.path), file.name.replace(/\.min\..+$/, ''), 'static');
        }));
}
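A sketch of how the function might be wired up, assuming gulp 4 (where tasks can simply be exported functions):

exports.scss = scss; // run with: gulp scss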
Using phpStorm, I would like to merge multiple JavaScript files into one.
I installed the closure compiler and configured the file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the architecture of my project (a test project for merging js files):
index.php
js (folder) >
  first.js (+first.min.js),
  second.js (+second.min.js),
  third.js (+third.min.js)
cache (folder) >
  main.js
I would like to merge (first.min.js, second.min.js, third.min.js) into folder cache > main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each js file manually.
Can someone explain the arguments I must use to configure my filewatcher?
I used the npm packages concat, minifier and walk.
Here is the script I made:
var walk = require('walk'),
    concat = require('concat'),
    minifier = require('minifier'),
    files = [];

var JS_SOURCES_DIR = 'app/components',
    JS_LAST_FILE = 'app/app.module.js',
    JS_DIR = 'app/',
    JS_FULL_FILE = JS_DIR + 'app.js',
    JS_MINIFIED_FILE = 'app.min.js',
    JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;

var walker = walk.walk(JS_SOURCES_DIR, { followLinks: false });

walker.on('file', (root, stat, next) => {
    var fullpath = root.replace(/\\/g, '/');
    var regex = new RegExp(/.+\.js$/);
    if (stat.name.match(regex)) {
        files.push(fullpath + '/' + stat.name);
    }
    next();
});

walker.on('end', function () {
    files.push(JS_LAST_FILE);
    files.forEach(function (item) {
        console.log(item);
    });
    concat(files, JS_FULL_FILE).then((result) => {
        minifier.minify(JS_FULL_FILE, { output: JS_MINIFIED_FILE_PATH });
        console.log('\n[OK] ' + JS_MINIFIED_FILE + ' successfully updated');
    }, function (error) {
        console.log('[ERROR] JS concat failure: ' + error.message);
    });
});

minifier.on('error', function (error) {
    console.log('\n[ERROR] JS minify error: ' + error);
});
First, with walker, the files are added to the files array. I used JS_LAST_FILE for angularjs concerns, as I build the module and add all the dependencies in that file. Then the files are concatenated into JS_FULL_FILE. Finally, JS_FULL_FILE is minified into JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated.
Instead, when I work locally, I don't concatenate the files at all; I simply add them to the head of the page with a homemade function based on PHP's scandir().
I'm new to gulp, but I'm wondering if it's possible to iterate through directories in a gulp task.
Here's what I mean. I know a lot of the tutorials/demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't have only 2 directories; I have many (maybe 10-20), so I don't really want to write a task for each. I want to handle each directory the same way: take all of the JS inside it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('scripts', function () {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
            // concat into foldername.js
            .pipe(concat(folder + '.js'))
            // write to output
            .pipe(gulp.dest(scriptsPath))
            // minify
            .pipe(uglify())
            // rename to folder.min.js
            .pipe(rename(folder + '.min.js'))
            // write to output again
            .pipe(gulp.dest(scriptsPath));
    });

    // process all remaining files in scriptsPath root into main.js and main.min.js files
    var root = gulp.src(path.join(scriptsPath, '/*.js'))
        .pipe(concat('main.js'))
        .pipe(gulp.dest(scriptsPath))
        .pipe(uglify())
        .pipe(rename('main.min.js'))
        .pipe(gulp.dest(scriptsPath));

    return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path'); // needed for path.basename below
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var uglify = require('gulp-uglify'); // used but not required in the original
var concat = require('gulp-concat');

gulp.task('minify-features', function () {
    var promises = [];
    glob.sync('/js/features/*').forEach(function (filePath) {
        if (fs.statSync(filePath).isDirectory()) {
            var defer = Q.defer();
            var pipeline = gulp.src(filePath + '/**/*.js')
                .pipe(uglify())
                .pipe(concat(path.basename(filePath) + '.min.js'))
                .pipe(gulp.dest(filePath));
            pipeline.on('end', function () {
                defer.resolve();
            });
            promises.push(defer.promise);
        }
    });
    return Q.all(promises);
});
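On recent Node versions you could drop the Q dependency and wrap each stream in a native Promise instead - a sketch of the equivalent inside the loop:

// replaces the Q.defer() bookkeeping above
var pipelineDone = new Promise(function (resolve, reject) {
    pipeline.on('end', resolve).on('error', reject);
});
promises.push(pipelineDone);
// ...and at the end of the task:
return Promise.all(promises);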
I am still trying to work out myself how streams work in node.
I made a simple example for you of how to make a stream that filters folders and starts a new, given stream for them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function () {
    return es.map(function (data, cb) {
        log.data(data.path);
        cb(null, data);
    });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// pipelines it with the stream given
var forEachFolder = function (stream) {
    return es.map(function (data, cb) {
        if (data.isDirectory()) {
            var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files, for example
            log.info('Piping files found in ' + pathToPass);
            if (stream !== undefined) {
                gulp.src([pathToPass])
                    .pipe(stream());
            }
        }
        cb(null, data);
    });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function () {
    // load some folder with some subfolders inside
    gulp.src('js/*')
        .pipe(forEachFolder(printFileNames));
    // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minifying and concatenating them) and then pass an instance of that stream to the forEachFolder stream I made, just as I do with the printFileNames custom stream; see the sketch below.
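For instance, here is a minimal sketch of such a stream factory, assuming gulp-uglify is installed and using event-stream's es.pipeline to chain the minification with a dest step (the 'build' output folder is illustrative):

var uglify = require('gulp-uglify');

// factory producing a stream that minifies each file and writes it to 'build/'
var minifyFolder = function () {
    return es.pipeline(
        uglify(),
        gulp.dest('build')
    );
};

gulp.task('minify-folders', function () {
    gulp.src('js/*')
        .pipe(forEachFolder(minifyFolder));
});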
Give it a try and let me know if it works for you.
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
//task for building feature1
gulp.task('minify-feature1', function() {
return gulp.src('/js/feature1/*')
.pipe(uglify()) //minify feature1 stuff
.pipe(concat('feature1.min.js')) //concat into single file
.pipe(gulp.dest('/js/feature1')); //output to dir
});
//task for building feature2
gulp.task('minify-feature2', function() { //do the same for feature2
return gulp.src('/js/feature2/*')
.pipe(uglify())
.pipe(concat('feature2.min.js'))
.pipe(gulp.dest('/js/feature2'));
});
//generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions, so they can be processed by gulp.parallel - in a way where the number of functions generated can vary. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition, because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
    // Get the list of folders as per the recipe above - in my case an array named modules.
    // For each module, return a function or composition (gulp.series in this case).
    return modules.map(function (m) {
        var moduleDest = env.folder + 'modules/' + m;
        return gulp.series(
            // Illustrative functions... each must return a stream or call its
            // callback, but you can have as many functions or compositions
            // (gulp.series or gulp.parallel) as desired
            function () {
                return gulp.src('modules/' + m + '/img/*', { buffer: false })
                    .pipe(gulp.dest(moduleDest + '/img'));
            },
            function (done) {
                console.log('In my function');
                done();
            }
        );
    });
}

// Illustrative build task: run two named tasks, then process all modules generated
// above in parallel, as dynamic arguments to gulp.parallel - the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
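(With modern JavaScript, spread syntax is an equivalent, slightly tidier alternative to apply():)

gulp.task('build', gulp.series('clean', 'test', gulp.parallel(...getModuleFunctions())));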