Using PhpStorm, I would like to merge multiple JavaScript files into one.
I installed the Closure Compiler and configured a file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the structure of my project (a test project for merging JS files):
index.php
js/ (folder)
    first.js (+ first.min.js)
    second.js (+ second.min.js)
    third.js (+ third.min.js)
cache/ (folder)
    main.js
I would like to merge first.min.js, second.min.js, and third.min.js into cache/main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each JS file manually.
Can someone explain the arguments I must use to configure my file watcher?
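For what it's worth, the underlying Closure Compiler invocation for such a merge would look something like this (a sketch assuming the same compiler.jar the file watcher already uses, with the paths from the layout above):
java -jar compiler.jar --js js/first.min.js --js js/second.min.js --js js/third.min.js --js_output_file cache/main.js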
I used the npm packages concat, minifier, and walk.
Here is the script I made:
var walk = require('walk'),
    concat = require('concat'),
    minifier = require('minifier'),
    files = [];

var JS_SOURCES_DIR = 'app/components',
    JS_LAST_FILE = 'app/app.module.js',
    JS_DIR = 'app/',
    JS_FULL_FILE = JS_DIR + 'app.js',
    JS_MINIFIED_FILE = 'app.min.js',
    JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;

var walker = walk.walk(JS_SOURCES_DIR, {followLinks: false});

// Collect every .js file found under JS_SOURCES_DIR
walker.on('file', (root, stat, next) => {
    var fullpath = root.replace(/\\/g, '/');
    var regex = /.+\.js$/;
    if (stat.name.match(regex)) {
        files.push(fullpath + '/' + stat.name);
    }
    next();
});

// Once the walk is done, concatenate everything, then minify the result
walker.on('end', function () {
    files.push(JS_LAST_FILE);
    files.forEach(function (item) {
        console.log(item);
    });
    concat(files, JS_FULL_FILE).then((result) => {
        minifier.minify(JS_FULL_FILE, {output: JS_MINIFIED_FILE_PATH});
        console.log('\n[OK] ' + JS_MINIFIED_FILE + ' successfully updated');
    }, function (error) {
        console.log('[ERROR] JS concat failure: ' + error.message);
    });
});

minifier.on('error', function (error) {
    console.log('\n[ERROR] JS minify error: ' + error);
});
First, the walker adds every file to the files array. I push JS_LAST_FILE last for AngularJS reasons, as that file builds the module and registers all the dependencies. The files are then concatenated into JS_FULL_FILE, and finally JS_FULL_FILE is minified into JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated. Instead, when I work locally, I don't concatenate the files at all; I simply add them to the head of the page with a homemade function based on PHP's scandir().
Related
I'm building an MVC application, which is making use of Areas. So I've got a folder structure as follows:
Areas/
    AreaOne/
        Views/
            About/
                ScriptOne.js
                ScriptTwo.js
                Index.cshtml
            Home/
                ScriptThree.js
                ScriptFour.js
                Index.cshtml
    AreaTwo/
        Views/...
I'd like to bundle the JavaScript files within each page's folder under the Views folder.
E.g. ScriptOne.js and ScriptTwo.js should be bundled into About.js and output to a separate folder like:
js/
    Areas/
        AreaOne/
            About.js
            Home.js
I'm able to grab the files; however, it grabs all .js files under Areas:
gulp.src('./Areas/**/*.js')
Areas\AreaOne\Views\About\ScriptOne.js
Areas\AreaOne\Views\About\ScriptTwo.js
Areas\AreaOne\Views\Home\ScriptThree.js
Areas\AreaOne\Views\Home\ScriptFour.js
If I concat this I'll end up with a single .js file; however, I'd like About.js and Home.js.
If I can achieve the above, in theory I can use the following (but I'm unsure how to get the appropriate area name):
pipe(gulp.dest('./js/Areas/...AREA NAME GOES HERE.../'))
I'm trying to avoid manually listing each file for concatenation, like so:
gulp.src(['./Areas/AreaOne/Views/About/ScriptOne.js', './Areas/AreaOne/Views/About/ScriptTwo.js'])
Not sure it's the most succinct way, but it works and it's fairly simple.
I use glob to get the file names within the Areas.
I then add the view paths to a JavaScript object (this works like a set: it prevents duplicates), along with the name of the view folder, which becomes the output file name.
I use gulp.src with the distinct view folder names to collect all of the JavaScript files within each view folder, concat them, and finally, if it's a release build, uglify them.
var gulp = require('gulp'),
    concat = require('gulp-concat'),
    uglify = require('gulp-uglify'),
    glob = require('glob'),
    path = require('path');

gulp.task('process-areas-js', function () {
    glob('./Areas/**/*.js*', null, function (er, files) {
        // Build a map of distinct view folder path -> view folder name
        var distinctAreaPaths = {};
        for (var i = 0; i < files.length; i++) {
            var filePath = path.dirname(files[i]).replace(/\./g, "");
            var viewFolderName = path.basename(filePath);
            distinctAreaPaths[filePath] = viewFolderName;
        }
        // One concat (+ optional uglify) pipeline per distinct view folder
        for (var distinctPath in distinctAreaPaths) {
            var concatName = distinctAreaPaths[distinctPath] + ".js";
            var destination = './js' + path.dirname(distinctPath);
            var sourceGlob = "." + distinctPath + "/*.js";
            var pipeline = gulp.src(sourceGlob)
                .pipe(concat(concatName));
            uglifyOnRelease(pipeline)
                .pipe(gulp.dest(destination));
        }
    });
});

gulp.task('default', ['process-areas-js']);

function uglifyOnRelease(pipeline) {
    return process.env.NODE_ENV === 'Release' || process.env.NODE_ENV == null ? pipeline.pipe(uglify()) : pipeline;
}
I want to select a couple of files and generate concatenated files from them, and I also want to re-concatenate files when they change.
This is my assets tree:
assets/
    css/
    js/
    sass/
        _midia_queries.scss
        media-combine.scss
        query.scss
So, I want to generate maximum.css (from ./assets/css/media-combine.min.css) and medium.css (from ./assets/css/media-combine.min.css and ./assets/css/query.min.css).
Both .min files are generated by my css task, shown below:
gulp.task('css', function (done) {
    gulp.src(paths.sass.src)
        .pipe(plugins.changed('dist'))
        .pipe(plugins.sass())
        .pipe(plugins.combineMediaQueries())
        .pipe(plugins.autoprefixer())
        .pipe( isProduction ? plugins.minifyCss() : gutil.noop() )
        .pipe( isProduction ? plugins.rename({ suffix: ".min" }) : gutil.noop() )
        .pipe(gulp.dest(paths.css.dest))
        .on("end", done);
});
And my concat-css task uses my flawed approach to generating multiple files:
gulp.task('concat-css', ['css'], function (done) {
    concatType("css", done);
});

function concatType(type, done) {
    if (isProduction) {
        var task = gulp;
        filesToConcat[type].forEach(function (value) {
            value.files.forEach(function (file, fileIndex) {
                value.files[fileIndex] = paths[type].dest + file;
            });
            gutil.log('File', gutil.colors.green(value.compiled), 'is being concatenated');
            task = gulp.src(value.files)
                .pipe(plugins.concat(value.compiled))
                .pipe(gulp.dest(paths[type].dest));
            gutil.log('Ready to go!');
        });
        task.on("end", done);
    }
}
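filesToConcat isn't shown above; judging from how it's used, it presumably looks something like this sketch (output names taken from the goal stated earlier):
var filesToConcat = {
    css: [
        { compiled: 'maximum.css', files: ['media-combine.min.css'] },
        { compiled: 'medium.css', files: ['media-combine.min.css', 'query.min.css'] }
    ]
};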
Update
When I type gulp in my console/terminal, my css and concat-css tasks run perfectly, but when the watch task is triggered by a file change, both run yet only css works: my .min files are updated, while my concatenated files are not.
Any ideas on how to approach this? Thanks.
Sorry, it was a logic problem.
In my concatType function I was building the paths of the targeted files, but when the task ran again under watch, the destination prefix was applied a second time, producing paths like:
./assets/css/./assets/css/query.min.css
That could never work: the forEach was mutating value.files in place. So I changed this:
value.files.forEach(function (file, fileIndex) {
    value.files[fileIndex] = paths[type].dest + file;
});
task = gulp.src(value.files)
For this:
var files = [];
value.files.forEach(function (file) {
    files.push(paths[type].dest + file);
});
task = gulp.src(files)
I'm trying to write a gulp task that builds JavaScript files and concatenates them into a single file per folder, including the root folder.
I have found this solution: https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md
If you have a set of folders, and wish to perform a set of tasks on
each, for instance...
/scripts
/scripts/jquery/*.js
/scripts/angularjs/*.js
...and want to end up with...
/scripts
/scripts/jquery.min.js
/scripts/angularjs.min.js
However, this only builds a *.js file for each subfolder inside the scripts folder. I also want to build the *.js file for the root scripts folder itself, i.e. my expected output is:
/scripts.min.js
/scripts/jquery.min.js
/scripts/angularjs.min.js
I'm new to Node, so I'm confused about how to achieve that. I really appreciate your help on this. Thanks a lot.
You can create a separate task (baseScripts) for creating the minified scripts for the base directory. Then create another task (allScripts) that runs both the baseScripts and subScripts tasks.
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('allScripts', ['baseScripts', 'subScripts']);

gulp.task('subScripts', function() {
    var folders = getFolders(scriptsPath);
    // one minified bundle per subfolder
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/*.js'))
            .pipe(concat(folder + '.js'))
            .pipe(uglify())
            .pipe(rename(folder + '.min.js'))
            .pipe(gulp.dest(scriptsPath));
    });
    return merge(tasks);
});

gulp.task('baseScripts', function(){
    return gulp.src(scriptsPath + '/*.js')
        .pipe(uglify())
        .pipe(concat('scripts.min.js'))
        .pipe(gulp.dest('src'));
});
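Then a single command builds both the root bundle and the per-folder bundles:
$ gulp allScripts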
After a day, I have come up with the following modification.
var paths = {
    js: {
        folder: 'dev/assets/js'
    }
};

gulp.task('js', function() {
    var folders = getFolders(paths.js.folder);
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(paths.js.folder, folder.path, '/*.js'))
            .pipe(uglify())
            .pipe(concat(folder.name + '.min.js'))
            .pipe(gulp.dest(paths.js.dest));
    });
    var stream = merge(tasks);
    // reload only once every bundle has been written
    stream.on('end', browserSync.reload);
    return stream;
});

var getFolders = function(dir) {
    // the root folder is included as an entry with an empty path, bundled as 'app'
    var folders = [{path: '', name: 'app'}];
    var folder = fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
    for (var i = 0; i < folder.length; i++) {
        folders.push({path: folder[i], name: folder[i]});
    }
    return folders;
};
I have separated the directory and the name of the combined script into two properties of an object, so I don't need separate tasks for the root folder and the subfolders.
Please feel free to give your comments on my approach :)
I'm new to gulp, but I'm wondering if it's possible to iterate through directories in a gulp task.
Here's what I mean: I know a lot of the tutorials/demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...and I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js, where the first contains the first four files and the second contains the last two.
Is this possible, or am I going to have to add those directories to a manifest manually? It would be really nice to iterate programmatically over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't have only 2 directories; I have many (maybe 10-20), so I don't really want to write a task for each one. I want to handle every directory the same way: take all of the JS inside it (and any sub-directories) and compile it down to one feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('scripts', function() {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
            // concat into foldername.js
            .pipe(concat(folder + '.js'))
            // write to output
            .pipe(gulp.dest(scriptsPath))
            // minify
            .pipe(uglify())
            // rename to folder.min.js
            .pipe(rename(folder + '.min.js'))
            // write to output again
            .pipe(gulp.dest(scriptsPath));
    });
    // process all remaining files in scriptsPath root into main.js and main.min.js files
    var root = gulp.src(path.join(scriptsPath, '/*.js'))
        .pipe(concat('main.js'))
        .pipe(gulp.dest(scriptsPath))
        .pipe(uglify())
        .pipe(rename('main.min.js'))
        .pipe(gulp.dest(scriptsPath));
    return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

gulp.task('minify-features', function() {
    var promises = [];
    glob.sync('/js/features/*').forEach(function(filePath) {
        if (fs.statSync(filePath).isDirectory()) {
            var defer = Q.defer();
            // minify and concat everything in this feature folder
            var pipeline = gulp.src(filePath + '/**/*.js')
                .pipe(uglify())
                .pipe(concat(path.basename(filePath) + '.min.js'))
                .pipe(gulp.dest(filePath));
            pipeline.on('end', function() {
                defer.resolve();
            });
            promises.push(defer.promise);
        }
    });
    return Q.all(promises);
});
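As a side note, the same pattern works with native promises if you'd rather not depend on Q; the wrapper is a sketch:
var promise = new Promise(function(resolve, reject) {
    pipeline.on('end', resolve).on('error', reject);
});
promises.push(promise);
// ...and at the end of the task:
return Promise.all(promises);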
I've been trying to understand how streams work in Node myself.
I made a simple example of how to build a stream that filters folders and starts a new given stream for each of them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file comes into it
var printFileNames = function(){
    return es.map(function(data, cb){
        log.data(data.path);
        cb(null, data);
    });
};

// make a stream that identifies whether the given 'file' is a directory, and if so
// pipes its contents into the stream given
var forEachFolder = function(stream){
    return es.map(function(data, cb){
        if(data.isDirectory()){
            var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files, for example
            log.info('Piping files found in ' + pathToPass);
            if(stream !== undefined){
                gulp.src([pathToPass])
                    .pipe(stream());
            }
        }
        cb(null, data);
    });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function(){
    // load some folder with some subfolders inside
    gulp.src('js/*')
        .pipe(forEachFolder(printFileNames));
    // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minify and concatenate them) and then pass that stream to the forEachFolder stream I made, the same way I do with the printFileNames custom stream; a sketch follows.
Give it a try and let me know if it works for you.
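For example, a minify-and-write factory could look like the following sketch; it assumes gulp-uglify, uses event-stream's pipeline helper to combine the two steps into one stream, and the js/min output folder is made up:

var uglify = require('gulp-uglify');

// a factory that minifies each file it receives and writes it to js/min (hypothetical path)
var minifyFiles = function(){
    return es.pipeline(
        uglify(),
        gulp.dest('js/min')
    );
};

gulp.task('minify-folders', function(){
    return gulp.src('js/*')
        .pipe(forEachFolder(minifyFiles));
});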
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
var gulp = require('gulp'),
    concat = require('gulp-concat'),
    uglify = require('gulp-uglify');

// task for building feature1
gulp.task('minify-feature1', function() {
    return gulp.src('/js/feature1/*')
        .pipe(uglify())                   // minify feature1 stuff
        .pipe(concat('feature1.min.js'))  // concat into a single file
        .pipe(gulp.dest('/js/feature1')); // output to dir
});

// task for building feature2 (same as feature1)
gulp.task('minify-feature2', function() {
    return gulp.src('/js/feature2/*')
        .pipe(uglify())
        .pipe(concat('feature2.min.js'))
        .pipe(gulp.dest('/js/feature2'));
});

// generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions so they can be processed by gulp.parallel, in a way where the number of generated functions can vary. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition, because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to become an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
    // Get the list of folders as per the recipe above - in my case an array named modules.
    // For each module, return a function or composition (gulp.series in this case).
    return modules.map(function (m) {
        var moduleDest = env.folder + 'modules/' + m;
        return gulp.series(
            // Illustrative functions... each must return a stream or call its callback, but you
            // can have as many functions or compositions (gulp.series or gulp.parallel) as desired
            function () {
                return gulp.src('modules/' + m + '/img/*', { buffer: false })
                    .pipe(gulp.dest(moduleDest + '/img'));
            },
            function (done) {
                console.log('In my function');
                done();
            }
        );
    });
}

// Illustrative build task: run two named tasks, then process all the modules generated above
// in parallel, as dynamic arguments to gulp.parallel - the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
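On a Node version with spread syntax, the same apply() call can be written more directly (equivalent, just shorter):
gulp.task('build', gulp.series('clean', 'test', gulp.parallel(...getModuleFunctions())));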
I'm working on a new kind of structure, with scalability in mind, for an Express application.
Issue:
I don't want to define each route in the app.js file.
Solution:
Make something automated that loads the routes automatically.
So far, I have this code in the index.js file of the routes folder:
var fs = require('fs');

module.exports = function(app) {
    recursive_require(__dirname, app);

    function recursive_require(directory, app) {
        fs.readdirSync(directory).forEach(function(file) {
            if (fs.lstatSync(directory + '/' + file + '/').isDirectory()) {
                var has_js_files = false;
                directory = directory + '/' + file + '/';
                console.log('Scanning recursively on ' + directory);
                // We run across the directory to check if there are any js files.
                fs.readdirSync(directory).forEach(function(file) {
                    console.log('Reading file/directory ' + file);
                    if (file.match(/\.js$/g)) {
                        has_js_files = true;
                        console.log('Found js files on directory ' + directory);
                    }
                });
                // If the folder has no js files, we assume there are other folders inside,
                // so we scan the folder recursively.
                if (!has_js_files) {
                    console.log('No JS files found on ' + directory + ' going to scan recursively');
                    recursive_require(directory.substr(0, directory.lastIndexOf('/')));
                } else {
                    // Otherwise, we require the directory, which contains only js files.
                    console.log('Found JS files on ' + directory + ', require them');
                    require(directory)(app);
                }
            }
        });
    }
}
Now, this seems to work, but I have a bit of an issue.
My idea is to have everything in folders, say, with this structure (a sketch of one of those index.js files follows the tree):
routes/
    admin/
        posts/
            index.js <- handles add, remove, edit for posts
        users/
            index.js <- handles add, remove, edit for users
    blog/
        posts/
            index.js <- handles show for the frontend
    index.js <- loads all of the files recursively
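Each of those leaf index.js files exports a function that registers its routes on app; a minimal sketch, with the route path taken from the error trace below:

// routes/admin/posts/index.js (sketch)
module.exports = function(app) {
    app.get('/admin/posts/add', function(req, res) {
        res.send('add post');
    });
};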
Now, I have a bit of an issue with this code: I'm getting this error:
PS C:\Users\bony-_000\Documents\GitHub\node-blog> node app
Scanning recursively on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/
Reading file/directory posts
No JS files found on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/ going to scan recursively
Scanning recursively on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/
Reading file/directory index.js
Found js files on directory C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/
Found JS files on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/, require them
C:\Users\bony-_000\Documents\GitHub\node-blog\routes\admin\posts\index.js:2
app.get('/admin/posts/add', function(req, res) {
^
TypeError: Cannot call method 'get' of undefined
Though I'm passing the app var...
Any help will be much appreciated; also, feel free to use the code.
I've solved all the issues (among other things, the recursive call wasn't passing app along) and now I have my structured MVC online!
This is the code for anyone to use:
var fs = require('fs'),
    required_files = [];

module.exports = function(app) {
    recursive_require(__dirname, __dirname, app);

    function recursive_require(directory, base_dir, app) {
        fs.readdirSync(directory).forEach(function (input) {
            var next_directory = directory + '/' + input + '/';
            // Mark the base dir's index.js as already required, so it is skipped later
            if (!(required_files.indexOf(base_dir + '/index') > -1)) {
                required_files.push(base_dir + '/index');
            }
            // Check if it's a directory
            if (fs.lstatSync(next_directory).isDirectory()) {
                // We require it recursively
                console.log('Reading directory ' + next_directory);
                recursive_require(next_directory.substr(0, next_directory.lastIndexOf('/')), base_dir, app);
            } else {
                // We require all js files in the folder that haven't been required yet
                require_files(directory, app);
                return;
            }
        });
    }

    function require_files(directory, app) {
        fs.readdir(directory, function(err, files) {
            files.forEach(function(file) {
                if (file.match(/\.js$/g)) {
                    var file_path = directory + '/' + file;
                    file_path = file_path.substr(0, file_path.indexOf('.js'));
                    if (required_files.indexOf(file_path) == -1) {
                        required_files.push(file_path);
                        require(file_path)(app);
                    }
                }
            });
        });
        return;
    }
}
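For completeness, wiring it up from app.js is then a one-liner (assuming the file above lives at routes/index.js):

var express = require('express');
var app = express();

// load every route file under routes/ recursively
require('./routes')(app);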
Any suggestions are welcome.