Gulp - add contents of one file to end of multiple files - javascript

I'm fairly new to Gulp, so hopefully this is a simple question. My project comprises many files (more than shown in the example below), and the magic of Gulp lets me combine, minify, run Babel, etc. All good - I've used Grunt for years so I understand the basic concepts, but this project requires Gulp for ((reasons)), so here I am.
The project results in several output scripts. Each one is, basically, the same core application compiled with different configuration options. The config is substantial, so not appropriate to pass in at instantiation. Anyway, all good there, just some background.
The problem I'm having is that the project depends on one vendor script. I don't want to run that script through all the processing; rather, I need to prepend (preferably) or append it to each output script after all processing has happened on the app codebase.
Here's what I've tried, but it's failing. I also tried the commented-out stuff (without the extra task in the series), but the files aren't output to dist until the entire routine is complete (which makes sense, but you know... ever hopeful). Note: I 'anonymized' the script a bit, so hopefully I didn't put any weird syntax errors in... my build script runs fine, I just can't figure out how to get the vendor script prepended without a bunch of ugly hardcoded tasks for each instance (there are several instances, so I'd rather not do it that way).
Thanks in advance!
function instance1(cb) {
    return gulp.src([
        'src/app/data/data_en-us.js',
        'src/app/data/data_pt-br.js',
        'src/app/data/data.js',
        'src/app/instances/_global/global_config.js',
        'src/app/instances/_global/report_config.js',
        'src/app/instances/1/config.js',
        'src/app/instances/1/report_config.js',
        'src/app/core/calculator.js',
        'src/app/core/report.js',
        'src/app/instances/_global/global.js',
        'src/app/instances/1/instance.js',
    ])
    .pipe(sourcemaps.init())
    .pipe(babel({
        presets: ['@babel/env']
    }))
    .pipe(concat('app.js'))
    .pipe(sourcemaps.write('maps'))
    .pipe(gulp.dest('dist'))
    .pipe(minify({preserveComments: 'some'}))
    .pipe(gulp.dest('dist'))
    //.pipe(src(['src/vendor/script.js', 'dist/app.js']))
    //.pipe(concat('dist/app.js'))
    //.pipe(src(['src/vendor/script.js', 'dist/app-min.js']))
    //.pipe(concat('dist/app-min.js'))
    //.pipe(gulp.dest('dist'));
    cb();
}

function appendVendorScript(cb) {
    return gulp.src(['src/vendor/script.js', 'dist/*.js'])
        .pipe(concat())
        .pipe(gulp.dest('dist'));
    cb();
}

exports.build = series(
    cleanup,
    parallel(
        series(instance1, appendVendorScript)
        //more items here for each instance
    )
);

I found a solution that also lets me minimize redundancy. Turns out, the sourcemaps plugin was responsible, so I just removed it. Talking with some colleagues, I learned that they too had issues with that plugin. I guess it's just poorly done, or old, or otherwise not well maintained. Such is life in open source :)
Here's what I came up with. It works well, but when I run build to do them all at once, it somehow breaks the code. It works perfectly when I build them one at a time... so far I haven't been able to explain that one, but one problem at a time!
const coreFilesList = [
    //list of codebase files here
];
const vendorFilesList = [
    //list of vendor files here
];
const outputFileData = {
    prefix: 'app',
    seperator: '_',
    ext: '.js'
};

function instance1(cb) {
    const token = 'instance1';
    process(coreFilesList, token);
    cb();
}

function instance2(cb) {
    const token = 'instance2';
    process(coreFilesList, token);
    cb();
}

function process(srclist, token) {
    let outputFileName = outputFileData.prefix + outputFileData.seperator + token + outputFileData.ext;
    for (let i = 0; i < srclist.length; i++) {
        if (srclist[i].match(/_TOKEN_/)) {
            srclist[i] = srclist[i].replace(/_TOKEN_/g, token);
        }
    }
    return src(srclist)
        .pipe(concat(outputFileName))
        .pipe(babel({
            presets: ['@babel/env']
        }))
        .pipe(src(vendorFilesList))
        .pipe(concat(outputFileName))
        .pipe(minify({preserveComments: 'some'}))
        .pipe(dest('dist'));
}

exports.instance1 = series(cleanup, instance1);
exports.instance2 = series(cleanup, instance2);
//more here
exports.build = series(
    cleanup,
    parallel(
        instance1,
        instance2
        //more here
    )
);
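A hunch about why the combined build misbehaves, offered as a guess rather than a confirmed diagnosis: process() rewrites the shared coreFilesList array in place, so once instance1 has replaced the _TOKEN_ placeholders, instance2 (running in parallel) finds no tokens left and builds from instance1's paths. A minimal sketch of a non-mutating variant, using the same plugins as above but mapping to a fresh array and returning the stream so gulp can track completion:
function process(srclist, token) {
    const outputFileName = outputFileData.prefix + outputFileData.seperator + token + outputFileData.ext;
    // Build a new array rather than rewriting the shared coreFilesList in place,
    // so parallel instances each resolve _TOKEN_ against their own token.
    const resolved = srclist.map((entry) => entry.replace(/_TOKEN_/g, token));
    return src(resolved)
        .pipe(concat(outputFileName))
        .pipe(babel({ presets: ['@babel/env'] }))
        .pipe(src(vendorFilesList))
        .pipe(concat(outputFileName))
        .pipe(minify({ preserveComments: 'some' }))
        .pipe(dest('dist'));
}

// Returning the stream (instead of calling cb() right away) also lets gulp
// know when each instance has actually finished writing to dist.
function instance1() {
    return process(coreFilesList, 'instance1');
}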

Related

Separating/combining gulp task sources using yargs and gulp-if

Let's say I want to include all the JS files in a source folder, concatenated in a specific order (using gulp-concat). However, I want to include certain files only if there is a production flag.
For example, in production I need these files:
modals.js
utilities.js
analytics.js
signup.js
cookies.js
However, for local and staging, I don't want these two:
analytics.js
cookies.js
Is it possible using yargs and perhaps gulp-if to specify this? I know that I could have two lists of sources, for instance:
if (argv.production) {
    return gulp.src([
        'modals.js',
        'utilities.js',
        'analytics.js',
        'signup.js',
        'cookies.js'
    ]);
} else if (argv.staging) {
    return gulp.src([
        'modals.js',
        'utilities.js',
        'signup.js',
    ]);
}
However, this would mean I'm not adhering to DRY principles, and just feels silly and prone to error. Can I somehow stitch these together as a single source that only includes things like analytics and cookies if the task has a production flag?
In an ideal world, I could inject gulp if statements inside the gulp.src array that would only include the production-only files if the correct flag were passed.
Some out-loud thinking:
- Should I just create two files here? One a base that includes everything necessary across environments, and then a production specific one, then combine those? Seems unnecessarily complex, but could solve the problem.
If your goal is to be a bit more DRY, you can give this a shot, since your set of staging files is a subset of the production files:
var base_list = ['modals.js', 'utilities.js', 'signup.js'];

if (argv.staging) {
    return gulp.src(base_list);
} else if (argv.production) {
    var prod_list = base_list.concat(['analytics.js', 'cookies.js']);
    return gulp.src(prod_list);
}
Or more simplified (as long as only production needs the other files, and all other environments get the subset):
var file_list = ['modals.js', 'utilities.js', 'signup.js'];

if (argv.production) {
    file_list = file_list.concat(['analytics.js', 'cookies.js']);
}

return gulp.src(file_list);
There might be some other gulp-specific best practices or tools that could come into play here, but some simple JavaScript might do the trick.
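If you do want to stay closer to the gulp-if idea from the question, here's a hedged sketch of another option (assuming the gulp-if and gulp-ignore plugins are acceptable): always source the full production list, then filter the production-only files out of the stream for other environments.
var gulp = require('gulp');
var concat = require('gulp-concat');
var gulpif = require('gulp-if');
var ignore = require('gulp-ignore');
var argv = require('yargs').argv;

gulp.task('scripts', function () {
    return gulp.src(['modals.js', 'utilities.js', 'analytics.js', 'signup.js', 'cookies.js'])
        // drop the production-only files from the stream unless --production was passed
        .pipe(gulpif(!argv.production, ignore.exclude(['**/analytics.js', '**/cookies.js'])))
        .pipe(concat('all.js'))
        .pipe(gulp.dest('dist'));
});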

I'm using Gulp and failing to produce the final development script for production.

So I'm having a slight problem producing production-ready scripts for my project. I'm using gulp to concatenate and minify my CSS and JS, and while the CSS is working fine, the gulp js task isn't generating my final file. Please refer to my code below:
gulp.task('js', function() {
    return gulp.src([source + 'js/app/**/*.js'])
        .pipe(concat('development.js'))
        .pipe(gulp.dest(source + 'js'))
        .pipe(rename({
            basename: 'production',
            suffix: '-min',
        }))
        .pipe(uglify())
        .pipe(gulp.dest(source + 'js/'))
        .pipe(notify({ message: 'Scripts task complete', onLast: true }));
});
If anyone has encountered a similar problem or has any tips it would be much appreciated :)
There is nothing wrong with your gulpfile. I tested it and it works perfectly.
The only thing I can guess is that your source is not set correctly. Did you forget the trailing slash '/' ?
I would suggest two things to figure it out. First, include Node's path library to check where source is actually pointing, like this:
var path = require('path');
// in gulp task ...
console.log(path.resolve(source + 'js/app'));
Make sure it points where you think it does.
Secondly, you could use gulp-debug to establish that any files are found:
npm install gulp-debug
Then
var debug = require('gulp-debug');
// in gulp task ...
return gulp.src([source + 'js/app/**/*.js'])
    .pipe(concat('development.js'))
    .pipe(debug())
    .pipe(gulp.dest(source + 'js'))
    .pipe(debug())
    // etc.
Good luck!
Based on additional information in the comments, I realise you are generating JS files in a separate process...
gulp is asynchronous by default. What this boils down to is that all tasks try to run at the same time - if you want a specific order, it must be by design. This is great because it's very fast, but it can be a headache to work with.
Problem
Here is what's basically happening:
// SOME TASK THAT SHOULD BE RUN FIRST
gulp.task('copy-vendor-files-to-tempfolder', function (done) {
    // copy files to vendor folder
    done();
});

// SOME TASKS THAT DEPEND ON FIRST TASK
gulp.task('compile-styles', function () { /* independent task */ });
gulp.task('concat-vendor-files', function () { /* concat files in vendor folder. depends on vendor files existing */ });

// GENERAL TASK WHICH STARTS OTHERS
gulp.task('ready', ['copy-vendor-files-to-tempfolder', 'compile-styles', 'concat-vendor-files']);
When you try to run:
$ gulp ready
GULP TASK WILL FAIL! Folder is being created at the same time!!
NOWHERE TO COPY FILES!
Solution
There are many solutions but the following module has come in handy for me again and again:
npm install run-sequence
Then in your gulpfile.js:
var runSequence = require('run-sequence');

gulp.task('ready', function (done) {
    runSequence(
        'create-folders',   // do this first
        [
            'copy-css-files',
            'copy-html-files'
        ],                  // do these AFTER, but in parallel
        done                // callback when ready
    );
});
This will guarantee the folder exists when you try to run the other functions.
In your specific case, you should make sure the task that concatenates the JS files is run after the task that copies them out of vendor.
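As an aside, and only if upgrading is an option: gulp 4 builds this ordering into the core API, so the same idea can be expressed without run-sequence. A rough equivalent of the example above, assuming the same task names:
gulp.task('ready', gulp.series(
    'create-folders',       // runs first
    gulp.parallel(
        'copy-css-files',   // then these run together
        'copy-html-files'
    )
));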
Note: I'm leaving my other answer up because it contains useful help for debugging similar issues.
HTH!

how to output multiple bundles with browserify and gulp

I have browserify bundling up files and it's working great. But what if I need to generate multiple bundles?
I would like to end up with dist/appBundle.js and dist/publicBundle.js
gulp.task("js", function(){
return browserify([
"./js/app.js",
"./js/public.js"
])
.bundle()
.pipe(source("bundle.js"))
.pipe(gulp.dest("./dist"));
});
Obviously this isn't going to work since I am only specifying one output (bundle.js). I can accomplish this by repeating the above statement like so (but it doesn't feel right, because of the repetition):
gulp.task("js", function(){
browserify([
"./js/app.js"
])
.bundle()
.pipe(source("appBundle.js"))
.pipe(gulp.dest("./dist"));
browserify([
"./js/public.js"
])
.bundle()
.pipe(source("publicBundle.js"))
.pipe(gulp.dest("./dist"));
});
Is there a better way to tackle this? Thanks!
I don't have a good environment to test this in right now, but my guess is that it would look something like:
gulp.task("js", function(){
var destDir = "./dist";
return browserify([
"./js/app.js",
"./js/public.js"
])
.bundle()
.pipe(source("appBundle.js"))
.pipe(gulp.dest(destDir))
.pipe(rename("publicBundle.js"))
.pipe(gulp.dest(destDir));
});
EDIT: I just realized I misread the question; there should be two separate bundles coming from two separate .js files. In light of that, the best alternative I can think of looks like:
gulp.task("js", function(){
    var destDir = "./dist";

    var bundleThis = function(srcArray) {
        // the callback parameter is named "entry" so it doesn't shadow the
        // vinyl-source-stream "source" function used below
        _.each(srcArray, function(entry) {
            var bundle = browserify(["./js/" + entry + ".js"]).bundle();
            bundle.pipe(source(entry + "Bundle.js"))
                .pipe(gulp.dest(destDir));
        });
    };

    bundleThis(["app", "public"]);
});
gulp.task("js", function (done) {
[
"app",
"public",
].forEach(function (entry, i, entries) {
// Count remaining bundling operations to track
// when to call done(). Could alternatively use
// merge-stream and return its output.
entries.remaining = entries.remaining || entries.length;
browserify('./js/' + entry + '.js')
.bundle()
// If you need to use gulp plugins after bundling then you can
// pipe to vinyl-source-stream then gulp.dest() here instead
.pipe(
require('fs').createWriteStream('./dist/' + entry + 'Bundle.js')
.on('finish', function () {
if (! --entries.remaining) done();
})
);
});
});
This is similar to @urban_racoons' answer, but with some improvements:
That answer will fail as soon as you want the task to be a dependency of another task in gulp 3, or part of a series in gulp 4. This answer uses a callback to signal task completion.
The JS can be simpler and doesn't require underscore.
This answer is based on the premise of having a known list of entry files for each bundle, as opposed to, say, needing to glob a list of entry files.
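For completeness, here is a rough sketch of the merge-stream variant mentioned in the comment above, in case you prefer returning gulp-style streams over raw write streams (this assumes the vinyl-source-stream and merge-stream packages):
var gulp = require('gulp');
var browserify = require('browserify');
var source = require('vinyl-source-stream');
var merge = require('merge-stream');

gulp.task('js', function () {
    // one browserify pipeline per entry file; merge-stream combines them so
    // gulp knows the task is done when every bundle has been written
    var streams = ['app', 'public'].map(function (entry) {
        return browserify('./js/' + entry + '.js')
            .bundle()
            .pipe(source(entry + 'Bundle.js'))
            .pipe(gulp.dest('./dist'));
    });
    return merge(streams);
});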
Multiple bundles with shared dependencies
I recently added support for multiple bundles with shared dependencies to https://github.com/greypants/gulp-starter
Here's the array of browserify config objects I pass to my browserify task. At the end of that task, I iterate over each config, browserifying all the things.
config.bundleConfigs.forEach(browserifyThis);
browserifyThis takes a bundleConfig object, and runs browserify (with watchify if dev mode).
This is the bit that sorts out shared dependencies:
// Sort out shared dependencies.
// b.require exposes modules externally
if(bundleConfig.require) b.require(bundleConfig.require)
// b.external excludes modules from the bundle, and expects
// they'll be available externally
if(bundleConfig.external) b.external(bundleConfig.external)
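To make that concrete, a pair of bundle configs using require/external might look roughly like this; the field names are illustrative rather than copied from gulp-starter, so treat it as a sketch of the idea:
var bundleConfigs = [{
    entries: './src/javascript/vendor.js',
    dest: 'dist',
    outputName: 'vendor.js',
    // expose these modules so other bundles can require() them at runtime
    require: ['jquery', 'underscore']
}, {
    entries: './src/javascript/app.js',
    dest: 'dist',
    outputName: 'app.js',
    // leave these out of the app bundle; they'll be provided by the vendor bundle
    external: ['jquery', 'underscore']
}];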
This browserify task also properly reports when all bundles are finished (the above example isn't returning streams or firing the task's callback), and uses watchify when in devMode for super fast recompiles.
Brian FitzGerald's last comment is spot on. Remember that it's just JavaScript!

Gulp task to delete empty files

When compiling TypeScript, I get a lot of empty JavaScript files generated because their TypeScript counterparts contain only interfaces. There's currently no tsc option to suppress generation of these files. I'm using the gulp-tsc plugin to compile.
Is there a plugin or some other means to clean up empty files, or even a more general purpose gulp plugin that would allow me to delete files based on their name and content? Or is there a way to do this in gulp without using plugins?
I think you could relatively easily use node-glob to look for matching files outside gulp, something like this:
var glob = require('glob'),   // the "node-glob" package on GitHub
    fs = require('fs');

gulp.task('delete-empty-files', function(cb) {
    glob('/path/to/generated/**/*.js', function(err, files) {
        files.forEach(function(file) {
            if (fs.statSync(file).size === 0) {
                fs.unlinkSync(file);
            }
        });
        // make sure the task runs asynchronously!
        cb();
    });
});
Alternatively, you could use gulp-tap to achieve a similar result, like so:
var tap = require('gulp-tap'),
    fs = require('fs');

gulp.task('delete-empty-files', function() {
    return gulp.src('/path/to/generated/**/*.js')
        .pipe(tap(function(file) {
            if (file.stat.size === 0) {
                fs.unlinkSync(file.path);
            }
        }));
});
They're pretty equal, really. I think the first one will be faster, because it won't have the extra stuff from gulp and vinyl-fs.
You might be able to add the {read: false} option to gulp.src() to speed things up, but that might also keep .stat from being read. If you want to try it, you add it like this:
return gulp.src('/path/to/files/**/*.js', {read: false}).pipe(...)
A better answer would be to just name those interface files with the .d.ts extension instead of .ts. No files are generated this way.
Alternatively, for anyone else facing similar issues, gulp-clip-empty-files solves this.
Removes empty files from the stream. This prevents errors in some other plugins, like gulp-sass, and can be useful for removing placeholders.
The usage is quite simple...
var gulp = require('gulp');
var clip = require('gulp-clip-empty-files');

gulp.task('default', function () {
    return gulp.src('src/*.scss')
        .pipe(clip())
        .pipe(gulp.dest('dist'));
});
In this example, all empty .scss files will be "clipped" (i.e. dropped from the stream) before anything is written out.

requireJS an entire folder

Is it possible to "require" an entire folder using requireJS.
For example, I have a behaviors folder with a ton of behavior JS files. I'd really like to be able to simply use require(['behaviors/*'], function() {...}); to load everything in that folder rather than having to keep that list up to date. Once compressed and optimized, I'd have all those files lumped together, but for development it's easier to work with them individually.
JavaScript in the browser has no filesystem access, so it can't scan a directory for files. If you are building your app in a scripting language like PHP or Ruby, you could write a script that scans the directory and adds the file names to the require() call.
I don't know if I can recommend this approach anymore. I think the more explicit way to do this is by manually "requiring"/"exporting" the functionality you need. The exception, I think, is if you have a "namespace" of files that you want exported; see the "Babel and ES6 Module Import Declarations" sections below.
These solutions also assume that you have a meaningful file structure - where file names become part of that wildcard "require" definition.
However, if you still need to do this there are a few existing tools and methods that might provide the behavior that you're looking for.
Possible Solutions
Babel and ES6 Module Import Declarations (plugin-export-namespace-from)
Have a setup that is ES6 compliant.
You need to update your .babelrc file to include @babel/plugin-proposal-export-namespace-from.
Use export namespace plugin by writing syntax like the following:
common/index.js
export * from './a'; // a.js: export const a = false;
export * from './b'; // b.js: export const b = true;
main.js
import { a, b } from './common';
console.log(a); // false
console.log(b); // true
Babel and ES6 Module Import Declarations (plugin-wildcard)
Have a setup that is ES6 compliant.
You need to update your .babelrc file to include babel-plugin-wildcard.
Use wildcard namespace plugin by writing syntax like the following:
main.js
import { a, b } from './common/*'; // imports './common/a.js' and './common/b.js'
console.log(a); // false
console.log(b); // true
RequireJS (Now Outdated)
Download and install require-wild: npm install require-wild
Configure the declaration as follows
grunt.initConfig({
    requireWild: {
        app: {
            // Input files to look for wildcards (require|define)
            src: ["./**/*.js"],
            // Output file contains generated namespace modules
            dest: "./namespaces.js",
            // Load your require config file used to find baseUrl - optional
            options: { requireConfigFile: "./main.js" }
        }
    }
});

grunt.loadNpmTasks("require-wild");
grunt.registerTask('default', ['requireWild']);
Then run the grunt task. Your file will be generated. Modify your setup to load namespaces.js
require(['namespaces'], function () { ... });
This now allows modules under src to use glob pattern matching for dependencies.
require(['behaviors/**/*'], function (behaviors) { });
I know this is old, but I'd like to share my solution:
For this solution you need jQuery.
1) Create a bash script that will list all the js files in
"MyDirectory/", and save it to "directoryContents.txt":
#!/bin/bash
# Find all the files in that directory...
for file in $( find MyDirectory/ -type f -name "*.js" )
do
    fileClean=${file%.js} # Must remove .js from the end!
    echo -n "$fileClean " >> MyDirectory/directoryContents.txt
done
File will look like this:
MyDirectory/FirstJavascriptFile MyDirectory/SecondJavascriptFile
MyDirectory/ThirdJavascriptFile
One problem with my script: it puts an extra " " at the end, and that messes things up! Make sure to remove the excess space at the end of directoryContents.txt.
2) Then, in your client-side JS code:
- Do a "GET" request to retrieve the text file.
- For each entry (split by the space), 'require' that file:
$.get( "MyDirectory/directoryContents.txt", {}, function( data ) {
var allJsFilesInFolder = data.split(" ");
for(var a=0; a<allJsFilesInFolder.length; a++)
{
require([allJsFilesInFolder[a]], function(jsConfig)
{
//Done loading this one file
});
}
}, "text");
I was having a problem with this code not finishing before my other code, so here's my extended answer:
define([''], function() {
    return {
        createTestMenu: function() {
            this.loadAllJSFiles(function() {
                //Here ALL those files you need are loaded!
            });
        },

        loadAllJSFiles: function(callback) {
            $.get("MyDirectory/directoryContents.txt", {}, function(data) {
                var allJsFilesInFolder = data.split(" ");
                var currentFileNum = 0;
                for (var a = 0; a < allJsFilesInFolder.length; a++) {
                    require([allJsFilesInFolder[a]], function(jsConfig) {
                        currentFileNum++;
                        //If it's the last file that needs to be loaded, run the callback.
                        if (currentFileNum == allJsFilesInFolder.length) {
                            console.log("Done loading all configuration files.");
                            if (typeof callback != "undefined") { callback(); }
                        }
                    });
                }
            }, "text");
        }
    };
});
What I ended up doing was: every time my Node server boots, it runs the bash script, populating directoryContents.txt. Then my client side just reads directoryContents.txt for the list of files and requires each one in that list.
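If you'd rather skip the bash step entirely, here's a rough Node-side sketch of the same idea (the paths and the space-separated output format are assumptions chosen to match the example above), which could run when the server boots:
var fs = require('fs');
var path = require('path');

// List the .js files in MyDirectory/ (top level only; the bash version recurses)
// and write their module names without the .js extension, space separated.
var dir = 'MyDirectory';
var names = fs.readdirSync(dir)
    .filter(function (file) { return path.extname(file) === '.js'; })
    .map(function (file) { return dir + '/' + path.basename(file, '.js'); });

// join(' ') also avoids the trailing-space problem mentioned above
fs.writeFileSync(path.join(dir, 'directoryContents.txt'), names.join(' '));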
Hope this helps!
There isn't really a way to do this conceptually on the fly (that I know of).
There are a few workarounds though:
Use grunt and concat and then just require that behemoth...I know, kinda sucky.
What I think is a better solution... use a require hierarchy like so:
require(['/js/controllers/init'], function(ctrls){
    ctrls(app, globals);
});

// /js/controllers/init.js
define(['js/controllers/index', 'js/controllers/posts'], function(index, posts){
    return function protagonist(app, globals){
        var indexModule = index(app, globals);
        var postsModule = posts(app, globals);
        return app || someModule;
    };
});

// /js/controllers/index.js
define(['js/controllers/index', 'js/controllers/posts'], function(index, posts){
    return function protagonist(app, globals){
        function method1(){}
        function method2(){}
        return {
            m1: method1,
            m2: method2
        };
    };
});
Note that "protagonist" function. That allows you to initialize modules before their use, so now you can pass in a 'sandbox' -- in this case app and globals.
Realistically, you wouldn't have /js/controllers/index.js... It should probably be something like /js/controllers/index/main.js or /js/controllers/index/init.js so that there is a directory adjacent to (sibling of) /js/controllers/init.js called "index". This will make your modules scalable to a given interface -- you can simply swap modules out and keep your interface the same.
Hope this helps! Happy coding!
I wrote a library to solve this problem. Eventually someone else came along and improved my library, here it is:
https://github.com/smartprocure/directory-metagen
You can use my lib with Gulp or whatever - it generates metadata for your project and RequireJS can use that metadata to require the desired files from the filesystem.
Using this lib will produce a RequireJS module that looks something like this:
define(
[
"text!app/templates/dashboardTemplate.ejs",
"text!app/templates/fluxCartTemplate.ejs",
"text!app/templates/footerTemplate.ejs",
"text!app/templates/getAllTemplate.ejs",
"text!app/templates/headerTemplate.ejs",
"text!app/templates/homeTemplate.ejs",
"text!app/templates/indexTemplate.ejs",
"text!app/templates/jobsTemplate.ejs",
"text!app/templates/loginTemplate.ejs",
"text!app/templates/overviewTemplate.ejs",
"text!app/templates/pictureTemplate.ejs",
"text!app/templates/portalTemplate.ejs",
"text!app/templates/registeredUsersTemplate.ejs",
"text!app/templates/userProfileTemplate.ejs"
],
function(){
return {
"templates/dashboardTemplate.ejs": arguments[0],
"templates/fluxCartTemplate.ejs": arguments[1],
"templates/footerTemplate.ejs": arguments[2],
"templates/getAllTemplate.ejs": arguments[3],
"templates/headerTemplate.ejs": arguments[4],
"templates/homeTemplate.ejs": arguments[5],
"templates/indexTemplate.ejs": arguments[6],
"templates/jobsTemplate.ejs": arguments[7],
"templates/loginTemplate.ejs": arguments[8],
"templates/overviewTemplate.ejs": arguments[9],
"templates/pictureTemplate.ejs": arguments[10],
"templates/portalTemplate.ejs": arguments[11],
"templates/registeredUsersTemplate.ejs": arguments[12],
"templates/userProfileTemplate.ejs": arguments[13]
}
});
You can then require modules in your front-end like so:
var footerView = require("app/js/jsx/standardViews/footerView");
However, as you can see, this is too verbose, so the magic way is like so:
Name the dependency above allViews!
Now you can do:
var allViews = require('allViews');
var footerView = allViews['standardViews/footerView'];
There are two advantages to requiring directories whole:
(1) in production, with the r.js optimizer, you can point to one dependency (module A) and it can then easily trace all of A's dependencies that represent an entire directory
(2) in development, you can require whole directories up front and then use synchronous syntax to require dependencies because you know they have already been loaded
enjoy "RequireJS-Metagen"
https://github.com/smartprocure/directory-metagen
https://www.npmjs.com/package/requirejs-metagen
https://github.com/ORESoftware/requirejs-metagen
