Gulp task failing only with gulp.watch() - javascript

I have written a gulp task which finds .json files in a directory, assumes there is a corresponding .atlas file, and inlines them into my JS project inside one object:
gulp.task('spine', function() {
  var fs = require('fs');

  function parseAtlas(atlasPath) {
    var atlas = fs.readFileSync(atlasPath, 'utf8').replace(/[ ]/g, '');
    return JSON.stringify(atlas);
  }

  return gulp.src(config.src + 'spineData/*.json')
    .pipe(tap(function(file) {
      var path = file.path.split('/');
      var fullName = path.pop();
      var name = fullName.split('.')[0];
      path = path.join('/') + '/';

      var atlas = parseAtlas(path + name + '.atlas');
      var json = file.contents;

      file.contents = Buffer.concat([
        new Buffer("var app = (function(mod) { mod.pixi.spineData['" + name + "'] = {\n"),
        new Buffer("atlas: " + atlas + ",\n"),
        new Buffer("json: " + json + "\n"),
        new Buffer("}; return mod; })(app || {});")
      ]);
    }))
    .pipe(uglifyJS('data.min.js', {
      outSourceMap: true,
      basePath: config.dest,
      sourceRoot: '/'
    }))
    .on('error', util.handleErrors)
    .pipe(gulp.dest(config.dest));
});
The task works as expected except when using gulp.watch(). An external program changes file.json and the task fails with:
gulp-uglifyjs - UglifyJS threw an error
error: Unexpected token: punc (})
I am guessing it's something to do with gulp.watch() being called before the .json file is completely written by the external program. These are fairly big files - currently at around 6000 lines.
I've tried adding a delay (using gulp-wait) both before the tap pipe and before the uglify pipe, but it still always fails with gulp.watch().
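One thing worth trying (a sketch, assuming gulp 4, whose gulp.watch forwards its options to chokidar) is the awaitWriteFinish option, which holds the change event back until the file size has been stable for a while, so the task never sees a half-written file:

gulp.watch(config.src + 'spineData/*.json', {
  // wait until the file size has been stable for 2s before firing
  awaitWriteFinish: {
    stabilityThreshold: 2000,
    pollInterval: 100
  }
}, gulp.series('spine'));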

Related

How can I change the output of the console in javascript to a file? [duplicate]

Can I configure console.log so that the logs are written to a file instead of being printed to the console?
You could also just overload the default console.log function:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/debug.log', { flags: 'w' });
var log_stdout = process.stdout;

console.log = function(d) {
  log_file.write(util.format(d) + '\n');
  log_stdout.write(util.format(d) + '\n');
};
The above example will log to debug.log and stdout.
Edit: See the multiparameter version by Clément also on this page.
Update 2013 - This was written around Node v0.2 and v0.4; there are much better utilities for logging now. I highly recommend Winston.
Update Late 2013 - We still use winston, but now with a logger library to wrap the functionality around logging of custom objects and formatting. Here is a sample of our logger.js: https://gist.github.com/rtgibbons/7354879
Should be as simple as this.
var access = fs.createWriteStream(dir + '/node.access.log', { flags: 'a' })
  , error = fs.createWriteStream(dir + '/node.error.log', { flags: 'a' });

// redirect stdout / stderr
proc.stdout.pipe(access);
proc.stderr.pipe(error);
If you are looking for something in production, winston is probably the best choice.
If you just want to do dev stuff quickly, output directly to a file (I think this works only for *nix systems):
nohup node simple-server.js > output.log &
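You can then follow the log from another terminal while the server runs:
tail -f output.log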
I often use many arguments to console.log() and console.error(), so my solution would be:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;

console.log = function () {
  logFile.write(util.format.apply(null, arguments) + '\n');
  logStdout.write(util.format.apply(null, arguments) + '\n');
};
console.error = console.log;
Winston is a very popular npm module used for logging.
Here is a how-to.
Install winston in your project as:
npm install winston --save
Here's a configuration, ready to use out of the box, that I use frequently in my projects as logger.js under utils.
/**
 * Configurations of logger.
 */
const winston = require('winston');
const winstonRotator = require('winston-daily-rotate-file');

// factory, so the success and error loggers are two separate instances
const createLogger = function () {
  return new winston.Logger({
    'transports': [
      new winston.transports.Console({
        'colorize': true
      })
    ]
  });
};

const successLogger = createLogger();
successLogger.add(winstonRotator, {
  'name': 'access-file',
  'level': 'info',
  'filename': './logs/access.log',
  'json': false,
  'datePattern': 'yyyy-MM-dd-',
  'prepend': true
});

const errorLogger = createLogger();
errorLogger.add(winstonRotator, {
  'name': 'error-file',
  'level': 'error',
  'filename': './logs/error.log',
  'json': false,
  'datePattern': 'yyyy-MM-dd-',
  'prepend': true
});

module.exports = {
  'successlog': successLogger,
  'errorlog': errorLogger
};
Then simply import it wherever required, like this:
const errorLog = require('../util/logger').errorlog;
const successlog = require('../util/logger').successlog;
Then you can log successes as:
successlog.info(`Success Message and variables: ${variable}`);
and errors as:
errorlog.error(`Error Message : ${error}`);
It also writes all the success and error logs to files under the logs directory, organized by date.
const fs = require("fs");
const {keys} = Object;
const {Console} = console;
/**
* Redirect console to a file. Call without path or with false-y
* value to restore original behavior.
* #param {string} [path]
*/
function file(path) {
const con = path ? new Console(fs.createWriteStream(path)) : null;
keys(Console.prototype).forEach(key => {
if (path) {
this[key] = (...args) => con[key](...args);
} else {
delete this[key];
}
});
};
// patch global console object and export
module.exports = console.file = file;
To use it, do something like:
require("./console-file");
console.file("/path/to.log");
console.log("write to file!");
console.error("also write to file!");
console.file(); // go back to writing to stdout
For simple cases, we could redirect the Standard Out (STDOUT) and Standard Error (STDERR) streams directly to a file (say, test.log) using '>' and '2>&1'.
Example:
// test.js
(function() {
  // Below outputs are sent to the Standard Out (STDOUT) stream
  console.log("Hello Log");
  console.info("Hello Info");
  // Below outputs are sent to the Standard Error (STDERR) stream
  console.error("Hello Error");
  console.warn("Hello Warning");
})();
node test.js > test.log 2>&1
As per the POSIX standard, the 'input', 'output' and 'error' streams are identified by the integer file descriptors 0, 1 and 2; i.e., stdin is 0, stdout is 1, and stderr is 2.
Step 1: '2>&1' will redirect from 2 (stderr) to 1 (stdout)
Step 2: '>' will redirect from 1 (stdout) to file (test.log)
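If you'd rather keep the two streams apart, redirect each file descriptor to its own file:
node test.js 1> test.log 2> error.log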
If this is for an application, you're probably better off using a logging module. It'll give you more flexibility. Some suggestions.
winston https://github.com/winstonjs/winston
log4js https://github.com/nomiddlename/log4js-node
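For example, a minimal file-logging setup with log4js might look like this (a sketch; the appender name, filename and level are just illustrative):

const log4js = require('log4js');

// send everything at level 'info' and above to app.log
log4js.configure({
  appenders: { file: { type: 'file', filename: 'app.log' } },
  categories: { default: { appenders: ['file'], level: 'info' } }
});

const logger = log4js.getLogger();
logger.info('written to app.log');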
Straight from Node.js's API docs on Console:
const fs = require('fs');
const { Console } = require('console');

const output = fs.createWriteStream('./stdout.log');
const errorOutput = fs.createWriteStream('./stderr.log');
// custom simple logger
const logger = new Console(output, errorOutput);
// use it like console
const count = 5;
logger.log('count: %d', count);
// in stdout.log: count 5
Another solution not mentioned yet is hooking the Writable streams in process.stdout and process.stderr. This way you don't need to override all the console functions that output to stdout and stderr. This implementation redirects both stdout and stderr to a log file:
var log_file = require('fs').createWriteStream(__dirname + '/log.txt', { flags: 'w' });

function hook_stream(stream, callback) {
  var old_write = stream.write;

  stream.write = (function(write) {
    return function(string, encoding, fd) {
      write.apply(stream, arguments); // comment out this line if you don't want output in the console
      callback(string, encoding, fd);
    };
  })(stream.write);

  return function() {
    stream.write = old_write;
  };
}

console.log('a');
console.error('b');

var unhook_stdout = hook_stream(process.stdout, function(string, encoding, fd) {
  log_file.write(string, encoding);
});

var unhook_stderr = hook_stream(process.stderr, function(string, encoding, fd) {
  log_file.write(string, encoding);
});

console.log('c');
console.error('d');

unhook_stdout();
unhook_stderr();

console.log('e');
console.error('f');
It should print in the console
a
b
c
d
e
f
and in the log file:
c
d
For more info, check this gist.
Overwriting console.log is the way to go. But for it to work in required modules, you also need to export it.
module.exports = console;
To save yourself the trouble of writing log files, rotating and stuff, you might consider using a simple logger module like winston:
// Include the logger module
var winston = require('winston');
// Set up the log file. (you can also define size, rotation etc.)
winston.add(winston.transports.File, { filename: 'somefile.log' });

// Overwrite some of the built-in console functions
console.error = winston.error;
console.log = winston.info;
console.info = winston.info;
console.debug = winston.debug;
console.warn = winston.warn;
module.exports = console;
If you're using Linux, you can also use output redirection. (Not sure about Windows.)
node server.js >> file.log 2>> file.log
>> file.log to redirect stdout to the file
2>> file.log to redirect stderr to the file
Others use the shorthand &>> for both stdout and stderr, but it's not accepted by either my Mac or Ubuntu :(
extra: > overwrites, while >> appends.
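A portable equivalent that works in plain sh and bash is to append stdout to the file and then duplicate stderr onto it:
node server.js >> file.log 2>&1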
By the way, regarding Node.js loggers, I use pino together with pino-pretty.
METHOD STDOUT AND STDERR
This approach can help you (I use something similar in my projects) and works for all methods, including console.log, console.warn, console.error and console.info.
This method writes the bytes that are written to stdout and stderr to a file. It's better than changing the console.log, console.warn, console.error and console.info methods, because the output will be exactly the same as those methods' output.
var fs= require("fs")
var os= require("os")
var HOME= os.homedir()
var stdout_r = fs.createWriteStream(HOME + '/node.stdout.log', { flags: 'a' })
var stderr_r = fs.createWriteStream(HOME + '/node.stderr.log', { flags: 'a' })
var attachToLog= function(std, std_new){
var originalwrite= std.write
std.write= function(data,enc){
try{
var d= data
if(!Buffer.isBuffer(d))
d= Buffer.from(data, (typeof enc === 'string') ? enc : "utf8")
std_new.write.apply(std_new, d)
}catch(e){}
return originalwrite.apply(std, arguments)
}
}
attachToLog(process.stdout, stdout_r)
attachToLog(process.stderr, stderr_r)
// recommended catch error on stdout_r and stderr_r
// stdout_r.on("error", yourfunction)
// stderr_r.on("error", yourfunction)
Adding to the answer above, a little bit of an expansion to the short and efficient code that overrides console.log. Minor additions: set the filename with a date, add a wrapper function, and also do the original console.logging to keep the console active with the info.
Usage: at the beginning of your code, run setConsoleLogToFile([FILENAME]).
const fs = require("fs"),
util = require('util');
const getPrettyDate = ()=> new Date().toString().replace(":","-").replace(/00\s\(.*\)/, "").replace(` ${new Date().getFullYear()}`, ",").replace(/:\d\d\s/, " ");
module.exports.getPrettyDate = getPrettyDate;
module.exports.setConsoleLogToFile = (filename) => {
const log_file = fs.createWriteStream(`${__dirname}/${filename} - ${getPrettyDate()}.log`, { flags: 'w' }),
log_stdout = process.stdout;
const origConsole = console.log;
console.log = (d) => {
origConsole(d);
log_file.write(util.format(d) + '\n');
log_stdout.write(util.format(d) + '\n');
};
}
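Usage might then look like this (the filename 'app' is just an example):

const { setConsoleLogToFile } = require('./logger');
setConsoleLogToFile('app'); // writes to e.g. "app - <date>.log" next to logger.js
console.log('this goes to both the console and the file');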
Most loggers are overkill and don't support the built-in console.log correctly. Hence I created console-log-to-file:
import { consoleLogToFile } from "console-log-to-file";
// or `const { consoleLogToFile } = require("console-log-to-file/dist/index.cjs.js")`

consoleLogToFile({
  logFilePath: "/log/default.log",
});
// all of your console.log/warn/error/info will work as it does and save to file now.
If you are looking for a solution without modifying any code, here is a simple solution.
It requires pm2; just add it to your node modules and start your app with
pm2 start server.js
And you are done; console.logs are now automatically registered under ~/.pm2/logs/server-out.log.
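You can also tail and manage those logs straight from pm2:

pm2 logs server   # stream the logs for the 'server' process
pm2 flush         # clear all log files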
Improving on Andres Riofrio's answer, to handle any number of arguments:
var fs = require('fs');
var util = require('util');
var log_file = fs.createWriteStream(__dirname + '/debug.log', { flags: 'w' });
var log_stdout = process.stdout;

console.log = function(...args) {
  var output = args.join(' ');
  log_file.write(util.format(output) + '\r\n');
  log_stdout.write(util.format(output) + '\r\n');
};
You can now use Caterpillar, which is a streams-based logging system, allowing you to log to it and then pipe the output off to different transforms and locations.
Outputting to a file is as easy as:
var logger = new (require('caterpillar').Logger)();
logger.pipe(require('fs').createWriteStream('./debug.log'));
logger.log('your log message');
Complete example on the Caterpillar Website
For future users: @keshavDulal's answer doesn't work for the latest version, and I couldn't find a proper fix for the issues reported against the latest version, 3.3.3.
Anyway, I finally fixed it after researching a bit. Here is the solution for winston version 3.3.3.
Install winston and winston-daily-rotate-file
npm install winston
npm install winston-daily-rotate-file
Create a new file utils/logger.js
const winston = require('winston');
// requiring this registers winston.transports.DailyRotateFile
require('winston-daily-rotate-file');

const logger = winston.createLogger({
  transports: [
    new (winston.transports.DailyRotateFile)({
      name: 'access-file',
      level: 'info',
      filename: './logs/access.log',
      json: false,
      datePattern: 'yyyy-MM-DD',
      prepend: true,
      maxFiles: 10
    }),
    new (winston.transports.DailyRotateFile)({
      name: 'error-file',
      level: 'error',
      filename: './logs/error.log',
      json: false,
      datePattern: 'yyyy-MM-DD',
      prepend: true,
      maxFiles: 10
    })
  ]
});

module.exports = {
  logger
};
Then, in any file where you want to use logging, import the module like:
const logger = require('./utils/logger').logger;
Use logger like the following:
logger.info('Info service started');
logger.error('Service crashed');
If you are using forever to keep your node app running, then typing forever list will show you the path of the log file that console.log is writing to.
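You can also pick the log file locations yourself when starting the app (a sketch; check forever --help for the exact flags on your version):
forever start -o out.log -e err.log server.js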
You can use the Node.js Console constructor:
const fs = require('fs');

const mylog = new console.Console(
  fs.createWriteStream("log/logger.log"),
  fs.createWriteStream("log/error.log")
);
And then you can use it just like the normal console class, eg:
mylog.log("Ok!"); // Will be written into 'log/logger.log'
mylog.error("Bad!"); // Will be written into 'log/error.log'
You can also have a look at this npm module:
https://www.npmjs.com/package/noogger
noogger
simple and straightforward...
I took the idea of swapping the output stream for my own stream.
const LogLater = require('./loglater.js');

var logfile = new LogLater('log' + (new Date().toISOString().replace(/[^a-zA-Z0-9]/g, '-')) + '.txt');

var PassThrough = require('stream').PassThrough;

var myout = new PassThrough();
var wasout = console._stdout;
myout.on('data', (data) => { logfile.dateline("\r\n" + data); wasout.write(data); });
console._stdout = myout;

var myerr = new PassThrough();
var waserr = console._stderr;
myerr.on('data', (data) => { logfile.dateline("\r\n" + data); waserr.write(data); });
console._stderr = myerr;
loglater.js:
const fs = require('fs');

function LogLater(filename, noduplicates, interval) {
  this.filename = filename || "loglater.txt";
  this.arr = [];
  this.timeout = false;
  this.interval = interval || 1000;
  this.noduplicates = (noduplicates === undefined) ? true : noduplicates;
  this.onsavetimeout_bind = this.onsavetimeout.bind(this);
  this.lasttext = "";
  process.on('exit', () => {
    if (this.timeout) clearTimeout(this.timeout);
    this.timeout = false;
    this.save();
  });
}

LogLater.prototype = {
  _log: function _log(text) {
    this.arr.push(text);
    if (!this.timeout) this.timeout = setTimeout(this.onsavetimeout_bind, this.interval);
  },
  text: function text(text) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text;
    }
    this._log(text);
  },
  line: function line(text) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text;
    }
    this._log(text + '\r\n');
  },
  dateline: function dateline(text) {
    if (this.noduplicates) {
      if (this.lasttext === text) return;
      this.lasttext = text;
    }
    this._log(((new Date()).toISOString()) + '\t' + text + '\r\n');
  },
  onsavetimeout: function onsavetimeout() {
    this.timeout = false;
    this.save();
  },
  save: function save() {
    fs.appendFile(this.filename, this.arr.splice(0, this.arr.length).join(''), function(err) {
      if (err) console.log(err.stack);
    });
  }
};

module.exports = LogLater;
I just built a package to do this, hope you like it ;)
https://www.npmjs.com/package/writelog
I myself simply took the example from winston and added a log(...) method (because winston names it info(...)):
Console.js:
"use strict"
// Include the logger module
const winston = require('winston');
const logger = winston.createLogger({
level: 'info',
format: winston.format.json(),
transports: [
//
// - Write to all logs with level `info` and below to `combined.log`
// - Write all logs error (and below) to `error.log`.
//
new winston.transports.File({ filename: 'error.log', level: 'error' }),
new winston.transports.File({ filename: 'combined.log' })
]
});
//
// If we're not in production then log to the `console` with the format:
// `${info.level}: ${info.message} JSON.stringify({ ...rest }) `
//
if (process.env.NODE_ENV !== 'production') {
logger.add(new winston.transports.Console({
format: winston.format.simple()
}));
}
// Add log command
logger.log=logger.info;
module.exports = logger;
Then simply use in your code:
const console = require('Console')
Now you can simply use the normal log functions in your file, and it will write to a file AND log to your console (while debugging/developing). Because of if (process.env.NODE_ENV !== 'production'), console output is skipped in production (remove that check in case you want it there too).
Create a utils/logger.js file with:
var fs = require('fs');
var util = require('util');

var log_file = fs.createWriteStream(__dirname + '/../logs/server.log', { flags: 'w' });
var log_stdout = process.stdout;

console.log = function () {
  [...arguments].forEach(element => {
    log_file.write(util.format(element) + '\n');
    log_stdout.write(util.format(element) + '\n');
  });
};

module.exports = {
  console
};
Include the logger.js file from any file where you want to console.log like:
const console = require('./utils/logger').console;
Create a logs folder and create an empty server.log file in it and run your app :)
Rudy Huynh's solution worked really well for me. I added a little bit to have it spit out files with today's date and time.
var dateNow = new Date();
var timeNow = dateNow.getHours() + '-' + dateNow.getMinutes();
var logPath = "log/" + dateNow.toDateString() + ' - Start Time - ' + timeNow + ".log";

consoleLogToFile({
  logFilePath: logPath
});
It's not very elegant, but this way it'll save separate, easy-to-read log files instead of just updating the same "default.log" file.
Based on the multiparameter version by Clément, just without color codes in the text file:
var fs = require('fs');
var util = require('util');
var logFile = fs.createWriteStream('log.txt', { flags: 'a' });
// Or 'w' to truncate the file every time the process starts.
var logStdout = process.stdout;

console.log = function () {
  // Store without color codes (strip ANSI escape sequences)
  logFile.write(util.format.apply(null, arguments).replace(/\x1b\[[0-9;]*m/g, "") + '\n');
  // Display normally, with colors, on stdout
  logStdout.write(util.format.apply(null, arguments) + '\n');
};
Note: Answering since I couldn't comment

How to rename a zip file after an HTML file in a Gulp task

I am attempting to zip up all files in my dist folder using gulp-zip and would like to dynamically name the zip file after the only html file in that directory. Here's my attempt using gulp-filenames, but I've had no luck.
const rename = require('gulp-rename');
const zip = require('gulp-zip');
var filenames = require('gulp-filenames');

gulp.task('zipp', function() {
  gulp.src('dist/*')
    .pipe(zip('_final.zip'))
    .pipe(rename({ prefix: getHtmlName() }))
    .pipe(gulp.dest('./dist'));
});

function getHtmlName() {
  gulp.src("./src/*.html")
    .pipe(filenames("html"))
    .pipe(gulp.dest("./dist"));
  return filenames.get("html");
}
Here are two ways:
const gulp = require('gulp');
const rename = require('gulp-rename');
const zip = require('gulp-zip');
const filenames = require('gulp-filenames');

// run the getHTMLname task first, filenames will then have the data stored in an array
gulp.task('zipp', ['getHTMLname'], function () {
  return gulp.src('dist/*')
    .pipe(zip('_final.zip'))
    // rename gets the file passing through it, in this case '_final.zip'
    .pipe(rename(function (path) {
      // retrieve the array of string filenames, use the first and only one in the array
      // and get the basename via split
      path.basename = filenames.get("html")[0].split('.')[0] + path.basename;
    }))
    .pipe(gulp.dest('./dist'));
});

gulp.task('getHTMLname', function () {
  return gulp.src("./dist/*.html")
    .pipe(filenames("html"));
});

// **************************************************************

const gulp = require('gulp');
const rename = require('gulp-rename');
const zip = require('gulp-zip');
const glob = require("glob");
const path = require('path');

gulp.task('zipp', function () {
  return gulp.src('dist/*')
    .pipe(zip('_final.zip'))
    .pipe(rename(function (file) {
      // glob.sync returns an array, get the first member of that array
      // path.basename('string', '.html') returns the name of the file without the extension
      var temp = path.basename(glob.sync("./dist/*.html")[0], '.html');
      file.basename = temp + file.basename;
    }))
    .pipe(gulp.dest('./dist'));
});
If you really want to use gulp-filenames, use the code above the ********'s. The 'getHTMLname' task must run first - that is when gulp-filenames builds its array of file names from whatever source directory you feed it. That array can later be used anywhere in a subsequent task.
However, I recommend the second method, using the glob and path modules, which you should learn anyway. The second method does not require a separate task; it just gets (globs) the html filename you want right in the same task, so it is simpler. Also, when I installed gulp-filenames it pulled in a ton of deprecated modules, so it badly needs to be updated. It works now but may not as you update node, etc.

gulp-load-plugins.sourcemaps.init() TypeError: Cannot read property 'init' of undefined

I'm trying to adapt a gulp file to my purposes and I'm running into issues. I only care about one task:
gulp.task('js:browser', function () {
  return mergeStream.apply(null,
    Object.keys(jsBundles).map(function(key) {
      return bundle(jsBundles[key], key);
    })
  );
});
It is using browserify to condense my bundle into a usable single file. It uses these two methods and this object:
function createBundle(src) {
  // if the source is not an array, make it one
  if (!src.push) {
    src = [src];
  }

  var customOpts = {
    entries: src,
    debug: true
  };
  var opts = assign({}, watchify.args, customOpts);
  var b = watchify(browserify(opts));

  b.transform(babelify.configure({
    stage: 1
  }));

  b.transform(hbsfy);
  b.on('log', plugins.util.log);
  return b;
}

function bundle(b, outputPath) {
  var splitPath = outputPath.split('/');
  var outputFile = splitPath[splitPath.length - 1];
  var outputDir = splitPath.slice(0, -1).join('/');

  console.log(outputFile);
  console.log(plugins);

  return b.bundle()
    // log errors if they happen
    .on('error', plugins.util.log.bind(plugins.util, 'Browserify Error'))
    .pipe(source(outputFile))
    // optional, remove if you don't need to buffer file contents
    .pipe(buffer())
    // optional, remove if you don't want sourcemaps
    .pipe(plugins.sourcemaps.init({ loadMaps: true })) // loads map from browserify file
    // Add transformation tasks to the pipeline here.
    .pipe(plugins.sourcemaps.write('./')) // writes .map file
    .pipe(gulp.dest('build/public/' + outputDir));
}

var jsBundles = {
  'js/polyfills/promise.js': createBundle('./public/js/polyfills/promise.js'),
  'js/polyfills/url.js': createBundle('./public/js/polyfills/url.js'),
  'js/settings.js': createBundle('./public/js/settings/index.js'),
  'js/main.js': createBundle('./public/js/main/index.js'),
  'js/remote-executor.js': createBundle('./public/js/remote-executor/index.js'),
  'js/idb-test.js': createBundle('./public/js/idb-test/index.js'),
  'sw.js': createBundle(['./public/js/sw/index.js', './public/js/sw/preroll/index.js'])
};
When I run the gulp task js:browser I get the following error coming from the .pipe(plugins.sourcemaps.init({loadMaps: true})) expression:
TypeError: Cannot read property 'init' of undefined
I know that the lines are optional and I can just comment them out, but I do want them. When I run the code in the example file it works properly; when I run it in my gulp file it gives me the error. Any suggestions on what I might be missing? Thanks!
gulp-load-plugins analyzes the contents of your package.json file to find out which Gulp plugins you have installed. Make sure that gulp-sourcemaps is among the "devDependencies" defined there. If not, run
npm install --save-dev gulp-sourcemaps
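After that, your package.json should contain an entry along these lines (versions are just illustrative):

"devDependencies": {
  "gulp": "^3.9.0",
  "gulp-load-plugins": "^1.0.0",
  "gulp-sourcemaps": "^1.6.0"
}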
There's a small chance that your problem is related to lazy loading the sourcemaps plugin. If the above doesn't help try requiring gulp-load-plugins like this:
var plugins = require('gulp-load-plugins')({lazy:false});

How do you use browserify in a Gulp task?

I'm pretty new to Gulp, but by following this tutorial I set up a Gulp task that is meant to browserify javascript files in a particular directory and pipe them to a different directory - pretty simple. I've looked at a few other tutorials, but this method seemed to be the most concise. Here is my code:
var gulp = require('gulp');
var browserify = require('browserify');
var transform = require('vinyl-transform');

gulp.task('js', function() {
  var browserified = transform(function(filename) {
    return browserify(filename).bundle();
  });

  return gulp.src(['./public/js/src/**/*.js'])
    .pipe(browserified)
    .pipe(gulp.dest('./public/js/dist'));
});
The above code is very similar to many other implementations of this sort I've seen, but when I try running it with gulp js, it produces the following error:
[15:47:13] Using gulpfile ~/development/launchpad/workshop/gulpfile.js
[15:47:13] Starting 'js'...
_stream_readable.js:540
var ret = dest.write(chunk);
^
TypeError: undefined is not a function
at Producer.ondata (_stream_readable.js:540:20)
at Producer.emit (events.js:107:17)
at Producer.Readable.read (_stream_readable.js:373:10)
at flow (_stream_readable.js:750:26)
at resume_ (_stream_readable.js:730:3)
at _stream_readable.js:717:7
at process._tickCallback (node.js:355:11)
Does anyone know what might cause this error?
(As a side note, I'd like to look at the files from the stack trace to try to figure out what is going on here, but searching for _stream_readable.js in Spotlight yields about 20 files of that name, all seemingly Node modules. Is there a way to determine the full path of a file in a stack trace?)
var browserify = require('browserify');
var gulp = require('gulp');
var source = require('vinyl-source-stream');

gulp.task('browserify', function() {
  return browserify('lib/front/app.js')
    .bundle()
    // Pass desired output filename to vinyl-source-stream
    .pipe(source('bundle.js'))
    // Start piping stream to tasks!
    .pipe(gulp.dest('public/build/'));
});
If you want browserify to work with gulp.dest and create a file where you specify via .pipe(gulp.dest('src/js')), then you need to install vinyl-source-stream and add .pipe(source('bundle.js')). But actually in browserify the bundle method accepts a callback, so neither gulp.dest nor vinyl-source-stream is needed:
// requires assumed by this snippet: browserify, babelify,
// uglify-js's minify, and Node's events/fs modules;
// jsFile is the entry file and bs is a browser-sync instance
const browserify = require('browserify');
const babelify = require('babelify');
const { minify } = require('uglify-js');
const { EventEmitter } = require('events');
const fs = require('fs');

browserify({
  entries: jsFile,
  basedir: "src/js/dev",
  debug: true,
})
  .transform(babelify, {
    presets: ['@babel/preset-env'],
  })
  .bundle((err, buffer) => {
    let event = new EventEmitter();
    if (err) {
      event.emit('error', err);
    } else {
      let data = minify(buffer.toString(), {}).code;
      fs.createWriteStream('./src/js/bundle.js').write(data);
      console.dir(222);
      bs.reload();
    }
  });
Unfortunately, this is an issue with browserify/gulp, and there's nothing that vinyl-transform can do. The solution is to use vinyl-source-stream and vinyl-buffer:
var gulp = require('gulp');
var browserify = require('browserify');
var source = require('vinyl-source-stream');
var glob = require('glob');

gulp.task('browserify', function (cb) {
  glob('./src/**/*.js', {}, function (err, files) {
    var b = browserify();
    files.forEach(function (file) {
      b.add(file);
    });
    b.bundle()
      .pipe(source('output.js'))
      .pipe(gulp.dest('./dist'));
    cb();
  });
});
More information here.

Iterating over directories with Gulp?

I'm new to gulp, but I'm wondering if it's possible to iterate through directories in a gulp task.
Here's what I mean. I know a lot of the tutorials / demos show processing a bunch of JavaScript files using something like "**/*.js" and then they compile it into a single JavaScript file. But I want to iterate over a set of directories, and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't have only 2 directories; I have many (maybe 10-20), so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');

var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
  return fs.readdirSync(dir)
    .filter(function(file) {
      return fs.statSync(path.join(dir, file)).isDirectory();
    });
}

gulp.task('scripts', function() {
  var folders = getFolders(scriptsPath);

  var tasks = folders.map(function(folder) {
    return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
      // concat into foldername.js
      .pipe(concat(folder + '.js'))
      // write to output
      .pipe(gulp.dest(scriptsPath))
      // minify
      .pipe(uglify())
      // rename to folder.min.js
      .pipe(rename(folder + '.min.js'))
      // write to output again
      .pipe(gulp.dest(scriptsPath));
  });

  // process all remaining files in scriptsPath root into main.js and main.min.js files
  var root = gulp.src(path.join(scriptsPath, '/*.js'))
    .pipe(concat('main.js'))
    .pipe(gulp.dest(scriptsPath))
    .pipe(uglify())
    .pipe(rename('main.min.js'))
    .pipe(gulp.dest(scriptsPath));

  return merge(tasks, root);
});
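With that in your gulpfile, running it is just like any other task:
$ gulp scripts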
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

gulp.task('minify-features', function() {
  var promises = [];

  glob.sync('/js/features/*').forEach(function(filePath) {
    if (fs.statSync(filePath).isDirectory()) {
      var defer = Q.defer();
      var pipeline = gulp.src(filePath + '/**/*.js')
        .pipe(uglify())
        .pipe(concat(path.basename(filePath) + '.min.js'))
        .pipe(gulp.dest(filePath));
      pipeline.on('end', function() {
        defer.resolve();
      });
      promises.push(defer.promise);
    }
  });

  return Q.all(promises);
});
I am trying to understand how streams work in node myself.
I made a simple example for you of how to make a stream that filters folders and starts a new given stream for them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function(){
  return es.map(function(data, cb){
    log.data(data.path);
    cb(null, data);
  });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// it pipelines it with the stream given
var forEachFolder = function(stream){
  return es.map(function(data, cb){
    if(data.isDirectory()){
      var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files for example
      log.info('Piping files found in ' + pathToPass);
      if(stream !== undefined){
        gulp.src([pathToPass])
          .pipe(stream());
      }
    }
    cb(null, data);
  });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function(){
  // load some folder with some subfolders inside
  gulp.src('js/*')
    .pipe(forEachFolder(printFileNames));
  // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minify and concatenate them) and then pass an instance of this stream to the forEachFolder stream I made, like I do with the printFileNames custom stream.
Give it a try and let me know if it works for you.
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
var gulp = require('gulp');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

// task for building feature1
gulp.task('minify-feature1', function() {
  return gulp.src('/js/feature1/*')
    .pipe(uglify())                    // minify feature1 stuff
    .pipe(concat('feature1.min.js'))   // concat into single file
    .pipe(gulp.dest('/js/feature1'));  // output to dir
});

// task for building feature2
gulp.task('minify-feature2', function() { // do the same for feature2
  return gulp.src('/js/feature2/*')
    .pipe(uglify())
    .pipe(concat('feature2.min.js'))
    .pipe(gulp.dest('/js/feature2'));
});

// generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions to enable them to be processed by gulp.parallel - in a way where the number of functions I would generate would be variable. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
  // Get list of folders as per recipe above - in my case an array named modules
  // For each module return a function or composition (gulp.series in this case).
  return modules.map(function (m) {
    var moduleDest = env.folder + 'modules/' + m;
    return gulp.series(
      // Illustrative functions... all must return a stream or call a callback,
      // but you can have as many functions or compositions (gulp.series or
      // gulp.parallel) as desired
      function () {
        return gulp.src('modules/' + m + '/img/*', { buffer: false })
          .pipe(gulp.dest(moduleDest + '/img'));
      },
      function (done) {
        console.log('In my function');
        done();
      }
    );
  });
}

// Illustrative build task, running two named tasks then processing all modules
// generated above in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
