I'm trying to write a node.js script that watches for changes in a directory of files, and then prints the files that are changed. How can I modify this script so that it watches a directory (instead of an individual file), and prints the names of the files in the directory as they are changed?
var fs = require('fs');
var file = '/home/anderson/Desktop/fractal.png'; // this watches a file, but I want to watch a directory instead
fs.watchFile(file, function(curr, prev) {
    console.log("File was modified."); // is there some way to print the names of the files in the directory as they are modified?
});
Try Chokidar:
var chokidar = require('chokidar');
var watcher = chokidar.watch('file or dir', {ignored: /^\./, persistent: true});
watcher
    .on('add', function(path) { console.log('File', path, 'has been added'); })
    .on('change', function(path) { console.log('File', path, 'has been changed'); })
    .on('unlink', function(path) { console.log('File', path, 'has been removed'); })
    .on('error', function(error) { console.error('Error happened', error); });
Chokidar solves some of the cross-platform issues that come with watching files using fs alone.
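If you point it at a directory, every file event includes the file's path. A minimal sketch (the watched path is illustrative; ignoreInitial and the 'all' event are part of chokidar's documented API):
var chokidar = require('chokidar');
var watcher = chokidar.watch('/home/anderson/Desktop', {
    ignored: /^\./,
    persistent: true,
    ignoreInitial: true // skip 'add' events for files that already exist at startup
});
watcher.on('all', function(event, path) {
    console.log(event, path); // e.g. 'change /home/anderson/Desktop/fractal.png'
});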
Why not just use the old fs.watch? It's pretty straightforward.
fs.watch('/path/to/folder', (eventType, filename) => {
    console.log(eventType);
    // could be either 'rename' or 'change'. new file event and delete
    // also generally emit 'rename'
    console.log(filename);
});
For more info and details about the options param, see Node fs Docs
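One caveat: since both creation and deletion surface as 'rename', you have to check whether the file still exists to tell them apart. A minimal sketch (the folder path is illustrative):
const fs = require('fs');
const path = require('path');

const dir = '/path/to/folder';
fs.watch(dir, (eventType, filename) => {
    if (!filename) return; // filename can be null on some platforms
    const fullPath = path.join(dir, filename);
    // 'rename' fires for both create and delete; checking existence disambiguates
    fs.access(fullPath, (err) => {
        console.log(err ? `${filename} was removed` : `${filename} was added or changed (${eventType})`);
    });
});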
Try hound:
hound = require('hound')
// Create a directory tree watcher.
watcher = hound.watch('/tmp')
// Create a file watcher.
watcher = hound.watch('/tmp/file.txt')
// Add callbacks for file and directory events. The change event only applies
// to files.
watcher.on('create', function(file, stats) {
    console.log(file + ' was created')
})
watcher.on('change', function(file, stats) {
    console.log(file + ' was changed')
})
watcher.on('delete', function(file) {
    console.log(file + ' was deleted')
})
// Unwatch specific files or directories.
watcher.unwatch('/tmp/another_file')
// Unwatch all watched files and directories.
watcher.clear()
It will execute once a file is changed.
I want to delete multiple files ending with .pdf in the current directory. Suppose I have 3 different PDF files, 1 image file, and one text file; of these, I want to delete only the 3 PDF files.
What I have tried:
1st method
fs.unlinkSync('./'+*+pdfname); -> I know this does not make sense
2nd method
try {
    var files = (here the list of files should come, however I am failing to get those);
    var path = "./";
    files.forEach(path => fs.existsSync(path) && fs.unlinkSync(path));
} catch (err) {
    console.error("not exist");
}
Any different approaches would be appreciated.
Update for the solution:
I have got the solution for my requirement: I just wanted my function to delete all the PDF files and to be synchronous. 99% of the solution was given by the author of this answer: https://stackoverflow.com/a/66558251/11781464
fs.readdir is asynchronous; it just needed to be replaced with its synchronous counterpart, fs.readdirSync.
Below is the updated code; all the credit should go to the author of https://stackoverflow.com/a/66558251/11781464.
Updated code
const fs = require('fs');

try {
    const path = './';
    // Read the directory given in `path`
    fs.readdirSync(path).forEach((file) => {
        // If the file has a PDF extension, remove it
        if (file.split('.').pop().toLowerCase() === 'pdf') {
            console.log(`Deleting file: ${file}`);
            fs.unlinkSync(path + file);
        }
    });
    console.log("Deleted all the pdf files");
    return true; // note: this snippet is assumed to live inside a function
} catch (err) {
    console.error("Error in deleting files", err);
}
You can read the directory using fs.readdir and then check for PDF files and delete them. Like this:
const fs = require('fs');

const path = './';
// Read the directory given in `path`
fs.readdir(path, (err, files) => {
    if (err)
        throw err;
    files.forEach((file) => {
        // If the file has a PDF extension, remove it
        if (file.split('.').pop().toLowerCase() === 'pdf') {
            console.log(`Deleting file: ${file}`);
            fs.unlinkSync(path + file);
        }
    });
});
Note that fs.readdir returns undefined (the listing arrives in the callback), so there is nothing useful to assign to a variable, and a surrounding try/catch would only catch synchronous errors: an error thrown inside the asynchronous callback escapes it.
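If you want to stay asynchronous and still catch errors in one place, the promise-based API is an option. A minimal sketch using fs.promises (available since Node 10; deletePdfs is an illustrative name):
const fsp = require('fs').promises;

async function deletePdfs(dir) {
    // dir must end with a path separator, e.g. './'
    const files = await fsp.readdir(dir);
    await Promise.all(
        files
            .filter((file) => file.split('.').pop().toLowerCase() === 'pdf')
            .map((file) => fsp.unlink(dir + file))
    );
}

deletePdfs('./').catch((err) => console.error(err));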
Preliminary Reading
The current working directory of the Node.js process (see the update at the end of this answer)
Path methods
resolve
extname
File System methods
fs.readdirSync
fs.unlinkSync
and classes
fs.Dirent
Example
"use strict";
const fs = require('fs');
const path = require('path');
const cwd = process.cwd();
fs.readdirSync( cwd, {withFileTypes: true})
.forEach( dirent => {
if(dirent.isFile()) {
const fileName = dirent.name;
if( path.extname(fileName).toLowerCase() === ".pdf") {
fs.unlinkSync( path.resolve( cwd, fileName));
}
}
});
Notes
untested code
If unlinkSync fails it will throw rather than return -1; Node's synchronous fs methods throw errors, unlike the unlink(2) C call linked in the documentation. Personally I would test this using a filename that doesn't exist in cwd.
The {withFileTypes: true} option for readdirSync returns fs.Dirent objects, which expose type checks such as isFile() and isDirectory() but no MIME type; checking for files of type application/pdf regardless of extension would require inspecting file contents (not attempted in the example).
Update: path.resolve prepends the current working directory to the returned path, when necessary, by default. path.resolve(fileName) will work equally as well as path.resolve(cwd, fileName) in the example.
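For illustration, the two forms resolve to the same absolute path (a quick sketch; the file name is arbitrary):
const path = require('path');
const cwd = process.cwd();

console.log(path.resolve('some.pdf') === path.resolve(cwd, 'some.pdf')); // true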
It seems you know how to delete (unlink) the files - you are asking how to get the file paths?
Try using glob:
const pdfFiles = require("glob").globSync("*.pdf");
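Then unlinking each match is straightforward (a sketch; globSync is the glob v9+ API, while older versions expose glob.sync instead):
const fs = require("fs");
const { globSync } = require("glob");

// delete every .pdf file in the current directory
for (const file of globSync("*.pdf")) {
    fs.unlinkSync(file);
}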
Hi, here I am attaching tested code for deleting all (and only) .pdf files, leaving files with other extensions like .txt or .docx alone.
Note: You can delete files or directories like this only on the server side.
const fs = require('fs');
const path = require('path');

fs.readdir('../path to directory', (err, files) => {
    const pdfFiles = files.filter(el => path.extname(el) === '.pdf');
    pdfFiles.forEach(file => {
        console.log("Removing File -> ", file);
        var filename = "../path to directory/" + file;
        fs.unlink(filename, function (err) {
            if (err) return console.log(err);
            console.log('file deleted successfully');
        });
    });
});
This will give you the following result in the console log:
Removing File -> note.pdf
Removing File -> note2.pdf
file deleted successfully
file deleted successfully
Please feel free to comment if you have any questions.
I am using gulp to process pages for a site; some of these pages are PHP files. The problem is that after all the pages are run through the template engine they have a .html file extension. I am adding a property to the file that designates whether it's supposed to be something other than HTML and then renaming the file to match. However, for some reason gulp-rename keeps saying that the variable I am using to store the extension is undefined.
Corresponding task in gulpfile.js:
var gulp = require('gulp'),
    gutil = require('gulp-util'),
    lodash = require('lodash'),
    data = require('gulp-data'),
    filesize = require('gulp-filesize'),
    frontMatter = require('gulp-front-matter'),
    rename = require('gulp-rename'),
    tap = require('gulp-tap'), // needed for the .pipe(tap(...)) below
    util = require('util');
gulp.task('metalsmith', function(){
    var ext; // variable to store file extension
    return gulp.src(CONTENT_DIR)
        .pipe(frontMatter()).on("data", function(file){
            lodash.assign(file, file.frontMatter);
            delete file.frontMatter;
        })
        .pipe(data(function(file){ // ext is defined here
            if(typeof file.filetype === 'undefined'){
                ext = {extname: '.html'};
            }else{
                ext = {extname: file.filetype};
            }
        }))
        .pipe(tap(function(file){
            console.log(ext); // when I print to the console, ext is defined and correct
        }))
        .pipe(rename(ext)) // ext is undefined
        .pipe(gulp.dest(BUILD_DIR));
});
When I run the above, it errors with "Unsupported renaming parameter type supplied".
I have also tried putting the options object on the same line as rename(), with ext storing only the file extension, e.g. .pipe(rename({extname: ext})), which causes the files to have undefined appended as the extension (instead of phpfile.php, the .md file below would be named phpfileundefined).
yaml in phpfile.md
---
layout: php.hbt
filetype: ".php"
title: "php"
---
package.json
"devDependencies": {
"gulp": "^3.9.1",
"gulp-data": "^1.2.1",
"gulp-filter": "^4.0.0",
"gulp-front-matter": "^1.3.0",
"gulp-rename": "^1.2.2",
"gulp-util": "^3.0.7",
"lodash": "^4.11.1"
}
Your problem is that by the time you execute rename(ext) the value of ext is still undefined because your data(...) code hasn't run yet.
Gulp plugins work like this:
You invoke the plugin function and pass it any necessary parameter. In your case rename(ext).
The plugin function returns a duplex stream
You pass that duplex stream to .pipe()
Only after all .pipe() calls have been executed does your stream start running.
So rename(ext) is a function call that is used to construct the stream itself. Only after the stream has been constructed can the stream start running.
However, you set the value of ext only once the stream is running. You need the value of ext before that, when you are constructing the stream.
The easiest solution in your case is to simply do the renaming manually in your data(...) function instead of relying on gulp-rename. You can use the node.js built-in path module for that (which is what gulp-rename uses under the hood):
var path = require('path');

gulp.task('metalsmith', function(){
    return gulp.src(CONTENT_DIR)
        .pipe(frontMatter()).on("data", function(file){
            lodash.assign(file, file.frontMatter);
            delete file.frontMatter;
        })
        .pipe(data(function(file){
            var ext;
            if(typeof file.filetype === 'undefined'){
                ext = '.html';
            }else{
                ext = file.filetype;
            }
            var parsedPath = path.parse(file.path);
            file.path = path.join(parsedPath.dir, parsedPath.name + ext);
        }))
        .pipe(gulp.dest(BUILD_DIR));
});
Alternatively you could also use gulp-foreach as I suggest in this answer to a similar question. However that's not really necessary in your case since you're already accessing the file object directly in data(...).
EDIT: for completeness' sake, here's a version using gulp-foreach:
var foreach = require('gulp-foreach');

gulp.task('metalsmith', function(){
    return gulp.src(CONTENT_DIR)
        .pipe(frontMatter()).on("data", function(file){
            lodash.assign(file, file.frontMatter);
            delete file.frontMatter;
        })
        .pipe(foreach(function(stream, file){
            var ext;
            if(typeof file.filetype === 'undefined'){
                ext = {extname: '.html'};
            }else{
                ext = {extname: file.filetype};
            }
            return stream.pipe(rename(ext));
        }))
        .pipe(gulp.dest(BUILD_DIR));
});
@Sven's answer looks like the way to go, since you're hoping for something generalizable to other sites and since you can't drop the gulp-front-matter pipe.
Just to finish going over the code you had: Even
var ext = {extname: '.html'};
…
.pipe(rename(ext))
…
wouldn't work: gulp-rename would say "I don't know how to deal with the kind of data I was passed." You'd need to do
var ext = '.html';
…
.pipe(rename({extname: ext}))
…
(or ext = 'html' and {extname: '.' + ext}, for example if the "html" string was being pulled from somewhere else and didn't have the ".")
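(As an aside: newer versions of gulp-rename also accept a function and, from 2.x, pass the vinyl file as a second argument, which would let you set a per-file extension directly. This is a sketch under that assumption; the package.json above pins gulp-rename 1.2.2, which only passes the path object, so verify against the version in use:)
.pipe(rename(function(path, file) {
    // file.filetype comes from the front matter, as in the question
    path.extname = file.filetype || '.html';
}))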
As we discussed in chat, I was hoping to find that your php and html could be easily distinguished by file name or path. Since all your php files are in /blog you could use gulp-filter. Probably in the end it's not the best solution for this scenario, but it's a useful tool to know about so here's what it would look like:
var gulp = require('gulp'),
    gutil = require('gulp-util'),
    lodash = require('lodash'),
    data = require('gulp-data'),
    filesize = require('gulp-filesize'),
    filter = require('gulp-filter'), // ADDED
    frontMatter = require('gulp-front-matter'),
    rename = require('gulp-rename'),
    util = require('util');

gulp.task('metalsmith', function() {
    const filterPHP = filter('blog/**/*', { restore: true });
    const filterHTML = filter('!blog/**/*', { restore: true });
    return gulp.src(CONTENT_DIR)
        //---- if the only purpose of this particular gulp-front-matter pipe was to support the extension assignment, you could drop it
        .pipe(frontMatter()).on("data", function(file) {
            lodash.assign(file, file.frontMatter);
            delete file.frontMatter;
        })
        //---------
        .pipe(filterPHP) // narrow down to just the files matched by filterPHP
        .pipe(rename({ extname: '.php' }))
        .pipe(filterPHP.restore) // widen back up to the full gulp.src
        .pipe(filterHTML) // narrow down to just the files matched by filterHTML
        .pipe(rename({ extname: '.html' }))
        .pipe(filterHTML.restore) // widen back up
        .pipe(gulp.dest(BUILD_DIR));
});
I want to select a couple of files and generate concatenated files from them. I also want to re-concatenate the files when they change.
This is my assets tree:
assets
- css
- js
- sass
- _midia_queries.scss
- media-combine.scss
- query.scss
So, I want to generate maximum.css (from ./assets/css/media-combine.min.css) and medium.css (from ./assets/css/media-combine.min.css and ./assets/css/query.min.css).
Both .min files are being generated by my css task. See below:
gulp.task('css', function (done) {
    gulp.src(paths.sass.src)
        .pipe(plugins.changed('dist'))
        .pipe(plugins.sass())
        .pipe(plugins.combineMediaQueries())
        .pipe(plugins.autoprefixer())
        .pipe(isProduction ? plugins.minifyCss() : gutil.noop())
        .pipe(isProduction ? plugins.rename({ suffix: ".min" }) : gutil.noop())
        .pipe(gulp.dest(paths.css.dest))
        .on("end", done);
});
And my concat-css task uses my flawed approach to generating multiple files:
gulp.task('concat-css', ['css'], function (done) {
    concatType("css", done);
});

function concatType (type, done) {
    if (isProduction) {
        var task = gulp;
        filesToConcat[type].forEach(function (value) {
            value.files.forEach(function (file, fileIndex) {
                value.files[fileIndex] = paths[type].dest + file;
            });
            gutil.log('File', gutil.colors.green(value.compiled), 'is being concatenated');
            task = gulp.src(value.files)
                .pipe(plugins.concat(value.compiled))
                .pipe(gulp.dest(paths[type].dest));
            gutil.log('Ready to go!');
        });
        task.on("end", done);
    }
}
Update
When I type gulp in my console/terminal, my css and concat-css tasks run perfectly, but when the watch task is triggered by a file change, both run yet only css works: my .min files are updated, but my concatenated files are not.
Any ideas to approach this? Thanks.
Sorry, it was a logic problem.
In my concatType function, when setting up the paths for the targeted files, the destination prefix was prepended again on every watch-triggered run, producing paths like:
./assets/css/./assets/css/query.min.css
It would never work. So I've changed this:
value.files.forEach(function (file, fileIndex) {
    value.files[fileIndex] = paths[type].dest + file;
});
task = gulp.src(value.files)
For this:
var files = [];
value.files.forEach(function (file) {
    files.push(paths[type].dest + file);
});
task = gulp.src(files)
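An aside: the root cause was mutating value.files in place, so the prefix was applied again on each run; building a fresh array (as above) avoids that, and mapping with Node's path.join also normalizes the separators. A sketch assuming the same paths and value objects:
var path = require('path');

// build a new array every run instead of mutating value.files
var files = value.files.map(function (file) {
    return path.join(paths[type].dest, file);
});
task = gulp.src(files)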
Using PhpStorm, I would like to merge multiple JavaScript files into one.
I installed the closure compiler and configured the file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the architecture of my project (a test project to merge js files):
index.php
js (folder):
  first.js (+ first.min.js)
  second.js (+ second.min.js)
  third.js (+ third.min.js)
cache (folder):
  main.js
I would like to merge first.min.js, second.min.js and third.min.js into cache/main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each js file manually.
Can someone explain the arguments I must use to configure my filewatcher?
I used the npm packages concat, minifier and walk.
Here is the script I made:
var walk = require('walk'),
    concat = require('concat'),
    minifier = require('minifier'),
    files = [];

var JS_SOURCES_DIR = 'app/components',
    JS_LAST_FILE = 'app/app.module.js',
    JS_DIR = 'app/',
    JS_FULL_FILE = JS_DIR + 'app.js',
    JS_MINIFIED_FILE = 'app.min.js',
    JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;

var walker = walk.walk(JS_SOURCES_DIR, {followLinks: false});

walker.on('file', (root, stat, next) => {
    var fullpath = root.replace(/\\/g, '/');
    var regex = /.+\.js$/;
    if (stat.name.match(regex)) {
        files.push(fullpath + '/' + stat.name);
    }
    next();
});

walker.on('end', function () {
    files.push(JS_LAST_FILE);
    files.forEach(function (item) {
        console.log(item);
    });
    concat(files, JS_FULL_FILE).then((result) => {
        minifier.minify(JS_FULL_FILE, {output: JS_MINIFIED_FILE_PATH});
        console.log('\n[OK] ' + JS_MINIFIED_FILE + ' successfully updated');
    }, function (error) {
        console.log('[ERROR] JS concat failure: ' + error.message);
    });
});

minifier.on('error', function (error) {
    console.log('\n[ERROR] JS minify error: ' + error);
});
First, with walker, files are added to the files array. I use JS_LAST_FILE for AngularJS reasons, as that file builds the module and registers all the dependencies. Then the files are concatenated into JS_FULL_FILE. Finally, JS_FULL_FILE is minified into JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated.
Instead, when I work locally, I don't concatenate files; I simply add them all to the head of the page using a homemade function built on PHP's scandir().
I'm new to gulp, but I'm wondering if it's possible to iterate through directories in a gulp task.
Here's what I mean: I know a lot of the tutorials/demos show processing a bunch of JavaScript files using something like "**/*.js" and then compiling them into a single JavaScript file. But I want to iterate over a set of directories and compile each directory into its own JS file.
For instance, I have a file structure like:
/js/feature1/something.js
/js/feature1/else.js
/js/feature1/foo/bar.js
/js/feature1/foo/bar2.js
/js/feature2/another-thing.js
/js/feature2/yet-again.js
...And I want two files: /js/feature1/feature1.min.js and /js/feature2/feature2.min.js where the first contains the first 4 files and the second contains the last 2 files.
Is this possible, or am I going to have to manually add those directories to a manifest? It would be really nice to programmatically iterate over all the directories within /js/.
Thanks for any help you can give me.
-Nate
Edit: It should be noted that I don't only have 2 directories, but I have many (maybe 10-20) so I don't really want to write a task for each directory. I want to handle each directory the same way: get all of the JS inside of it (and any sub-directories) and compile it down to a feature-based minified JS file.
There's an official recipe for this: Generating a file per folder
var fs = require('fs');
var path = require('path');
var merge = require('merge-stream');
var gulp = require('gulp');
var concat = require('gulp-concat');
var rename = require('gulp-rename');
var uglify = require('gulp-uglify');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function(file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('scripts', function() {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function(folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.js'))
            // concat into foldername.js
            .pipe(concat(folder + '.js'))
            // write to output
            .pipe(gulp.dest(scriptsPath))
            // minify
            .pipe(uglify())
            // rename to folder.min.js
            .pipe(rename(folder + '.min.js'))
            // write to output again
            .pipe(gulp.dest(scriptsPath));
    });
    // process all remaining files in scriptsPath root into main.js and main.min.js files
    var root = gulp.src(path.join(scriptsPath, '/*.js'))
        .pipe(concat('main.js'))
        .pipe(gulp.dest(scriptsPath))
        .pipe(uglify())
        .pipe(rename('main.min.js'))
        .pipe(gulp.dest(scriptsPath));
    return merge(tasks, root);
});
You could use glob to get a list of directories and iterate over them, using gulp.src to create a separate pipeline for each feature. You can then return a promise which is resolved when all of your streams have ended.
var fs = require('fs');
var path = require('path');
var Q = require('q');
var gulp = require('gulp');
var glob = require('glob');
var concat = require('gulp-concat');
var uglify = require('gulp-uglify');

gulp.task('minify-features', function() {
    var promises = [];
    glob.sync('/js/features/*').forEach(function(filePath) {
        if (fs.statSync(filePath).isDirectory()) {
            var defer = Q.defer();
            var pipeline = gulp.src(filePath + '/**/*.js')
                .pipe(uglify())
                .pipe(concat(path.basename(filePath) + '.min.js'))
                .pipe(gulp.dest(filePath));
            pipeline.on('end', function() {
                defer.resolve();
            });
            promises.push(defer.promise);
        }
    });
    return Q.all(promises);
});
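The same task can also be written without promises by merging the per-folder streams and returning the result, much like the official recipe above. A sketch using merge-stream, under the same glob/fs assumptions as the code above:
var merge = require('merge-stream');

gulp.task('minify-features', function() {
    var streams = glob.sync('/js/features/*')
        .filter(function(filePath) {
            return fs.statSync(filePath).isDirectory();
        })
        .map(function(filePath) {
            return gulp.src(filePath + '/**/*.js')
                .pipe(uglify())
                .pipe(concat(path.basename(filePath) + '.min.js'))
                .pipe(gulp.dest(filePath));
        });
    // merge-stream accepts an array of streams, as in the recipe above
    return merge(streams);
});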
I am trying to understand how streams work in Node myself.
I made a simple example for you of how to make a stream that filters folders and starts a new given stream for each of them.
'use strict';

var gulp = require('gulp'),
    es = require('event-stream'),
    log = require('consologger');

// make a simple 'stream' that prints the path of whatever file it gets into
var printFileNames = function(){
    return es.map(function(data, cb){
        log.data(data.path);
        cb(null, data);
    });
};

// make a stream that identifies if the given 'file' is a directory, and if so
// pipelines it with the stream given
var forEachFolder = function(stream){
    return es.map(function(data, cb){
        if(data.isDirectory()){
            var pathToPass = data.path + '/*.*'; // change it to *.js if you want only js files, for example
            log.info('Piping files found in ' + pathToPass);
            if(stream !== undefined){
                gulp.src([pathToPass])
                    .pipe(stream());
            }
        }
        cb(null, data);
    });
};

// let's make a dummy task to test our streams
gulp.task('dummy', function(){
    // load some folder with some subfolders inside
    gulp.src('js/*')
        .pipe(forEachFolder(printFileNames));
    // we should see all the file paths printed in the terminal
});
So in your case, you can make a stream that does whatever you want with the files in a folder (like minifying and concatenating them) and then pass an instance of that stream to the forEachFolder stream I made, like I do with the printFileNames custom stream.
Give it a try and let me know if it works for you.
First, install gulp-concat & gulp-uglify.
$ npm install gulp-concat
$ npm install gulp-uglify
Next, do something like:
var gulp = require('gulp'),
    concat = require('gulp-concat'),
    uglify = require('gulp-uglify');

//task for building feature1
gulp.task('minify-feature1', function() {
    return gulp.src('/js/feature1/*')
        .pipe(uglify()) //minify feature1 stuff
        .pipe(concat('feature1.min.js')) //concat into single file
        .pipe(gulp.dest('/js/feature1')); //output to dir
});

//task for building feature2
gulp.task('minify-feature2', function() { //do the same for feature2
    return gulp.src('/js/feature2/*')
        .pipe(uglify())
        .pipe(concat('feature2.min.js'))
        .pipe(gulp.dest('/js/feature2'));
});

//generic task for minifying features
gulp.task('minify-features', ['minify-feature1', 'minify-feature2']);
Now, all you have to do to minify everything from the CLI is:
$ gulp minify-features
I had trouble with the gulp recipe, perhaps because I'm using gulp 4 and/or because I did not want to merge all my folders' output anyway.
I adapted the recipe to generate (but not run) an anonymous function per folder and return the array of functions so they can be processed by gulp.parallel, in a way where the number of generated functions can vary. The keys to this approach are:
Each generated function needs to be a function or composition (not a stream). In my case, each generated function was a series composition, because I do lots of things when building each module folder.
The array of functions needs to be passed into my build task using JavaScript's apply(), since every member of the array needs to be turned into an argument to gulp.parallel in my case.
Excerpts from my function that generates the array of functions:
function getModuleFunctions() {
    //Get list of folders as per recipe above - in my case an array named modules
    //For each module return a function or composition (gulp.series in this case).
    return modules.map(function (m) {
        var moduleDest = env.folder + 'modules/' + m;
        return gulp.series(
            //Illustrative functions... all must return a stream or call the callback, but you can have as many functions or compositions (gulp.series or gulp.parallel) as desired
            function () {
                return gulp.src('modules/' + m + '/img/*', { buffer: false })
                    .pipe(gulp.dest(moduleDest + '/img'));
            },
            function (done) {
                console.log('In my function');
                done();
            }
        );
    });
}
//Illustrative build task, running two named tasks then processing all modules generated above in parallel as dynamic arguments to gulp.parallel, the gulp 4 way
gulp.task('build', gulp.series('clean', 'test', gulp.parallel.apply(gulp.parallel, getModuleFunctions())));
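With ES2015 spread syntax, the apply() form can be written more directly; the two are equivalent (a sketch assuming the same tasks as above):
//same build task, spreading the generated functions into gulp.parallel
gulp.task('build', gulp.series('clean', 'test', gulp.parallel(...getModuleFunctions())));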