I'm working on a new, more scalable structure for an Express application.
Issue:
I don't want to define every route in the app.js file.
Solution:
Automate things so that the routes are loaded automatically.
So far, I have this code in the index.js file of the routes folder:
var fs = require('fs');

module.exports = function (app) {
    recursive_require(__dirname, app);

    function recursive_require(directory, app) {
        fs.readdirSync(directory).forEach(function (file) {
            if (fs.lstatSync(directory + '/' + file + '/').isDirectory()) {
                var has_js_files = false;
                directory = directory + '/' + file + '/';
                console.log('Scanning recursively on ' + directory);
                // We run across the directory to check if there are any js files.
                fs.readdirSync(directory).forEach(function (file) {
                    console.log('Reading file/directory ' + file);
                    if (file.match(/\.js$/g)) {
                        has_js_files = true;
                        console.log('Found js files on directory ' + directory);
                    }
                });
                // If the folder has no js files, we assume there are other folders inside,
                // so we scan the folder recursively.
                if (!has_js_files) {
                    console.log('No JS files found on ' + directory + ' going to scan recursively');
                    recursive_require(directory.substr(0, directory.lastIndexOf('/')));
                } else {
                    // Otherwise, we require the directory, knowing it contains js files.
                    console.log('Found JS files on ' + directory + ', require them');
                    require(directory)(app);
                }
            }
        });
    }
}
Now, this seems to work, but I have a bit of an issue.
My idea is to have everything in folders, say, with this structure:
routes
    admin
        posts
            index.js <- handles add, remove, edit for posts
        users
            index.js <- handles add, remove, edit for users
    blog
        posts
            index.js <- handles show for the frontend
    index.js <- loads all of the files recursively
Now, I'm running into a problem with this code.
I'm getting this error:
PS C:\Users\bony-_000\Documents\GitHub\node-blog> node app
Scanning recursively on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/
Reading file/directory posts
No JS files found on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/ going to scan recursively
Scanning recursively on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/
Reading file/directory index.js
Found js files on directory C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/
Found JS files on C:\Users\bony-_000\Documents\GitHub\node-blog\routes/admin/posts/, require them
C:\Users\bony-_000\Documents\GitHub\node-blog\routes\admin\posts\index.js:2
app.get('/admin/posts/add', function(req, res) {
^
TypeError: Cannot call method 'get' of undefined
Even though I'm passing the app variable...
Any help would be much appreciated; also, feel free to use the code.
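For reference, the TypeError above comes from the recursive call in the snippet: recursive_require is invoked with only the directory argument, so app is undefined one level down. A minimal fix, keeping the original names, would be to pass it through:

// Pass app along so nested route files can register their routes.
recursive_require(directory.substr(0, directory.lastIndexOf('/')), app);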
I've solved all the issues and now have my structured MVC online!
Here is the code for anyone to use:
var fs = require('fs'),
    required_files = [];

module.exports = function (app) {
    recursive_require(__dirname, __dirname, app);

    function recursive_require(directory, base_dir, app) {
        fs.readdirSync(directory).forEach(function (input) {
            var next_directory = directory + '/' + input + '/';
            // Mark the base dir's index.js (this file) as already required,
            // so it never gets required again.
            if (!(required_files.indexOf(base_dir + '/index') > -1)) {
                required_files.push(base_dir + '/index');
            }
            // Check if it's a directory
            if (fs.lstatSync(next_directory).isDirectory()) {
                // We require it recursively
                console.log('Reading directory ' + next_directory);
                recursive_require(next_directory.substr(0, next_directory.lastIndexOf('/')), base_dir, app);
            } else {
                // We require every js file in the folder that hasn't been required yet
                require_files(directory, app);
                return;
            }
        });
    }

    function require_files(directory, app) {
        fs.readdir(directory, function (err, files) {
            files.forEach(function (file) {
                if (file.match(/\.js$/g)) {
                    var file_path = directory + '/' + file;
                    file_path = file_path.substr(0, file_path.indexOf('.js'));
                    if (required_files.indexOf(file_path) == -1) {
                        required_files.push(file_path);
                        require(file_path)(app);
                    }
                }
            });
        });
        return;
    }
}
Any suggestions are welcome.
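For completeness, here is how the loader would be wired up from the main file; a minimal sketch, assuming the code above lives in routes/index.js and that every route file exports a function (app) { ... } as in the snippets:

// app.js (sketch)
var express = require('express');
var app = express();

// Load every route file under routes/ recursively.
require('./routes')(app);

app.listen(3000);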
I want to delete all files ending with .pdf in the current directory. Suppose I have 3 different PDF files, 1 image file, and one text file; I want to delete only those 3 PDF files.
What I have tried:
1st method
fs.unlinkSync('./'+*+pdfname); -> I know this does not make sense
2nd method
try {
    var files = (here the list of files should come; however, I am failing to get those);
    var path = "./";
    files.forEach(path => fs.existsSync(path) && fs.unlinkSync(path));
} catch (err) {
    console.error("not exist");
}
Any different approaches would be appreciated.
Update with the solution:
I have got the solution for my requirement; I just wanted my function to delete all the PDF files and to be synchronous. 99% of the solution was given by the author of this answer: https://stackoverflow.com/a/66558251/11781464.
fs.readdir is asynchronous; it just needs to be made synchronous with fs.readdirSync.
Below is the updated code; all the credit should go to that author.
Updated code
const fs = require('fs');

try {
    const path = './';
    // Read the directory given in `path`
    fs.readdirSync(path).forEach((file) => {
        // If the file has a PDF extension, remove it
        if (file.split('.').pop().toLowerCase() === 'pdf') {
            console.log(`Deleting file: ${file}`);
            fs.unlinkSync(path + file);
        }
    });
    console.log("Deleted all the pdf files");
    return true;
} catch (err) {
    console.error("Error in deleting files", err);
}
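As a side note, the path + file concatenation above only works because path ends with a slash; the built-in path module is more robust for this. A minimal variant of the deletion line, assuming the same variables (nodePath is just a name chosen here to avoid clashing with the path string):

const nodePath = require('path');
// Join the directory and file name without worrying about trailing slashes.
fs.unlinkSync(nodePath.join(path, file));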
You can read the directory using fs.readdir and then check for PDF files and delete them. Like this:
const fs = require('fs');

const path = './';
// Read the directory given in `path`
fs.readdir(path, (err, files) => {
    if (err) {
        // A surrounding try/catch would not catch errors thrown here, because
        // the callback runs asynchronously; handle them directly instead.
        return console.error(err);
    }
    files.forEach((file) => {
        // If the file has a PDF extension, remove it
        if (file.split('.').pop().toLowerCase() === 'pdf') {
            console.log(`Deleting file: ${file}`);
            fs.unlinkSync(path + file);
        }
    });
});
Preliminary Reading
The current working directory of the Node.js process (see the update at the end of this answer)
Path methods
resolve
extname
File System methods
fs.readdirSync
fs.unlinkSync
and classes
fs.Dirent
Example
"use strict";
const fs = require('fs');
const path = require('path');
const cwd = process.cwd();
fs.readdirSync( cwd, {withFileTypes: true})
.forEach( dirent => {
if(dirent.isFile()) {
const fileName = dirent.name;
if( path.extname(fileName).toLowerCase() === ".pdf") {
fs.unlinkSync( path.resolve( cwd, fileName));
}
}
});
Notes
untested code
If unlinkSync fails it throws an Error rather than returning -1 (Node wraps the unlink(2) system call linked in the documentation). Personally I would test this using a filename that doesn't exist in cwd.
The {withFileTypes: true} option for readdirSync returns fs.Dirent objects, which expose only the entry's name and type-checking methods such as isFile(); they carry no mime-type value, so checking for application/pdf content regardless of extension would require inspecting the files themselves (not attempted in the example).
Update: path.resolve prepends the current working directory to the returned path, when necessary, by default. path.resolve(fileName) will work equally as well as path.resolve(cwd, fileName) in the example.
It seems you know how to delete (unlink) the files - you are asking how to get the file paths?
Try using glob:
const pdfFiles = require("glob").globSync("*.pdf");
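A complete sketch, assuming glob v9+ (which exports globSync) and Node's built-in fs module:

const fs = require("fs");
const { globSync } = require("glob");

// Collect the paths of all PDF files in the current directory...
const pdfFiles = globSync("*.pdf");

// ...and delete each one.
pdfFiles.forEach((file) => fs.unlinkSync(file));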
Hi, here I am attaching tested code that deletes all (and only) the .pdf files, leaving files with other extensions like .txt, .docx, etc. in the directory.
Note: you can delete files or directories only from the server side.
const fs = require('fs');
const path = require('path');

fs.readdir('../path to directory', (err, files) => {
    if (err) return console.log(err);
    const pdfFiles = files.filter(el => path.extname(el) === '.pdf');
    pdfFiles.forEach(file => {
        console.log("Removing File -> ", file);
        var filename = "../path to directory/" + file;
        fs.unlink(filename, function (err) {
            if (err) return console.log(err);
            console.log('file deleted successfully');
        });
    });
});
This will give you the following result in the console log.
Removing File -> note.pdf
Removing File -> note2.pdf
file deleted successfully
file deleted successfully
Please feel free to comment if you have any queries.
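The interleaved output above is expected: fs.unlink is asynchronous, so all the "Removing File" logs appear before the deletions complete. If ordering matters, a variant using the promise-based API (available since roughly Node 10) can await each deletion; removePdfs below is a hypothetical helper name:

const fs = require('fs');
const path = require('path');

async function removePdfs(dir) {
    const files = await fs.promises.readdir(dir);
    for (const file of files.filter(f => path.extname(f) === '.pdf')) {
        console.log("Removing File -> ", file);
        // Wait for each deletion before logging success.
        await fs.promises.unlink(path.join(dir, file));
        console.log('file deleted successfully');
    }
}

removePdfs('../path to directory').catch(console.error);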
I am unable to load the .js file with Minium Developer. I have the path set up like in the example project, but it doesn't work. What am I doing wrong?
Helper.js
var helperFuntions = {
    findExistingDataOnTable: function (cssSelector, query) {
        var encontrado = $(cssSelector).matchingText(query);
        if (encontrado) {
            console.log("Superado");
        } else {
            console.log("No superado");
        }
    }
};
ProyectoPrueba.js (not working, wrong import)
var helperFuntions = require("modules/Helper/Helper.js");

When(/^Compruebo la existencia de "(.*?)"$/, function (query) {
    var cssTable = "\".ym-cbox\"";
    helperFuntions.findExistingDataOnTable(cssTable, query);
});
ProyectoPrueba.js (working, no import)
When(/^Compruebo la existencia de "(.*?)"$/, function (query) {
    var found = $(".ym-cbox").matchingText(query);
    if (found) {
        console.log("Superado");
    } else {
        console.log("No superado");
    }
});
Project hierarchy
You must exclude the "modules" folder from the require path:
require("Helper/Helper");
Also, the extension ".js" is not required.
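Applied to the failing step definition above, that would look like the following sketch (assuming Minium resolves require paths relative to the modules folder, as described; the selector is also passed without the extra embedded quotes, matching the working version):

// ProyectoPrueba.js
var helperFuntions = require("Helper/Helper");

When(/^Compruebo la existencia de "(.*?)"$/, function (query) {
    var cssTable = ".ym-cbox";
    helperFuntions.findExistingDataOnTable(cssTable, query);
});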
Your import paths need to be relative (starting with ./ or ../) unless you're importing modules from a node_modules folder.
Let's say the modules folder is a sibling of the current folder you're importing from.
In that case, require("modules/Helper/Helper.js") should be require("../modules/Helper/Helper.js").
I'm trying to write a gulp task that builds the JavaScript files and concatenates them into a single file for each folder, including the root folder.
I have found this solution: https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md
If you have a set of folders, and wish to perform a set of tasks on
each, for instance...
/scripts
/scripts/jquery/*.js
/scripts/angularjs/*.js
...and want to end up with...
/scripts
/scripts/jquery.min.js
/scripts/angularjs.min.js
However, this only builds the *.js files for the subfolders inside the scripts folder. I also need to build the *.js files directly inside the root scripts folder; i.e., my expected output would be:
/scripts.min.js
/scripts/jquery.min.js
/scripts/angularjs.min.js
I'm new to Node, so I'm confused about how to achieve that. I'd really appreciate your help. Thanks a lot.
You can create a separate task (baseScripts) for creating the minified scripts for the base directory. Then create another task (allScripts) that runs both the baseScripts and subScripts tasks.
var fs = require('fs');
var path = require('path');
var gulp = require('gulp');
var uglify = require('gulp-uglify');
var rename = require('gulp-rename');
var concat = require('gulp-concat');
var merge = require('merge-stream');

var scriptsPath = 'src/scripts';

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('allScripts', ['baseScripts', 'subScripts']);

gulp.task('subScripts', function () {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/*.js'))
            .pipe(uglify())
            .pipe(rename(folder + '.min.js'))
            .pipe(gulp.dest(scriptsPath));
    });
    return merge(tasks);
});

gulp.task('baseScripts', function () {
    return gulp.src(scriptsPath + '/*.js')
        .pipe(uglify())
        .pipe(concat('scripts.min.js'))
        .pipe(gulp.dest('src'));
});
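As a side note, the array-of-dependencies syntax above is gulp 3.x. In gulp 4 it was removed in favour of gulp.parallel, so the combined task would presumably become:

// gulp 4.x equivalent of the combined task
gulp.task('allScripts', gulp.parallel('baseScripts', 'subScripts'));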
After a day, I have come up with a modification of my own, shown below.
var paths = {
    js: {
        folder: 'dev/assets/js',
        dest: 'dev/assets/js' // destination folder for the combined .min.js files
    }
};

gulp.task('js', function () {
    var folders = getFolders(paths.js.folder);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(paths.js.folder, folder.path, '/*.js'))
            .pipe(uglify())
            .pipe(concat(folder.name + '.min.js'))
            .pipe(gulp.dest(paths.js.dest));
    });
    // Return the merged stream so gulp knows when the task has finished,
    // and reload only after all files have been written.
    return merge(tasks).on('end', browserSync.reload);
});

var getFolders = function (dir) {
    // The root folder is represented by an empty path and a fixed output name.
    var folders = [{ path: '', name: 'app' }];
    var folder = fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
    for (var i = 0; i < folder.length; i++) {
        folders.push({ path: folder[i], name: folder[i] });
    }
    return folders;
};
I have separated the directories and the names for the combined scripts into two properties of an object, so I don't need different tasks for the root folder and the subfolders.
Please feel free to comment on my approach :)
Under my assets/ folder, I have numerous subfolders, each containing an arbitrary number of images, like so:
assets/article1/
assets/article2/
I'm trying to write a gulp task to locate all .jpg images within and generate their thumbnail versions, to be saved in a thumbs/ subfolder within the folder where each file resides:
assets/article1/ # original jpg images
assets/article1/thumbs/ # thumbnail versions of above..
assets/article2/
assets/article2/thumbs/
I've been trying various approaches but no luck. The closest I've come is:
gulp.task('thumbs', function () {
    return gulp.src('./assets/**/*.jpg')
        .pipe(imageResize({ width: 200 }))
        .pipe(gulp.dest(function (file) { return file.base + '/thumbs/'; }));
});
However, this creates a single thumbs\ folder at the root of assets\
assets/article1/
assets/article2/
assets/thumbs/article1/
assets/thumbs/article2/
Is there good info on paths and wildcards anywhere? Clearly I'm not handling them well.
You could use path.dirname for that: http://nodejs.org/api/path.html#path_path_dirname_p
// require the core path module
var path = require('path');

gulp.task('thumbs', function () {
    return gulp.src('./assets/**/*.jpg')
        .pipe(imageResize({ width: 200 }))
        .pipe(gulp.dest(function (file) { return path.join(path.dirname(file.path), 'thumbs'); }));
});
Here's what worked for me given the following directory structure (I simplified a bit to focus on just getting the files in the right place)
assets/article1/1.jpg
assets/article2/2.jpg
with a desired outcome of
assets/article1/1.jpg
assets/article1/thumbs/1.jpg
assets/article2/2.jpg
assets/article2/thumbs/2.jpg
Here's what worked for me (modified from this recipe https://github.com/gulpjs/gulp/blob/master/docs/recipes/running-task-steps-per-folder.md)
var gulp = require('gulp'),
    rename = require('gulp-rename'),
    path = require('path'),
    fs = require('fs');

var scriptsPath = 'assets'; // folder to process

function getFolders(dir) {
    return fs.readdirSync(dir)
        .filter(function (file) {
            return fs.statSync(path.join(dir, file)).isDirectory();
        });
}

gulp.task('thumbs', function () {
    var folders = getFolders(scriptsPath);
    return folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.jpg'))
            .pipe(rename({ dirname: folder + '/thumbs/' }))
            .pipe(gulp.dest(scriptsPath));
    });
});
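One refinement worth considering: returning a plain array of streams doesn't let gulp track completion, so the task may be reported as finished before all files are written. The recipe linked above handles this with the merge-stream package, which would presumably look like:

var merge = require('merge-stream');

gulp.task('thumbs', function () {
    var folders = getFolders(scriptsPath);
    var tasks = folders.map(function (folder) {
        return gulp.src(path.join(scriptsPath, folder, '/**/*.jpg'))
            .pipe(rename({ dirname: folder + '/thumbs/' }))
            .pipe(gulp.dest(scriptsPath));
    });
    // Combine all per-folder streams into one so gulp waits for every write.
    return merge(tasks);
});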
I've found another approach that may be useful for anyone who comes across this post.
My original goal was very similar to your request, and this approach may be adjusted to almost any particular need.
const { src, dest } = require('gulp');
const path = require('path');
const sourcemaps = require('gulp-sourcemaps');
const sass = require('gulp-sass'); // classic gulp-sass usage; v5+ requires passing a compiler
const rename = require('gulp-rename');
const tap = require('gulp-tap');
const flatten = require('gulp-flatten');

function scss(cb) {
    src('./*/scss/*.scss')
        .pipe(sourcemaps.init())
        .pipe(sass())
        .pipe(rename({ extname: '.min.css' }))
        .pipe(tap(function (file) {
            // Enrich the file with crucial information about its original path; you can
            // save anything you may need. In my case the filename is quite enough. It's
            // important to place this block before any sourcemap write, if you have any.
            file.name = path.basename(file.path);
        }))
        .pipe(sourcemaps.write('.'))
        .pipe(flatten()) // Remove file-relative paths.
        .pipe(dest(function (file) {
            // Reconstruct the final path using the insight we saved before, together
            // with the clean base path provided by flatten.
            return path.join(path.dirname(file.path), file.name.replace(/\.min\..+$/, ''), 'static');
        }));
    cb();
}
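One adjustment worth considering: cb() above is invoked synchronously, before the stream has finished, so gulp may report the task complete too early. With the gulp 4 API, returning the stream instead lets gulp wait for it; a simplified sketch (the ./css output folder and the scssSimple name are hypothetical):

const { src, dest } = require('gulp');
const sass = require('gulp-sass'); // classic gulp-sass usage; v5+ requires passing a compiler

// Returning the stream (rather than calling cb() early) lets gulp
// know exactly when every file has been written.
function scssSimple() {
    return src('./*/scss/*.scss')
        .pipe(sass())
        .pipe(dest('./css'));
}

exports.scss = scssSimple;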
Using phpStorm, I would like to merge multiple JavaScript files into one.
I installed the closure compiler and configured the file watcher to minify each JavaScript file.
Now, I would like to combine all of my JavaScript files into one.
Here's the architecture of my project (a test project to merge js files) :
index.php
js (folder)
    first.js (+ first.min.js)
    second.js (+ second.min.js)
    third.js (+ third.min.js)
cache (folder)
    main.js
I would like to merge first.min.js, second.min.js, and third.min.js into cache/main.js.
Ideally, merging all of the files would happen automatically; I don't want to specify each js file manually.
Can someone explain the arguments I must use to configure my filewatcher?
I used the npm packages concat, minifier, and walk.
Here is the script I made:
var walk = require('walk'),
    concat = require('concat'),
    minifier = require('minifier'),
    files = [];

var JS_SOURCES_DIR = 'app/components',
    JS_LAST_FILE = 'app/app.module.js',
    JS_DIR = 'app/',
    JS_FULL_FILE = JS_DIR + 'app.js',
    JS_MINIFIED_FILE = 'app.min.js',
    JS_MINIFIED_FILE_PATH = JS_DIR + JS_MINIFIED_FILE;

var walker = walk.walk(JS_SOURCES_DIR, { followLinks: false });

walker.on('file', (root, stat, next) => {
    var fullpath = root.replace(/\\/g, '/');
    var regex = new RegExp(/.+\.js$/);
    if (stat.name.match(regex)) {
        files.push(fullpath + '/' + stat.name);
    }
    next();
});

walker.on('end', function () {
    files.push(JS_LAST_FILE);
    files.forEach(function (item) {
        console.log(item);
    });
    concat(files, JS_FULL_FILE).then((result) => {
        minifier.minify(JS_FULL_FILE, { output: JS_MINIFIED_FILE_PATH });
        console.log('\n[OK] ' + JS_MINIFIED_FILE + ' successfully updated');
    }, function (error) {
        console.log('[ERROR] JS concat failure: ' + error.message);
    });
});

minifier.on('error', function (error) {
    console.log('\n[ERROR] JS minify error: ' + error);
});
First, with walker, the files are added to the files array. I put JS_LAST_FILE last for AngularJS reasons, as that file builds the module and registers all of its dependencies. Then the files are concatenated into JS_FULL_FILE. Finally, JS_FULL_FILE is minified into JS_MINIFIED_FILE.
I do not use a watcher to trigger the concat script when a file is updated.
Instead, when I work locally, I don't concatenate the files at all; I simply add them to the head of the page with a homemade function based on PHP's scandir().
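If you did want the merge to re-run automatically whenever a source file changes, Node's built-in fs.watch could trigger the same script. A minimal sketch, assuming the script above is saved as build.js (note the recursive option is platform-dependent: it works on Windows and macOS, and on Linux only with newer Node versions):

var fs = require('fs');
var exec = require('child_process').exec;

// Re-run the build whenever anything under the sources folder changes.
fs.watch('app/components', { recursive: true }, function (eventType, filename) {
    console.log('Change detected in ' + filename + ', rebuilding...');
    exec('node build.js', function (err, stdout) {
        if (err) return console.error(err);
        console.log(stdout);
    });
});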