NodeJS: Dynamically import modules - javascript

Is there any way in JS to dynamically import some modules?
For example, this is my architecture:
| - index.js
| - modules
|   | - SomeModule
|   |   | - router.js
|   |   | - controller.js
|   | - SomeOtherModule
|   |   | - SubModule
|   |   |   | - router.js
|   |   |   | - controller.js
|   |   | - controller.js
My goal is to import all the router.js modules in the index.js file, so I was thinking about something like this:
import fs from "fs"
import path from "path"

function scanDirForRouters (dirPath, name = "") {
  let routers = []
  const files = fs.readdirSync(dirPath)
  for (const file of files) {
    const isDirectory = fs.statSync(path.join(dirPath, file)).isDirectory()
    if (isDirectory) {
      routers = [...routers, ...scanDirForRouters(path.join(dirPath, file), name + file)]
    }
    else if (file === "router.js") {
      routers.push(`import ${name}Router from ${path.join(dirPath, file)}`)
    }
  }
  return routers
}

let allRouters = scanDirForRouters(path.join(path.dirname("."), "modules"))
So if I do a console.log(allRouters) it gives me:
[
  'import SomeModuleRouter from modules/SomeModule/router.js',
  'import SomeOtherModuleSubModuleRouter from modules/SomeOtherModule/SubModule/router.js'
]
So I wish there were a way to execute these commands in my script now... or maybe there's another way to do it?
Thanks a lot

Thanks to @joprocoorp I found the answer: use the import() function directly in the loop.
This is my code :
import fs from "fs"
import path from "path"

async function scanDirForRouters (dirPath, name = "") {
  let routers = []
  const files = fs.readdirSync(dirPath)
  for (const file of files) {
    const isDirectory = fs.statSync(path.join(dirPath, file)).isDirectory()
    if (isDirectory) {
      // await the recursive call before spreading its result
      routers = [...routers, ...(await scanDirForRouters(path.join(dirPath, file), name + file))]
    }
    else if (file === "router.js") {
      const router = await import(`./${path.join(dirPath, file)}`)
      routers.push(router)
    }
  }
  return routers
}

let allRouters = await scanDirForRouters(path.join(path.dirname("."), "modules"))
Then I can loop over the routers and do whatever I want :)
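For example, if each router.js default-exports an Express router (an assumption for illustration, not part of the original answer), mounting them could look like this:

import express from "express"

const app = express()

// each entry in allRouters is a module namespace object; the router is its default export
for (const module of allRouters) {
  app.use("/", module.default)
}

app.listen(3000)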

Please check eloader: https://www.npmjs.com/package/eloader
It loads entire folders (recursively) or object instances.
const eloader = require('eloader');
const express = require('express');

let options = {debug: false};

eloader.addObject('options', options) // New methods
  .addServices('services') // This will be loaded first.
  .addRoutes('routes')     // This third
  .run('loginRoute');      // This second

You could use require instead. I implemented it by replacing the routers.push line:
const fs = require("fs")
const path = require("path")

function scanDirForRouters (dirPath, name = "") {
  let routers = []
  const files = fs.readdirSync(dirPath)
  for (const file of files) {
    const isDirectory = fs.statSync(path.join(dirPath, file)).isDirectory()
    if (isDirectory) {
      routers = [...routers, ...scanDirForRouters(path.join(dirPath, file), name + file)]
    }
    else if (file === "router.js") {
      // push the required module so the function actually returns the routers
      routers.push(require(`${path.join(dirPath, file)}`))
    }
  }
  return routers
}

let allRouters = scanDirForRouters(path.join(path.dirname("."), "modules"))

Related

Is there a way to consolidate the imports/requires into one line in javascript?

I have many different modules in a project. Each module has its own folder, and within each module there are 3 files:
index.js
index.test.js
data.js
Every module has these files: the same file names, different content.
For the index.test.js files, the import statements are all the same in every module and look like this:
const runAnalysis = require('./index');
const { data } = require('./data');
const { formatData } = require('utils');
const formattedData = formatData(data);
Since these import statements are the same for all index.test.js files, is there a way to consolidate these into one line, so it doesn't have to be repeated over and over in every module?
Is there a way to put these import statements in a header file, and then each index.test.js can just import the header file?
Not sure if this is efficient or recommended, but it works.
Create a util JS file at the project root directory; let's name it import-util.js, with the following code:
const { formatData } = require('utils');

const getMyImports = (dirname) => {
  const runAnalysis = require(`${dirname}/index`);
  const { data } = require(`${dirname}/data`);
  const formattedData = formatData(data);
  return {
    runAnalysis,
    formattedData
  };
};

exports.getMyImports = getMyImports;
Then this file can be used like this to get runAnalysis and formattedData:
const { runAnalysis, formattedData } = require('../import-util').getMyImports(__dirname);
__dirname is the trick that makes it work: it provides the directory path from which index.js and data.js are imported.
This single line can now be used in every module folder; provided the folder contains index.js and data.js, it works the same as the following 4 lines of code:
const runAnalysis = require('./index');
const { data } = require('./data');
const { formatData } = require('utils');
const formattedData = formatData(data);
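As a usage sketch, an index.test.js in any module folder could then start like this (the Jest-style test/expect globals are an assumption for illustration):

// hypothetical index.test.js inside a module folder
const { runAnalysis, formattedData } = require('../import-util').getMyImports(__dirname);

test('runs analysis on formatted data', () => {
  // assumes runAnalysis accepts the formatted data
  expect(runAnalysis(formattedData)).toBeDefined();
});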

loop in a module export javascript/sequelize

I have an index.js script that contains Sequelize models.
Here is the tree structure of my folder:
models/
  files/
  xml_files/
  csv_files/
  txt_files/
  index.js
server.js
This is my index.js code:
const generate_files = require('./files')
const Files = generate_files(sequelize, DataTypes)

const generate_xml_files = require('./xml_files')
const Xml_File = generate_xml_files(sequelize, DataTypes)

const generate_csv_files = require('./csv_files')
const Csv_File = generate_csv_files(sequelize, DataTypes)

const generate_txt_files = require('./txt_files')
const Txt_File = generate_txt_files(sequelize, DataTypes)

module.exports = {
  Files, Xml_File, Csv_File, Txt_File
}
In server.js I imported each model like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')
Now in server.js I want to get the name of the Sequelize model, then check if the name of the model matches the name of the table, like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')

if (Files.name == TableName){
  Files.findAll()
}
if (Xml_File.name == TableName){
  Xml_File.findAll()
}
if (Csv_File.name == TableName){
  Csv_File.findAll()
}
....
How can I avoid doing these tests every time?
You can do:
const Example = require('./index')
Example.Files
As for making a loop, I'm not sure you can do that, but you can do:
export const File = generate_files(sequelize, DataTypes)
which (in ES module syntax) is roughly the same thing as:
module.exports = {File}
You can create an object and loop through the entries, like so.
var exports = {
  'Files': './files',
  'Xml_File': './xml_files',
  'Csv_File': './csv_files',
  'Txt_File': './txt_files'
};
var e = {};

Object.entries(exports).forEach(ex => {
  e[ex[0]] = require(ex[1])(sequelize, DataTypes);
});

module.exports = e;
Then you'll want to import them from this index file...
const {Files, Xml_File, Csv_File, Txt_File} = require('./models/index.js')
Per your update:
const Models = require('./models/index.js');

// Object.values gives the models themselves; entries would give [key, value] pairs
Object.values(Models).forEach(model => {
  if (model.name == TableName) {
    model.findAll();
  }
});
To simplify your index.js you could also do it like this:
module.exports = {
  Files: require('./files')(sequelize, DataTypes),
  Xml_File: require('./xml_files')(sequelize, DataTypes),
  Csv_File: require('./csv_files')(sequelize, DataTypes),
  Txt_File: require('./txt_files')(sequelize, DataTypes)
}
It's easier to read, at least for me.
Another way is to loop over the dir, so you never need to change the code in index.js even if you add another folder like /xxx_files:
const Fs = require('fs');

var dir = Fs.readdirSync(__dirname, { withFileTypes: true });
var exports = {};

dir.forEach(d => {
  // d is a Dirent, so use d.name to build the require path
  if (d.isDirectory())
    exports[d.name] = require('./' + d.name)(sequelize, DataTypes);
});

// !!!!!!! the names of the exports will be the names of the directories
module.exports = exports;
And to avoid checks like if (Files.name == TableName), all you need is to not import like this:
const {Files, Xml_File, Csv_File, Txt_File} = require('./models')
but rather like this:
const models = require('./models')
// models = { files, xml_files, csv_files, txt_files }
then you can do this:
var db = models[TableName] // assuming TableName is files, xml_files etc.
if (db)
  doSomething();
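Putting it together in server.js, a small lookup helper avoids the per-model tests entirely (a sketch; the helper name findAllFor and the exact casing of TableName are assumptions):

const models = require('./models');

// hypothetical helper: resolve the model by table name, then query it
async function findAllFor(tableName) {
  const model = models[tableName];
  if (!model) throw new Error('Unknown table: ' + tableName);
  return model.findAll();
}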

Dynamic reloading only part of back-end with dynamic import

In my node backend, I have the following file structure:
project
|-- expensive
| |-- index.ts
|-- files
| |-- foo.ts
| |-- bar.ts
| `-- baz.ts
|-- tsconfig.json
|-- package.json
`-- index.ts
I want to reload only part of my project (./files/*) without having to restart the entire thing.
What I did was use dynamic import:
// inside index.ts...
const client = new Client() as Client;
client.expensive = new Map();
client.files = new Map()

// load each file under the `files` dir
client.loadFiles = async () => {
  const fileNames = readdirSync('./files/')
  fileNames.forEach(async name => {
    const module = await import('./files/' + name); // dynamic import
    client.files.set(module.default.name, module.default)
  })
}

// client.loadExpensive...

// load everything
async function loadAll(reload: boolean = false) {
  await client.loadFiles(reload);
  await client.loadExpensive(reload);
}

loadAll();
startApp();
Then the reload function would be:
// reload all or the specified dir
client.reload = async (dir?: string | undefined) => {
  if (dir) {
    dir = dir.replace(/^\w/, c => c.toUpperCase()); // capitalize
    if (client.hasOwnProperty('load' + dir)) client['load' + dir]();
    else console.error('no such dir')
  } else {
    await loadAll();
  }
}
The problem: while the project loads and reloads without error, adding or removing files under ./files/* and then calling .reload() does not seem to produce any change. Why is that?
Prior to converting to TS, I used require and cache busting:
// additional reload arg
client.loadFiles = async (reload) => {
  const fileNames = readdirSync('./files/')
  fileNames.forEach(async name => {
    // delete the require cache, then require the file
    if (reload) delete require.cache[require.resolve(`./files/${name}`)];
    const module = require(`./files/${name}`)
    client.files.set(module.default.name, module.default)
  })
}
So I looked at the transpiled JS code; it looks like import() uses require underneath:
const module = yield Promise.resolve().then(() => __importStar(require('./files/' + name)));
What am I doing wrong? Or is this even a good pattern to follow?
Because import() compiles to require() (roughly), you'll need to delete the require cache like you did before you used TS.
client.loadFiles = async (reload: boolean) => {
  const fileNames = readdirSync('./files/')
  fileNames.forEach(async name => {
    // bust the cache before re-importing (name already includes the
    // extension: .js when compiled, .ts under ts-node)
    if (reload) delete require.cache[require.resolve('./files/' + name)]
    const module = await import('./files/' + name); // dynamic import
    client.files.set(module.default.name, module.default)
  })
}
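Note that this cache trick only works while TypeScript compiles import() down to require(), i.e. with CommonJS output. If you target native ES modules there is no require.cache to clear; a common workaround is a cache-busting query string, sketched here inside the same loop (the ?version parameter name is arbitrary):

// ESM-only sketch: each distinct specifier is evaluated fresh
// (previously imported copies stay cached, so this leaks memory over many reloads)
const module = await import(`./files/${name}?version=${Date.now()}`);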

Does having the same `require` in multiple files increase runtime

So I'm planning to separate my functions into separate files and then import them into a single index.js, which then becomes the main exporter. So I'm wondering if having something like var bcrypt = require('bcrypt') in several of my files would be slower than just having it in one file.
Here's how I'm planning to group and export in index.js
const fs = require('fs');
const path = require('path')

const modules = {}
const files = fs.readdirSync(__dirname)

files.forEach(file => {
  if (file === 'index.js') return
  let temp = require(path.join(__dirname, file))
  for (let key in temp) {
    modules[key] = temp[key]
  }
});

module.exports = modules
As an example of what I mean:
file1.js
var bcrypt = require("bcrypt");
module.exports.file1test = "hi"
file2.js
var bcrypt = require("bcrypt");
module.exports.file2test = "bye"
No, it does not. Whenever a module is required for the first time, the module's code runs, assigns something to its exports, and those exports are returned. Further requires of that module simply reference those exports again. The logic is similar to this:
const importModule = (() => {
  const exports = {};
  return (name) => {
    if (!exports[name]) exports[name] = runModule(name);
    return exports[name];
  };
})();
So, multiple imports of the same module is no more expensive than referencing an object multiple times.
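You can see the caching directly: requiring the same module twice yields the very same exports object (a quick check; any installed module works in place of bcrypt):

// both requires resolve to the same cached exports object
const a = require('bcrypt');
const b = require('bcrypt');
console.log(a === b); // true

// the cache itself is visible on require.cache, keyed by resolved filename
console.log(require.resolve('bcrypt') in require.cache); // true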

How do you get a list of the names of all files present in a directory in Node.js?

I'm trying to get a list of the names of all the files present in a directory using Node.js. I want output that is an array of filenames. How can I do this?
You can use the fs.readdir or fs.readdirSync methods. fs is included in Node.js core, so there's no need to install anything.
fs.readdir
const testFolder = './tests/';
const fs = require('fs');
fs.readdir(testFolder, (err, files) => {
  files.forEach(file => {
    console.log(file);
  });
});
fs.readdirSync
const testFolder = './tests/';
const fs = require('fs');
fs.readdirSync(testFolder).forEach(file => {
  console.log(file);
});
The difference between the two methods is that the first one is asynchronous, so you have to provide a callback function that will be executed when the read process ends.
The second is synchronous: it will return the file name array, but it will block any further execution of your code until the read process ends.
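For completeness, modern Node also ships a promise-based variant under fs/promises, which avoids both the callback and the blocking (a small sketch):

const fs = require('fs/promises');

async function listFiles(dir) {
  // resolves with an array of file names, without blocking the event loop
  const files = await fs.readdir(dir);
  files.forEach(file => console.log(file));
}

listFiles('./tests/');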
IMO the most convenient way to do such tasks is to use a glob tool. Here's a glob package for node.js. Install with
npm install glob
Then use a wildcard to match filenames (example taken from the package's website):
var glob = require("glob")
// options is optional
glob("**/*.js", options, function (er, files) {
// files is an array of filenames.
// If the `nonull` option is set, and nothing
// was found, then files is ["**/*.js"]
// er is an error object or null.
})
If you are planning on using globby, here is an example that looks for any xml files under the current folder (note that await must run inside an async function or an ES module):
var globby = require('globby');

const paths = await globby("**/*.xml");
The answers above don't perform a recursive search into the directory though. Here's what I did for a recursive search (using node-walk: npm install walk):
var walk = require('walk');
var files = [];

// Walker options
var walker = walk.walk('./test', { followLinks: false });

walker.on('file', function (root, stat, next) {
  // Add this file to the list of files
  files.push(root + '/' + stat.name);
  next();
});

walker.on('end', function () {
  console.log(files);
});
As of Node v10.10.0, it is possible to use the new withFileTypes option for fs.readdir and fs.readdirSync in combination with the dirent.isDirectory() function to filter for filenames in a directory. That looks like this:
fs.readdirSync('./dirpath', {withFileTypes: true})
  .filter(item => !item.isDirectory())
  .map(item => item.name)
The returned array is in the form:
['file1.txt', 'file2.txt', 'file3.txt']
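On newer Node versions (v18.17+), readdirSync also accepts a recursive option, so you can list an entire tree without writing the walk yourself (directories are included in the result, so you may still want to filter):

const fs = require('fs');

// Node >= 18.17: relative paths for everything under ./dirpath, subfolders included
const entries = fs.readdirSync('./dirpath', { recursive: true });
console.log(entries);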
Get files in all subdirs
const fs = require('fs');

function getFiles (dir, files_) {
  files_ = files_ || [];
  var files = fs.readdirSync(dir);
  for (var i in files) {
    var name = dir + '/' + files[i];
    if (fs.statSync(name).isDirectory()) {
      getFiles(name, files_);
    } else {
      files_.push(name);
    }
  }
  return files_;
}

console.log(getFiles('path/to/dir'))
Here's a simple solution using only the native fs and path modules:
// sync version
function walkSync(currentDirPath, callback) {
  var fs = require('fs'),
    path = require('path');
  fs.readdirSync(currentDirPath).forEach(function (name) {
    var filePath = path.join(currentDirPath, name);
    var stat = fs.statSync(filePath);
    if (stat.isFile()) {
      callback(filePath, stat);
    } else if (stat.isDirectory()) {
      walkSync(filePath, callback);
    }
  });
}
or async version (uses fs.readdir instead):
// async version with basic error handling
function walk(currentDirPath, callback) {
  var fs = require('fs'),
    path = require('path');
  fs.readdir(currentDirPath, function (err, files) {
    if (err) {
      throw new Error(err);
    }
    files.forEach(function (name) {
      var filePath = path.join(currentDirPath, name);
      var stat = fs.statSync(filePath);
      if (stat.isFile()) {
        callback(filePath, stat);
      } else if (stat.isDirectory()) {
        walk(filePath, callback);
      }
    });
  });
}
Then you just call (for the sync version):
walkSync('path/to/root/dir', function (filePath, stat) {
  // do something with "filePath"...
});
or the async version:
walk('path/to/root/dir', function (filePath, stat) {
  // do something with "filePath"...
});
The difference is in how Node blocks while performing the IO. Given that the API above is the same, you could just use the async version to ensure maximum performance.
However, there is one advantage to using the synchronous version: it is easier to execute some code as soon as the walk is done, as in the next statement after the walk. With the async version, you would need some extra way of knowing when you are done, perhaps creating a map of all paths first and then enumerating them. For simple build/util scripts (vs. high-performance web servers) you can use the sync version without causing any damage.
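One way to get that "done" signal from an async walk is to build it on promises, so it resolves only when the whole tree has been visited (a sketch using fs.promises rather than the callback walk above, since counting outstanding callbacks is fiddly):

const fs = require('fs').promises;
const path = require('path');

// resolves with every file path under dir once the full walk has finished
async function walkAll(dir) {
  const out = [];
  for (const name of await fs.readdir(dir)) {
    const filePath = path.join(dir, name);
    const stat = await fs.stat(filePath);
    if (stat.isDirectory()) out.push(...await walkAll(filePath));
    else out.push(filePath);
  }
  return out;
}

walkAll('path/to/root/dir').then(files => console.log('done', files.length));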
Using Promises with ES7
Asynchronous use with mz/fs
The mz module provides promisified versions of the core node library. Using them is simple. First install the library...
npm install mz
Then...
const fs = require('mz/fs');

fs.readdir('./myDir')
  .then(listing => console.log(listing))
  .catch(err => console.error(err));
Alternatively you can write them in asynchronous functions in ES7:
async function myReaddir () {
  try {
    const file = await fs.readdir('./myDir/');
  }
  catch (err) { console.error(err) }
}
Update for recursive listing
Some of the users have specified a desire to see a recursive listing (though not in the question)... Use fs-promise. It's a thin wrapper around mz.
npm install fs-promise
then...
const fs = require('fs-promise');
fs.walk('./myDir').then(
  listing => listing.forEach(file => console.log(file.path))
).catch(err => console.error(err));
non-recursive version
You don't say you want to do it recursively, so I assume you only need the direct children of the directory.
Sample code:
const fs = require('fs');
const path = require('path');

const folder = 'your-directory-path';
fs.readdirSync(folder)
  .filter((file) => fs.lstatSync(path.join(folder, file)).isFile());
Dependencies.
var fs = require('fs');
var path = require('path');
Definition.
// String -> [String]
function fileList(dir) {
  return fs.readdirSync(dir).reduce(function (list, file) {
    var name = path.join(dir, file);
    var isDir = fs.statSync(name).isDirectory();
    return list.concat(isDir ? fileList(name) : [name]);
  }, []);
}
Usage.
var DIR = '/usr/local/bin';

// 1. List all files in DIR
fileList(DIR);
// => ['/usr/local/bin/babel', '/usr/local/bin/bower', ...]

// 2. List all file names in DIR
fileList(DIR).map((file) => file.split(path.sep).slice(-1)[0]);
// => ['babel', 'bower', ...]
Please note that fileList is way too optimistic. For anything serious, add some error handling.
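For instance, a guarded variant might skip unreadable entries instead of crashing the whole listing (a sketch reusing the fs and path dependencies above; which errors to swallow is a judgment call):

// same shape as fileList, but skips entries it cannot read or stat
function safeFileList(dir) {
  let entries;
  try {
    entries = fs.readdirSync(dir);
  } catch (err) {
    return []; // e.g. EACCES on the directory itself
  }
  return entries.reduce(function (list, file) {
    var name = path.join(dir, file);
    try {
      var isDir = fs.statSync(name).isDirectory();
      return list.concat(isDir ? safeFileList(name) : [name]);
    } catch (err) {
      return list; // broken symlink, permissions, ...
    }
  }, []);
}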
I'm assuming from your question that you don't want directory names, just files.
Directory Structure Example
animals
├── all.jpg
├── mammals
│   ├── cat.jpg
│   └── dog.jpg
└── insects
    └── bee.jpg
Walk function
Credits go to Justin Maier in this gist
If you want just an array of the file paths, use return_object: false:
const fs = require('fs').promises;
const path = require('path');

async function walk(dir) {
  let files = await fs.readdir(dir);
  files = await Promise.all(files.map(async file => {
    const filePath = path.join(dir, file);
    const stats = await fs.stat(filePath);
    if (stats.isDirectory()) return walk(filePath);
    else if (stats.isFile()) return filePath;
  }));
  return files.reduce((all, folderContents) => all.concat(folderContents), []);
}
Usage
async function main() {
  console.log(await walk('animals'))
}
Output
[
  "/animals/all.jpg",
  "/animals/mammals/cat.jpg",
  "/animals/mammals/dog.jpg",
  "/animals/insects/bee.jpg"
];
If someone is still searching for this, I do this:
import fs from 'fs';
import path from 'path';
const getAllFiles = dir =>
  fs.readdirSync(dir).reduce((files, file) => {
    const name = path.join(dir, file);
    const isDirectory = fs.statSync(name).isDirectory();
    return isDirectory ? [...files, ...getAllFiles(name)] : [...files, name];
  }, []);
and it works very well for me.
Load fs:
const fs = require('fs');
Read files async:
fs.readdir('./dir', function (err, files) {
  // "files" is an Array with file names
});
Read files sync:
var files = fs.readdirSync('./dir');
My one-liner:
const fs = require("fs")
const path = 'somePath/'
const filesArray = fs.readdirSync(path).filter(file => fs.lstatSync(path+file).isFile())
It's just 2 lines of code:
const fs = require('fs')
fs.readdir("./img/", (err, filenames) => console.log(filenames))
Get sorted filenames. You can filter results based on a specific extension such as '.txt', '.jpg' and so on.
import * as fs from 'fs';
import * as Path from 'path';
function getFilenames(path, extension) {
  return fs
    .readdirSync(path)
    .filter(
      item =>
        fs.statSync(Path.join(path, item)).isFile() &&
        (extension === undefined || Path.extname(item) === extension)
    )
    .sort();
}
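Hypothetical usage, assuming a ./docs folder with mixed content:

getFilenames('./docs');         // all files, sorted
getFilenames('./docs', '.txt'); // only the .txt files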
My 2 cents, if you just want to list file names (excluding directories) from a local sub-folder of your project:
✅ No additional dependencies
✅ 1 function
✅ Normalize path (Unix vs. Windows)
const fs = require("fs");
const path = require("path");
/**
* #param {string} relativeName "resources/foo/goo"
* #return {string[]}
*/
const listFileNames = (relativeName) => {
try {
const folderPath = path.join(process.cwd(), ...relativeName.split("/"));
return fs
.readdirSync(folderPath, { withFileTypes: true })
.filter((dirent) => dirent.isFile())
.map((dirent) => dirent.name.split(".")[0]);
} catch (err) {
// ...
}
};
README.md
package.json
resources
|-- countries
|   |-- usa.yaml
|   |-- japan.yaml
|   |-- gb.yaml
|-- provinces
|   |-- .........

listFileNames("resources/countries") #=> ["usa", "japan", "gb"]
Try this, it works for me:
import fs from "fs/promises";

const path = "path/to/folder";

export const readDir = async function readDir(path) {
  const files = await fs.readdir(path);
  // array of file names
  console.log(files);
}
This is a TypeScript, optionally recursive, optionally error-logging and asynchronous solution. You can specify a regular expression for the file names you want to find.
I used fs-extra, because it's an easy superset improvement on fs.
import * as FsExtra from 'fs-extra'
import * as Path from 'path'

/**
 * Finds files in the folder that match filePattern, optionally passing back errors.
 * If folderDepth isn't specified, only the first level is searched. Otherwise anything up
 * to Infinity is supported.
 *
 * @static
 * @param {string} folder The folder to start in.
 * @param {string} [filePattern='.*'] A regular expression of the files you want to find.
 * @param {(Error[] | undefined)} [errors=undefined]
 * @param {number} [folderDepth=0]
 * @returns {Promise<string[]>}
 * @memberof FileHelper
 */
public static async findFiles(
  folder: string,
  filePattern: string = '.*',
  errors: Error[] | undefined = undefined,
  folderDepth: number = 0
): Promise<string[]> {
  const results: string[] = []

  // Get all files from the folder
  let items = await FsExtra.readdir(folder).catch(error => {
    if (errors) {
      errors.push(error) // Save errors if we wish (e.g. folder perms issues)
    }
    return results
  })

  // Go through to the required depth and no further
  folderDepth = folderDepth - 1

  // Loop through the results, possibly recurse
  for (const item of items) {
    try {
      const fullPath = Path.join(folder, item)
      if (
        FsExtra.statSync(fullPath).isDirectory() &&
        folderDepth > -1
      ) {
        // It's a folder, recursively get the child folders' files
        results.push(
          ...(await FileHelper.findFiles(fullPath, filePattern, errors, folderDepth))
        )
      } else {
        // Filter by the file name pattern, if there is one
        if (filePattern === '.*' || item.search(new RegExp(filePattern, 'i')) > -1) {
          results.push(fullPath)
        }
      }
    } catch (error) {
      if (errors) {
        errors.push(error) // Save errors if we wish
      }
    }
  }
  return results
}
Out of the box
In case you want an object with the directory structure out-of-the-box, I highly recommend you check directory-tree.
Let's say you have this structure:
photos
├── june
│   └── windsurf.jpg
└── january
    ├── ski.png
    └── snowboard.jpg
const dirTree = require("directory-tree");
const tree = dirTree("/path/to/photos");
Will return:
{
  path: "photos",
  name: "photos",
  size: 600,
  type: "directory",
  children: [
    {
      path: "photos/june",
      name: "june",
      size: 400,
      type: "directory",
      children: [
        {
          path: "photos/june/windsurf.jpg",
          name: "windsurf.jpg",
          size: 400,
          type: "file",
          extension: ".jpg"
        }
      ]
    },
    {
      path: "photos/january",
      name: "january",
      size: 200,
      type: "directory",
      children: [
        {
          path: "photos/january/ski.png",
          name: "ski.png",
          size: 100,
          type: "file",
          extension: ".png"
        },
        {
          path: "photos/january/snowboard.jpg",
          name: "snowboard.jpg",
          size: 100,
          type: "file",
          extension: ".jpg"
        }
      ]
    }
  ]
}
Custom Object
Otherwise, if you want to create a directory tree object with your custom settings, have a look at the following snippet.
// my-script.js
const fs = require("fs");
const path = require("path");

const isDirectory = filePath => fs.statSync(filePath).isDirectory();
const isFile = filePath => fs.statSync(filePath).isFile();

const getDirectoryDetails = filePath => {
  const dirs = fs.readdirSync(filePath);
  return {
    dirs: dirs.filter(name => isDirectory(path.join(filePath, name))),
    files: dirs.filter(name => isFile(path.join(filePath, name)))
  };
};

const getFilesRecursively = (parentPath, currentFolder) => {
  const currentFolderPath = path.join(parentPath, currentFolder);
  let currentDirectoryDetails = getDirectoryDetails(currentFolderPath);

  const final = {
    current_dir: currentFolder,
    dirs: currentDirectoryDetails.dirs.map(dir =>
      getFilesRecursively(currentFolderPath, dir)
    ),
    files: currentDirectoryDetails.files
  };

  return final;
};

const getAllFiles = relativePath => {
  const fullPath = path.join(__dirname, relativePath);
  const parentDirectoryPath = path.dirname(fullPath);
  const leafDirectory = path.basename(fullPath);

  const allFiles = getFilesRecursively(parentDirectoryPath, leafDirectory);
  return allFiles;
};

module.exports = { getAllFiles };
Then you can simply do:
// another-file.js
const { getAllFiles } = require("path/to/my-script");
const allFiles = getAllFiles("/path/to/my-directory");
Here's an asynchronous recursive version.
var fs = require('fs')

// (named here so the snippet runs standalone)
function walkFiles(path, callback) {
  // the callback gets (err, files) where files is an array of file names
  if (typeof callback !== 'function') return
  var
    result = []
    , files = [path.replace(/\/\s*$/, '')]
  function traverseFiles() {
    if (files.length) {
      var name = files.shift()
      fs.stat(name, function (err, stats) {
        if (err) {
          if (err.errno == 34) traverseFiles()
          // in case there's broken symbolic links or a bad path
          // skip file instead of sending error
          else callback(err)
        }
        else if (stats.isDirectory()) fs.readdir(name, function (err, files2) {
          if (err) callback(err)
          else {
            files = files2
              .map(function (file) { return name + '/' + file })
              .concat(files)
            traverseFiles()
          }
        })
        else {
          result.push(name)
          traverseFiles()
        }
      })
    }
    else callback(null, result)
  }
  traverseFiles()
}
Took the general approach of @Hunan-Rostomyan, made it a little more concise and added an excludeDirs argument. It'd be trivial to extend with includeDirs, just follow the same pattern:
import * as fs from 'fs';
import * as path from 'path';
function fileList(dir, excludeDirs?) {
  return fs.readdirSync(dir).reduce(function (list, file) {
    const name = path.join(dir, file);
    if (fs.statSync(name).isDirectory()) {
      if (excludeDirs && excludeDirs.length) {
        excludeDirs = excludeDirs.map(d => path.normalize(d));
        const idx = name.indexOf(path.sep);
        const directory = name.slice(0, idx === -1 ? name.length : idx);
        if (excludeDirs.indexOf(directory) !== -1)
          return list;
      }
      return list.concat(fileList(name, excludeDirs));
    }
    return list.concat([name]);
  }, []);
}
Example usage:
console.log(fileList('.', ['node_modules', 'typings', 'bower_components']));
Using flatMap:
const fs = require('fs');

function getFiles(dir) {
  return fs.readdirSync(dir).flatMap((item) => {
    const path = `${dir}/${item}`;
    if (fs.statSync(path).isDirectory()) {
      return getFiles(path);
    }
    return path;
  });
}
Given the following directory:
dist
├── 404.html
├── app-AHOLRMYQ.js
├── img
│   ├── demo.gif
│   └── start.png
├── index.html
└── sw.js
Usage:
getFiles("dist")
Output:
[
  'dist/404.html',
  'dist/app-AHOLRMYQ.js',
  'dist/img/demo.gif',
  'dist/img/start.png',
  'dist/index.html'
]
I usually use fs-extra:
const Fse = require('fs-extra');

// fs-extra's readdir returns a promise, so await it (inside an async function)
const fileNameArray = await Fse.readdir('/some/path');
Result:
[
  "b7c8a93c-45b3-4de8-b9b5-a0bf28fb986e.jpg",
  "daeb1c5b-809f-4434-8fd9-410140789933.jpg"
]
Just a heads up: if you're planning to perform operations on each file in a directory, try vinyl-fs (which is used by gulp, the streaming build system).
This will work and store the result in a test.txt file, which will be created in the same directory:
fs.readdirSync(__dirname).forEach(file => {
  // appendFileSync is synchronous and takes no callback
  fs.appendFileSync("test.txt", file + "\n")
})
I've recently built a tool that does just this... It fetches a directory asynchronously and returns a list of items. You can get directories, files or both, with folders first. You can also paginate the data in case you don't want to fetch the entire folder.
https://www.npmjs.com/package/fs-browser
This is the link, hope it helps someone!
I made a node module to automate this task: mddir
Usage
node mddir "../relative/path/"
To install: npm install mddir -g
To generate markdown for current directory: mddir
To generate for any absolute path: mddir /absolute/path
To generate for a relative path: mddir ~/Documents/whatever.
The md file gets generated in your working directory.
Currently ignores node_modules, and .git folders.
Troubleshooting
If you receive the error 'node\r: No such file or directory', the issue is that your operating system uses different line endings and mddir can't parse them without you explicitly setting the line ending style to Unix. This usually affects Windows, but also some versions of Linux. Setting line endings to Unix style has to be performed within the mddir npm global bin folder.
Line endings fix
Get npm bin folder path with:
npm config get prefix
Cd into that folder
brew install dos2unix
dos2unix lib/node_modules/mddir/src/mddir.js
This converts line endings to Unix instead of Dos
Then run as normal with: node mddir "../relative/path/".
Example generated markdown file structure 'directoryList.md'
|-- .bowerrc
|-- .jshintrc
|-- .jshintrc2
|-- Gruntfile.js
|-- README.md
|-- bower.json
|-- karma.conf.js
|-- package.json
|-- app
|-- app.js
|-- db.js
|-- directoryList.md
|-- index.html
|-- mddir.js
|-- routing.js
|-- server.js
|-- _api
|-- api.groups.js
|-- api.posts.js
|-- api.users.js
|-- api.widgets.js
|-- _components
|-- directives
|-- directives.module.js
|-- vendor
|-- directive.draganddrop.js
|-- helpers
|-- helpers.module.js
|-- proprietary
|-- factory.actionDispatcher.js
|-- services
|-- services.cardTemplates.js
|-- services.cards.js
|-- services.groups.js
|-- services.posts.js
|-- services.users.js
|-- services.widgets.js
|-- _mocks
|-- mocks.groups.js
|-- mocks.posts.js
|-- mocks.users.js
|-- mocks.widgets.js
Use the npm list-contents module. It reads the contents and sub-contents of the given directory and returns lists of the files' and folders' paths.
const list = require('list-contents');
list("./dist",(o)=>{
if(o.error) throw o.error;
console.log('Folders: ', o.dirs);
console.log('Files: ', o.files);
});
If many of the above options seem too complex or not what you are looking for, here is another approach using node-dir - https://github.com/fshost/node-dir
npm install node-dir
Here is a simple function to list all .xml files, searching in subdirectories:
import * as nDir from 'node-dir';
import * as path from 'path';

function listXMLs(rootFolderPath) {
  let xmlFiles;
  nDir.files(rootFolderPath, function (err, items) {
    xmlFiles = items.filter(i => {
      return path.extname(i) === '.xml';
    });
    console.log(xmlFiles);
  });
}
// assumes fs and grunt are in scope (this was written for a Gruntfile)
function getFilesRecursiveSync(dir, fileList, optionalFilterFunction) {
  if (!fileList) {
    grunt.log.error("Variable 'fileList' is undefined or NULL.");
    return;
  }
  var files = fs.readdirSync(dir);
  for (var i in files) {
    if (!files.hasOwnProperty(i)) continue;
    var name = dir + '/' + files[i];
    if (fs.statSync(name).isDirectory()) {
      getFilesRecursiveSync(name, fileList, optionalFilterFunction);
    } else {
      if (optionalFilterFunction && optionalFilterFunction(name) !== true)
        continue;
      fileList.push(name);
    }
  }
}
