I am working on a POST method on the server side that retrieves all the files inside the requested directory (not recursive), and below is my code.
I am having difficulty sending the response back (res.json(pathContent);) with the updated pathContent without using the setTimeout.
I understand that this is due to the asynchronous behavior of the file system methods used (readdir and stat) and that I need to use some sort of callback, async, or promise technique.
I tried to use async.waterfall with the entire body of readdir as one function and res.json(pathContent) as the other, but it didn't send the updated array to the client side.
I know that there have been thousands of questions regarding this kind of asynchronous operation, but I could not figure out how to solve my case after reading a number of posts.
Any comments would be appreciated. Thanks.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');

const app = express();
app.use(bodyParser.json()); // parse JSON bodies so req.body.path is available

var pathName = '';
const pathContent = [];

app.post('/api/files', (req, res) => {
  const newPath = req.body.path;
  fs.readdir(newPath, (err, files) => {
    if (err) {
      res.status(422).json({ message: `${err}` });
      return;
    }
    // set the pathName and empty pathContent
    pathName = newPath;
    pathContent.length = 0;
    // iterate each file
    const absPath = path.resolve(pathName);
    files.forEach(file => {
      // get file info and store in pathContent
      fs.stat(absPath + '/' + file, (err, stats) => {
        if (err) {
          console.log(`${err}`);
          return;
        }
        if (stats.isFile()) {
          pathContent.push({
            path: pathName,
            name: file.substring(0, file.lastIndexOf('.')),
            type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
          })
        } else if (stats.isDirectory()) {
          pathContent.push({
            path: pathName,
            name: file,
            type: 'Directory',
          });
        }
      });
    });
  });
  // the hack I want to get rid of: wait and hope all the stats are done
  setTimeout(() => { res.json(pathContent); }, 100);
});
The easiest and cleanest way would be to use async/await; that way you can make use of promises, and the code will look almost like synchronous code.
You therefore need promisified versions of readdir and stat, which can be created with promisify from the util core module.
const { promisify } = require('util')
const path = require('path')
const readdir = promisify(require('fs').readdir)
const stat = promisify(require('fs').stat)

async function getPathContent(newPath) {
  // move pathContent here, otherwise you can have conflicts with concurrent requests
  const pathContent = [];
  let files = await readdir(newPath)
  let pathName = newPath;
  // pathContent.length = 0; // not needed anymore because pathContent is new for each request
  const absPath = path.resolve(pathName);
  // iterate each file
  // replace forEach with (for ... of) because this makes it easier
  // to work with "async"
  // otherwise you would need to use files.map and Promise.all
  for (let file of files) {
    // get file info and store in pathContent
    try {
      let stats = await stat(absPath + '/' + file)
      if (stats.isFile()) {
        pathContent.push({
          path: pathName,
          name: file.substring(0, file.lastIndexOf('.')),
          type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
        })
      } else if (stats.isDirectory()) {
        pathContent.push({
          path: pathName,
          name: file,
          type: 'Directory',
        });
      }
    } catch (err) {
      console.log(`${err}`);
    }
  }
  return pathContent;
}

app.post('/api/files', (req, res, next) => {
  const newPath = req.body.path;
  getPathContent(newPath).then((pathContent) => {
    res.json(pathContent);
  }, (err) => {
    res.status(422).json({
      message: `${err}`
    });
  })
})
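For reference, the comments in getPathContent mention the files.map / Promise.all alternative to the for...of loop. A minimal sketch of that variant (my own, reusing the promisified readdir and stat from above) could look like this:

// sketch: stat all entries concurrently instead of one at a time
async function getPathContentParallel(newPath) {
  const files = await readdir(newPath);
  const absPath = path.resolve(newPath);
  const entries = await Promise.all(files.map(async (file) => {
    const stats = await stat(path.join(absPath, file));
    if (stats.isFile()) {
      return {
        path: newPath,
        name: file.substring(0, file.lastIndexOf('.')),
        type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
      };
    }
    if (stats.isDirectory()) {
      return { path: newPath, name: file, type: 'Directory' };
    }
    return null; // anything else (sockets, symlinks, ...) falls through
  }));
  // drop the nulls before returning
  return entries.filter(Boolean);
}

One behavioral difference: with Promise.all, a single failing stat rejects the whole operation, whereas the loop above logs the error and keeps going.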
And you should not concatenate paths using + (absPath + '/' + file); use path.join(absPath, file) or path.resolve(absPath, file) instead.
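For example, path.join takes care of the separators for you:

const path = require('path');

// path.join normalizes separators, so callers don't have to worry
// about trailing slashes or the platform's separator character
path.join('/data/uploads', 'file.txt');   // '/data/uploads/file.txt'
path.join('/data/uploads/', 'file.txt');  // '/data/uploads/file.txt'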
And you should never write the code that handles a request so that it relies on global variables like var pathName = ''; and const pathContent = [];. This might work in your testing environment, but it will certainly lead to problems in production, where two requests can work on those variables at the "same time".
Based on the initial comment I received and the reference, I used readdirSync and statSync instead and was able to make it work. I will review other answers as well and learn about other ways to implement this.
Thank you all for your kind inputs.
Here is my solution.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');

const app = express();
app.use(bodyParser.json()); // parse JSON bodies so req.body.path is available

var pathName = '';
const pathContent = [];

app.post('/api/files', (req, res) => {
  const newPath = req.body.path;
  // validate path
  let files;
  try {
    files = fs.readdirSync(newPath);
  } catch (err) {
    res.status(422).json({ message: `${err}` });
    return;
  }
  // set the pathName and empty pathContent
  pathName = newPath;
  pathContent.length = 0;
  // iterate each file
  let absPath = path.resolve(pathName);
  files.forEach(file => {
    // get file info and store in pathContent
    let fileStat = fs.statSync(absPath + '/' + file);
    if (fileStat.isFile()) {
      pathContent.push({
        path: pathName,
        name: file.substring(0, file.lastIndexOf('.')),
        type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
      })
    } else if (fileStat.isDirectory()) {
      pathContent.push({
        path: pathName,
        name: file,
        type: 'Directory',
      });
    }
  });
  res.json(pathContent);
});
There are different ways to do it:
You can first promisify the functions yourself using new Promise(), then use async/await or .then() (see the sketch after the links below)
You can use the promisifyAll() function of the Bluebird package (https://www.npmjs.com/package/bluebird)
You can use the synchronous versions of the fs functions
https://nodejs.org/api/fs.html#fs_fs_readdirsync_path_options
https://nodejs.org/api/fs.html#fs_fs_statsync_path_options
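A minimal sketch of the first option, wrapping fs.readdir in new Promise() by hand:

const fs = require('fs');

// hand-rolled promisification of fs.readdir
function readdirPromise(dirPath) {
  return new Promise((resolve, reject) => {
    fs.readdir(dirPath, (err, files) => {
      if (err) reject(err);
      else resolve(files);
    });
  });
}

// usage with async/await
async function listFiles(dirPath) {
  const files = await readdirPromise(dirPath);
  console.log(files);
}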
Here are some options:
Use the synchronous file methods (check the docs, but they usually end with Sync). Slower, but a fairly simple code change, and very easy to understand.
Use promises (or util.promisify) to create a promise for each stat, and Promise.all to wait for all the stats to complete. After that, you can use async functions and await as well for easier-to-read code and simpler error handling. (Probably the largest code change, but it will make the async code easier to follow.)
Keep a counter of the number of stats you have done, and if that number is the size you expect, then call res.json from inside the stat callback (smallest code change, but very error prone; sketched below).
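For completeness, here is a rough sketch of that counter approach, reusing the variable names from the question; note the missing per-file error handling, which is part of why it is error prone:

fs.readdir(newPath, (err, files) => {
  if (err) return res.status(422).json({ message: `${err}` });
  const pathContent = [];
  let pending = files.length;                     // how many stats are still in flight
  if (pending === 0) return res.json(pathContent); // empty directory: respond at once
  files.forEach(file => {
    fs.stat(path.join(newPath, file), (err, stats) => {
      if (!err) {
        pathContent.push({
          path: newPath,
          name: file,
          type: stats.isDirectory() ? 'Directory' : 'File',
        });
      }
      if (--pending === 0) res.json(pathContent); // the last stat to finish responds
    });
  });
});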
Related
I am trying to export database properties stored in a properties file from a JavaScript module. By the time the properties file has been read, the module has already been exported, and the database properties appear undefined wherever I use them in other modules.
const Pool = require('pg').Pool;
const fs = require('fs');
const path = require('path');

class DbConfig {
  constructor(dbData) {
    this.pool = new Pool({
      user: dbData['user'],
      host: dbData['host'],
      database: dbData['database'],
      password: dbData['password'],
      max: 20,
      port: 5432
    });
  }
}

function getdbconf() {
  const dbData = {};
  fs.readFile("../../db_properties.txt", 'utf8', (err, data) => {
    if (err) {
      console.error(err)
      return
    }
    // dbData = {"user":"postgres", "password": "1234"...};
    return dbData;
  });
}

let db = new DbConfig(getdbconf());
let dbPool = db.pool;
console.log("dbpool : -> : ", dbPool); // username and password appear undefined
module.exports = { dbPool };
Is there a way to read data before exporting data from Javascript module?
Usually, database config or any other sensitive info is read from a .env file using dotenv.
Or
you could also provide env vars from the command line itself, like:
DB_HOST=127.0.0.1 node index.js
inside your index.js:
console.log(process.env.DB_HOST)
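A minimal sketch of the dotenv route mentioned above, assuming a .env file in the project root with DB_HOST, DB_USER, and DB_PASSWORD entries:

// .env (kept out of version control):
//   DB_HOST=127.0.0.1
//   DB_USER=postgres
//   DB_PASSWORD=1234
require('dotenv').config(); // loads .env into process.env, synchronously

const { Pool } = require('pg');
const pool = new Pool({
  host: process.env.DB_HOST,
  user: process.env.DB_USER,
  password: process.env.DB_PASSWORD,
  port: 5432,
});
module.exports = { pool };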
Please create a new file (connection-pool.js) and paste this code:
const { Pool } = require('pg');

const poolConnection = new Pool({
  user: 'postgresUserName',
  host: 'yourHost',
  database: 'someNameDataBase',
  password: 'postgresUserPassword',
  port: 5432,
});

console.log('connectionOptions', poolConnection.options);
module.exports = poolConnection;
To use it, create a new file (demo-connection.js) and paste this code:
const pool = require('./connection-pool');

pool.query('SELECT NOW();', (err, res) => {
  if (err) {
    // throw err;
    console.log('connection error');
    return;
  }
  if (res) {
    console.log(res.rows);
    pool.end();
  }
});
This is an alternative option 🙂
Exporting the result of async calls
To export values which have been obtained asynchronously, export a Promise.
const fs = require('fs/promises'); // `/promises` means no callbacks; a Promise is returned
const dbDataPromise = fs.readFile('fileToRead'); // `readFile` returns a Promise now
module.exports = dbDataPromise;
Importing
When you need to use the value,
const dbDataPromise = require('./dbdata');
const dbDataPromise = require('./dbdata');

async function init() {
  const dbData = await dbDataPromise;
}

// or without async, using Promise callbacks
function init() {
  dbDataPromise
    .then(dbData => { /* the rest of your code that depends on dbData here */ });
}
Current code broken
Please note that your current code, as pasted above, is broken:
function getdbconf() {
  const dbData = {};
  fs.readFile("../../db_properties.txt", 'utf8', (err, data) => {
    //[...] snipped for brevity
    return dbData;
  });
}
fs.readFile "returns" dbData, but there is nothing to return to, since you are in a callback which you did not call yourself. Function getdbconf returns nothing.
The line that says let db = new DbConfig(getdbconf()); will NOT work. It needs to be inside the callback.
The only way to avoid putting all of your code inside the callback (and "flatten" it) is to use await, or to use readFileSync
Avoiding the issue
Using environment variables
Suhas Nama's suggestion is a good one, and is common practice. Try putting the values you need in environment variables.
Using synchronous readFile
While using synchronous calls does block the event loop, it's ok to do during initialization, before your app is up and running.
This avoids the problem of having everything in a callback or having to export Promises, and is often the best solution.
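A minimal sketch of that approach, assuming (hypothetically) that db_properties.txt contains JSON; the parsing would need to match whatever format the file really uses:

const fs = require('fs');
const Pool = require('pg').Pool;

// Synchronous read at module load time: blocking is acceptable here,
// because nothing is serving requests yet.
// Assumes the file holds JSON like {"user":"postgres","password":"1234",...}
const dbData = JSON.parse(fs.readFileSync('../../db_properties.txt', 'utf8'));

const dbPool = new Pool({
  user: dbData.user,
  host: dbData.host,
  database: dbData.database,
  password: dbData.password,
  max: 20,
  port: 5432,
});

module.exports = { dbPool };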
I am trying to write a simple express/node.js app that responds to GET requests using data found in a csv file. I would like to read this csv file to generate a javascript object (essentially a key-value mapping), and then make that generated map available for the HTTP request handling logic in the controller.
I wrote a module that reads the csv files and exports the desired objects, but I'm not sure how to ensure:
This operation completes and the objects actually exist before HTTP requests are handled
The file operation is performed only a single time when the server starts up and not once per request incurring massive overhead
How can I organize my code to meet these goals in the context of an express app?
This is how I am processing the CSV file:
const fs = require('fs');
const csv = require('csv-parser'); // or whichever csv stream parser is in use

var myMap = {};

fs.createReadStream('filename.csv')
  .pipe(csv())
  .on('data', (row) => {
    // Build javascript object
    myMap[row['key']] = row['value'];
  })
  .on('end', () => {
    console.log('Done.');
  });

// Does this work?
module.exports = myMap;
How about ensuring the HTTP server only starts listening after the file is loaded into memory:
// server.js
const express = require('express');
const http = require('http');
const fs = require('fs');
const csv = require('csv-parser');

var myMap = {};

function readCsv(cb) {
  fs.createReadStream('filename.csv')
    .pipe(csv())
    .on('data', (row) => {
      // Build javascript object
      myMap[row['key']] = row['value'];
    })
    .on('end', () => {
      console.log('Done.');
      cb();
    });
}

var app = express();

module.exports = Object.freeze({
  server: http.createServer(app),
  init() {
    readCsv(() => {
      this.server.listen(80)
    })
  }
})
Something like that.
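Hypothetical usage from an entry-point file, so the server only accepts connections once the map is populated:

// index.js (hypothetical entry point)
const server = require('./server');
server.init(); // listen() is only called after the CSV 'end' event fires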
You can also utilize a Promise:
// server.js
const express = require('express');
const http = require('http');
const fs = require('fs');
const csv = require('csv-parser');

var myMap = {};

function readCsv() {
  return new Promise((resolve, reject) => {
    fs.createReadStream('filename.csv')
      .pipe(csv())
      .on('data', (row) => {
        // Build javascript object
        myMap[row['key']] = row['value'];
      })
      .on('end', () => {
        console.log('Done.');
        resolve();
      })
      .on('error', reject)
  })
}

var app = express();

module.exports = Object.freeze({
  server: http.createServer(app),
  init() {
    return readCsv().then(() => {
      this.server.listen(80)
    })
  }
})
I would look for a more synchronous-looking way to read the file and then handle the HTTP request. Here is sample code of what it could look like:
import fs from 'fs';
import csv from 'csv-parser'; // or whichever csv stream parser is in use

async function processCSV() {
  try {
    let map = await readCsv();
    // handle the http request in another function, with the same async/await style
    let http = await processHttpRequest(map);
    // process the http response
  } catch (e) {
    console.log('e', e);
  }
}

function readCsv() {
  // wrap the stream events in a Promise so the caller can await it
  return new Promise((resolve, reject) => {
    let myMap = {};
    fs.createReadStream('filename.csv')
      .pipe(csv())
      .on('data', (row) => {
        // Build javascript object
        myMap[row['key']] = row['value'];
      })
      .on('end', () => {
        console.log('Done.');
        resolve(myMap);
      })
      .on('error', reject);
  });
}

async function processHttpRequest(map) {
  try {
    let reqres = await httpRequest(map); // your own function that handles the request
  } catch (e) {
  }
}

processCSV();
In order to meet both of your goals, you can include the code in the app.js file. App.js only runs when the express server starts. It doesn't reload on page refresh. You can run app.listen after the readstream ends.
var myMap = {};

fs.createReadStream('filename.csv')
  .pipe(csv())
  .on('data', (row) => {
    // Build javascript object
    myMap[row['key']] = row['value'];
  })
  .on('end', () => {
    app.listen(port, () => console.log(`Example app listening on port ${port}!`));
  });
However, since I don't think you're going to have a lot of data, it's better to use synchronous (blocking) methods, for both the csv parser and the file reader. This just makes it easier to understand. I use csv-parse below.
const express = require('express')
const fs = require('fs')
const parse = require('csv-parse/lib/sync')

const app = express()
const port = 3000

/* In this example assume sample.csv will be:
   "key_1","key_2"
   "value 1","value 2"
*/
var myMap = fs.readFileSync('sample.csv', 'utf8');

/* parsing the csv will return:
   [Object {key_1: "value 1", key_2: "value 2"}]
*/
const records = parse(myMap, {
  columns: true,
  skip_empty_lines: true
})

app.get('/', (req, res) => res.send('Hello World!' + records[0].key_1))

app.listen(port, () => console.log(`Example app listening on port ${port}!`))
Update: use https://csv.js.org/parse/ instead. The package below is deprecated and not maintained anymore.
Deprecated:
Hi, I have created an npm package to read CSV synchronously or as a promise:
https://www.npmjs.com/package/csv-parser-sync-plus-promise
Description:
csv-parser-sync-plus-promise
A module to read csv synchronously or as promise
Features
Now read any csv synchronously or as a promise. The choice is yours.
Usage
let parser = require('csv-parser-sync-plus-promise')

// for sync
let a = parser.readCsvSync('<filepath>')

// for promise
let b = parser.readCsvPromise('<filepath>')

Note: You can use both fully qualified and relative paths as <filepath>
Errors
All errors will be printed as console.error and the process will exit with exit code 222
I'm a bit of a newbie to Node, so be gentle. I'm creating an app for my wedding which takes an uploaded guestlist (in Excel file format) and turns it into a JSON array, which I can then use to build profiles about each guest (dietary requirements, RSVP answer, etc.).
So far I've got a form on the homepage which allows the user to upload the .xlsx file and when the form is submitted the user is redirected back to the homepage again.
I've created the following route:
router.post('/',
  guestsController.upload,
  guestsController.getGuestlist,
  guestsController.displayGuestlist
);
and here's my guestsController:
const multer = require('multer');
const convertExcel = require('excel-as-json').processFile;

const storage = multer.diskStorage({ // multer's disk storage settings
  destination: function (req, file, cb) {
    cb(null, './uploads/')
  },
  filename: function (req, file, cb) {
    var datetimestamp = Date.now();
    cb(null, file.fieldname + '-' + datetimestamp + '.' + file.originalname.split('.')[file.originalname.split('.').length - 1])
  }
});

exports.upload = multer({storage: storage}).single('file');

exports.getGuestlist = async (req, res, next) => {
  try {
    await convertExcel(req.file.path, null, null, (err, guestData) => {
      req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
    });
    console.log(req.guestlist);
    next();
  } catch (e) {
    res.json({error_code: 1, err_desc: "Corrupted Excel file"});
    next();
  }
};

exports.displayGuestlist = (req, res) => {
  console.log(req.guestlist);
};
At the moment, because of the asynchronous nature of the functions, displayGuestlist is returning undefined to the console because convertExcel has not finished grabbing the data. You can see I have tried to use the new async/await syntax to resolve this, but it hasn't fixed it unfortunately.
I have also tried putting the log in displayGuestlist inside a timeout function, which has proven that this is purely a timing issue.
Any help would be much appreciated.
It looks like convertExcel is not a Promise-returning function, but rather uses an old-school callback. await does not work with those, so it's instead awaiting Promise.resolve(undefined) since the function returns undefined, not a Promise. Thankfully, in Node 8 and later, there's a promisify utility to convert callback-style functions to Promise-returning functions so that await can be used.
const { promisify } = require('util');
const convertExcel = promisify(require('excel-as-json').processFile);
// ...
const guestData = await convertExcel(req.file.path, null, null);
req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
You can encapsulate your code in a promise and await this promise to resolve.
exports.getGuestlist = async (req, res, next) => {
  let promise = new Promise((resolve, reject) => {
    convertExcel(req.file.path, null, null, (err, guestData) => {
      if (err) reject(err);
      else resolve(guestData);
    });
  });
  try {
    let guestData = await promise;
    req.guestlist = guestData.map((guestObj) => Object.values(guestObj)[0]);
    console.log(req.guestlist);
    next();
  } catch (e) {
    res.json({error_code: 1, err_desc: "Corrupted Excel file"});
    next();
  }
};
I'm trying to use a promise on this code:
// Listing files
app.post('/readList', function(req, res) {
  var cleared = false
  var readList = new Promise(function(resolve, reject) {
    fs.readdir(req.body.path, (err, files) => {
      files.forEach(file => {
        console.log(file)
        var fileDetail = {
          name: '',
          local: true,
          filetype: 'fas fa-folder-open',
          filepath: '',
          isFile: false
        }
        if (!cleared) {
          listedFiles = []
          cleared = true
        }
        fileDetail.name = file
        fileDetail.filepath = req.body.path + file
        fs.stat(req.body.path + file, function(err, stats) {
          fileDetail.isFile = stats.isFile()
          if (stats.isFile()) fileDetail.filetype = 'far fa-file-alt'
          else fileDetail.filetype = 'fas fa-folder-open'
        })
        listedFiles.push(fileDetail)
      })
    })
  })
  readList.then(
    console.log('vorta'),
    res.end(JSON.stringify(listedFiles))
  )
})
I've put this line in to show the items being listed:
console.log(file)
And put this line in to execute after the promise:
readList.then(
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles))
)
I don't know where the mistake is, but the console is showing 'vorta' before the file names.
What am I doing wrong?
Here you're passing two params:
readList.then(
  // #1: in this case you're executing the log function immediately, which is why the message is printed first
  console.log('vorta'),
  res.end(JSON.stringify(listedFiles)) // #2
)
So, you need to pass a function
readList.then(function() {
  console.log('vorta');
  res.end(JSON.stringify(listedFiles));
})
Further, you need to call the function resolve within the async logic.
As I said in my earlier comment, there are at least four problems here:
You aren't calling resolve(listedFiles) to resolve the promise so its .then() handler is never called
You need to pass a single function to .then()
You have no error handling for your async operations
You seem to be assuming that fs.stat() is synchronous when it is not
The best way to attack this problem is to promisify all your asynchronous functions and then use promises for controlling the flow and the error handling. Here's a way to fix all of these issues:
const util = require('util');
const fs = require('fs');

const readdirAsync = util.promisify(fs.readdir);
const statAsync = util.promisify(fs.stat);

// Listing files
app.post('/readList', function(req, res) {
  // add code here to sanitize req.body.path so it can only
  // point to a specific sub-directory that is intended for public consumption
  readdirAsync(req.body.path).then(files => {
    return Promise.all(files.map(file => {
      let fileDetail = {
        name: file,
        local: true,
        filepath: req.body.path + file
      };
      return statAsync(fileDetail.filepath).then(stats => {
        fileDetail.isFile = stats.isFile();
        fileDetail.filetype = fileDetail.isFile ? 'far fa-file-alt' : 'fas fa-folder-open';
        return fileDetail;
      });
    }));
  }).then(listedFiles => {
    res.json(listedFiles);
  }).catch(err => {
    console.log(err);
    res.sendStatus(500);
  });
});
FYI, this is kind of a dangerous implementation because it lists files on ANY path that the user passes in, so any outsider can see the entire file listing on your server's hard drive. It could even list network-attached drives.
You should be limiting the scope of req.body.path to a specific file hierarchy that is intended for public consumption, as sketched below.
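A minimal sketch of such a restriction (the PUBLIC_ROOT name and location are assumptions for illustration):

const path = require('path');

// confine the requested path to a single public root directory
const PUBLIC_ROOT = path.resolve('./public'); // hypothetical allowed root

function safePath(requested) {
  const resolved = path.resolve(PUBLIC_ROOT, requested);
  // reject anything that escapes PUBLIC_ROOT (e.g. via '..' or an absolute path)
  if (resolved !== PUBLIC_ROOT && !resolved.startsWith(PUBLIC_ROOT + path.sep)) {
    throw new Error('Path outside of public root');
  }
  return resolved;
}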
You need to pass a function to then.
As it stands, you are calling log and end immediately and passing their return values.
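To make the distinction concrete (a two-line sketch of my own):

// passes a function: it runs later, when the promise resolves
readList.then(() => console.log('vorta'));

// passes a value: console.log runs right now, and then() receives undefined
readList.then(console.log('vorta'));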
Here is a working copy of your code. I have made a few changes that you can omit, since they are only there to give you working code:
var express = require('express');
var fs = require('fs');

var app = express();

app.post('/readList', function(req, res) {
  // Assuming sample data coming in req.body
  // Remove this when you run it on your side
  req.body = {
    path: 'temp_dir'
  };
  var cleared = false;
  var listedFiles = [];

  // Promising readList :)
  function readList() {
    return new Promise(function (resolve, reject) {
      // Suppose req.body.path is 'temp_dir' and it has 2 files
      fs.readdir(req.body.path, (err, files) => {
        console.log(files);
        // in the following commented code you can do what you need
        /*files.forEach(file => {
          console.log(file);
          var fileDetail = {
            name: '',
            local: true,
            filetype: 'fas fa-folder-open',
            filepath: '',
            isFile: false
          }
          if (!cleared) {
            listedFiles = [];
            cleared = true;
          }
          // really? you can think of it later!!
          fileDetail.name = file;
          fileDetail.filepath = req.body.path + file
          // I changed below to avoid surprises for you in data!
          const stats = fs.statSync(req.body.path + file);
          fileDetail.isFile = stats.isFile();
          if (stats.isFile())
            fileDetail.filetype = 'far fa-file-alt';
          else
            fileDetail.filetype = 'fas fa-folder-open'
          listedFiles.push(fileDetail);
        });*/
        resolve(listedFiles);
      });
    });
  }

  readList().then((data) => {
    console.log('vorta');
    // data here will contain the same data as listedFiles, so choose your way; I would recommend going with data
    res.send(JSON.stringify(listedFiles)); // fine
    // res.send(JSON.stringify(data)); // better and recommended
  });
})

app.listen(process.env.PORT || 3000);
console.log('Listening on port 3000');
I am trying to loop through and pick up files in a directory, but I have some trouble implementing it. How to pull in multiple files and then move them to another folder?
var fs = require('fs');
var dirname = 'C:/FolderwithFiles';

console.log("Going to get file info!");
fs.stat(dirname, function (err, stats) {
  if (err) {
    return console.error(err);
  }
  console.log(stats);
  console.log("Got file info successfully!");
  // Check file type
  console.log("isFile ? " + stats.isFile());
  console.log("isDirectory ? " + stats.isDirectory());
});
Older answer with callbacks
You want to use the fs.readdir function to get the directory contents and the fs.rename function to actually do the renaming. Both these functions have synchronous versions if you need to wait for them to finish before running the code afterwards.
I wrote a quick script that does what you described.
var fs = require('fs');
var path = require('path');

// In newer Node.js versions where process is already global this isn't necessary.
var process = require("process");

var moveFrom = "/home/mike/dev/node/sonar/moveme";
var moveTo = "/home/mike/dev/node/sonar/tome"

// Loop through all the files in the temp directory
fs.readdir(moveFrom, function (err, files) {
  if (err) {
    console.error("Could not list the directory.", err);
    process.exit(1);
  }

  files.forEach(function (file, index) {
    // Make one pass and make the file complete
    var fromPath = path.join(moveFrom, file);
    var toPath = path.join(moveTo, file);

    fs.stat(fromPath, function (error, stat) {
      if (error) {
        console.error("Error stating file.", error);
        return;
      }

      if (stat.isFile())
        console.log("'%s' is a file.", fromPath);
      else if (stat.isDirectory())
        console.log("'%s' is a directory.", fromPath);

      fs.rename(fromPath, toPath, function (error) {
        if (error) {
          console.error("File moving error.", error);
        } else {
          console.log("Moved file '%s' to '%s'.", fromPath, toPath);
        }
      });
    });
  });
});
Tested on my local machine.
node testme.js
'/home/mike/dev/node/sonar/moveme/hello' is a file.
'/home/mike/dev/node/sonar/moveme/test' is a directory.
'/home/mike/dev/node/sonar/moveme/test2' is a directory.
'/home/mike/dev/node/sonar/moveme/test23' is a directory.
'/home/mike/dev/node/sonar/moveme/test234' is a directory.
Moved file '/home/mike/dev/node/sonar/moveme/hello' to '/home/mike/dev/node/sonar/tome/hello'.
Moved file '/home/mike/dev/node/sonar/moveme/test' to '/home/mike/dev/node/sonar/tome/test'.
Moved file '/home/mike/dev/node/sonar/moveme/test2' to '/home/mike/dev/node/sonar/tome/test2'.
Moved file '/home/mike/dev/node/sonar/moveme/test23' to '/home/mike/dev/node/sonar/tome/test23'.
Moved file '/home/mike/dev/node/sonar/moveme/test234' to '/home/mike/dev/node/sonar/tome/test234'.
Update: fs.promises functions with async/await
Inspired by ma11hew28's answer (shown here), here is the same thing as above but with the async functions in fs.promises. As noted by ma11hew28, this may have memory limitations versus fs.promises.opendir added in v12.12.0.
Quick code below.
//jshint esversion:8
//jshint node:true
const fs = require( 'fs' );
const path = require( 'path' );

const moveFrom = "/tmp/movefrom";
const moveTo = "/tmp/moveto";

// Make an async function that gets executed immediately
(async () => {
  // Our starting point
  try {
    // Get the files as an array
    const files = await fs.promises.readdir( moveFrom );

    // Loop them all with the new for...of
    for( const file of files ) {
      // Get the full paths
      const fromPath = path.join( moveFrom, file );
      const toPath = path.join( moveTo, file );

      // Stat the file to see if we have a file or dir
      const stat = await fs.promises.stat( fromPath );

      if( stat.isFile() )
        console.log( "'%s' is a file.", fromPath );
      else if( stat.isDirectory() )
        console.log( "'%s' is a directory.", fromPath );

      // Now move async
      await fs.promises.rename( fromPath, toPath );

      // Log because we're crazy
      console.log( "Moved '%s'->'%s'", fromPath, toPath );
    } // End for...of
  }
  catch( e ) {
    // Catch anything bad that happens
    console.error( "We've thrown! Whoops!", e );
  }
})(); // Wrap in parenthesis and call now
fs.readdir(path[, options], callback) (which Mikey A. Leonetti used in his answer) and its variants (fsPromises.readdir(path[, options]) and fs.readdirSync(path[, options])) each reads all of a directory's entries into memory at once. That's good for most cases, but if the directory has very many entries and/or you want to lower your application's memory footprint, you could instead iterate over the directory's entries one at a time.
Asynchronously
Directories are async iterable, so you could do something like this:
const fs = require('fs')

async function ls(path) {
  const dir = await fs.promises.opendir(path)
  for await (const dirent of dir) {
    console.log(dirent.name)
  }
}

ls('.').catch(console.error)
Or, you could use dir.read() and/or dir.read(callback) directly.
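A small sketch of the promise form of dir.read() (my own example, not from the original answer):

const fs = require('fs')

// dir.read() resolves with the next fs.Dirent, or null when the directory is exhausted
async function lsOneAtATime(path) {
  const dir = await fs.promises.opendir(path)
  let dirent
  while ((dirent = await dir.read()) !== null) {
    console.log(dirent.name)
  }
  // unlike for await...of, manual reads do not auto-close the handle
  await dir.close()
}

lsOneAtATime('.').catch(console.error)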
Synchronously
Directories aren't sync iterable, but you could use dir.readSync() directly. For example:
const fs = require('fs')

const dir = fs.opendirSync('.')
let dirent
while ((dirent = dir.readSync()) !== null) {
  console.log(dirent.name)
}
dir.closeSync()
Or, you could make directories sync iterable. For example:
const fs = require('fs')

function makeDirectoriesSyncIterable() {
  const p = fs.Dir.prototype
  if (p.hasOwnProperty(Symbol.iterator)) { return }
  const entriesSync = function* () {
    try {
      let dirent
      while ((dirent = this.readSync()) !== null) { yield dirent }
    } finally { this.closeSync() }
  }
  if (!p.hasOwnProperty('entriesSync')) { p.entriesSync = entriesSync }
  Object.defineProperty(p, Symbol.iterator, {
    configurable: true,
    enumerable: false,
    value: entriesSync,
    writable: true
  })
}
makeDirectoriesSyncIterable()
And then, you could do something like this:
const dir = fs.opendirSync('.')
for (const dirent of dir) {
  console.log(dirent.name)
}
Note: "In busy processes, use the asynchronous versions of these calls. The synchronous versions will block the entire process until they complete, halting all connections."
References:
Node.js Documentation: File System: Class fs.Dir
Node.js source code: fs.Dir
GitHub: nodejs/node: Issues: streaming / iterative fs.readdir #583
Read all folders in a directory
const fs = require('fs');

const readAllFolder = (dirMain) => {
  const readDirMain = fs.readdirSync(dirMain);
  console.log(dirMain);
  console.log(readDirMain);
  readDirMain.forEach((dirNext) => {
    console.log(dirNext, fs.lstatSync(dirMain + "/" + dirNext).isDirectory());
    if (fs.lstatSync(dirMain + "/" + dirNext).isDirectory()) {
      readAllFolder(dirMain + "/" + dirNext);
    }
  });
};
The answers provided are for a single folder. Here is an asynchronous implementation for multiple folders, where all the folders are processed simultaneously but the smaller folders or files get completed first.
Please comment if you have any feedback
Asynchronously Multiple Folders
const fs = require('fs')
const path = require('path')

// Multiple folders list
const in_dir_list = [
  'Folder 1 Large',
  'Folder 2 Small', // small folder and files will complete first
  'Folder 3 Extra Large'
]

// BEST PRACTICES:
// (1) The fast folder-list for loop has to be outside the async capture-callback functions for async to make sense
// (2) Slower read/write or I/O processes are best contained in async capture-callback functions, because these processes are slower than for-loop events, and faster completed items get called back first
for (let i = 0; i < in_dir_list.length; i++) {
  const in_dir = in_dir_list[i]
  // function is created (see below) so each folder is processed asynchronously for the readFile that follows
  readdir_async_capture(in_dir, function(files_path) {
    console.log("Processing folders asynchronously ...")
    for (let j = 0; j < files_path.length; j++) {
      const file_path = files_path[j]
      const file = file_path.substr(file_path.lastIndexOf("/") + 1, file_path.length)
      // function is created (see below) so all files are read simultaneously, but the smallest file will be completed and called back first
      readFile_async_capture(file_path, file, function(file_string) {
        try {
          console.log(file_path)
          console.log(file_string)
        } catch (error) {
          console.log(error)
          console.log("System exiting first to catch the error; if not, async will continue...")
          process.exit()
        }
      })
    }
  })
}

// fs.readdir async capture function to deal with the asynchronous code above
function readdir_async_capture(in_dir, callback) {
  fs.readdir(in_dir, function(error, files) {
    if (error) { return console.log(error) }
    const files_path = files.map(function(x) { return path.join(in_dir, x) })
    callback(files_path)
  })
}

// fs.readFile async capture function to deal with the asynchronous code above
function readFile_async_capture(file_path, file, callback) {
  fs.readFile(file_path, function(error, data) {
    if (error) { return console.log(error) }
    const file_string = data.toString()
    callback(file_string)
  })
}