I'm trying to use a promise in this code:
// Listing files
app.post('/readList', function(req, res) {
    var cleared = false
    var readList = new Promise(function(resolve, reject){
        fs.readdir(req.body.path, (err, files) => {
            files.forEach(file => {
                console.log(file)
                var fileDetail = {
                    name: '',
                    local: true,
                    filetype: 'fas fa-folder-open',
                    filepath: '',
                    isFile: false
                }
                if(!cleared){
                    listedFiles = []
                    cleared = true
                }
                fileDetail.name = file
                fileDetail.filepath = req.body.path + file
                fs.stat(req.body.path + file, function(err, stats) {
                    fileDetail.isFile = stats.isFile()
                    if(stats.isFile()) fileDetail.filetype = 'far fa-file-alt'
                    else fileDetail.filetype = 'fas fa-folder-open'
                })
                listedFiles.push(fileDetail)
            })
        })
    })
    readList.then(
        console.log('vorta'),
        res.end(JSON.stringify(listedFiles))
    )
})
I added this line to show the items being listed:
console.log(file)
And added this line to execute after the promise:
readList.then(
console.log('vorta'),
res.end(JSON.stringify(listedFiles))
)
I don't know where the mistake is, but the console shows 'vorta' before the file names.
What am I doing wrong?
Here you're passing two parameters:
readList.then(
    // #1 Here you're calling the log function immediately, which is why the message is printed right away.
    console.log('vorta'),
    res.end(JSON.stringify(listedFiles)) // #2
)
So you need to pass a function:
readList.then(function() {
    console.log('vorta');
    res.end(JSON.stringify(listedFiles));
})
Further, you need to call the resolve function within the async logic.
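A minimal sketch of that part, keeping the question's variables and assuming listedFiles is built inside the readdir callback as in the original code:

var readList = new Promise(function(resolve, reject) {
    fs.readdir(req.body.path, (err, files) => {
        if (err) return reject(err);
        // ... build listedFiles from files here, as in the original code ...
        resolve(listedFiles); // without this call, .then() never fires
    });
});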
As I said in my earlier comment, there are at least four problems here:
You aren't calling resolve(listedFiles) to resolve the promise so its .then() handler is never called
You need to pass a single function to .then()
You have no error handling for your async operations
You seem to be assuming that fs.stat() is synchronous when it is not
The best way to attack this problem is to promisify all your asynchronous functions and then use promises for controlling the flow and the error handling. Here's a way to fix all of these issues:
const util = require('util');
const fs = require('fs');
const readdirAsync = util.promisify(fs.readdir);
const statAsync = util.promisify(fs.stat);
// Listing files
app.post('/readList', function(req, res) {
    // add code here to sanitize req.body.path so it can only
    // point to a specific sub-directory that is intended for public consumption
    readdirAsync(req.body.path).then(files => {
        return Promise.all(files.map(file => {
            let fileDetail = {
                name: file,
                local: true,
                filepath: req.body.path + file
            };
            return statAsync(fileDetail.filepath).then(stats => {
                fileDetail.isFile = stats.isFile();
                fileDetail.filetype = fileDetail.isFile ? 'far fa-file-alt' : 'fas fa-folder-open';
                return fileDetail;
            });
        }));
    }).then(listedFiles => {
        res.json(listedFiles);
    }).catch(err => {
        console.log(err);
        res.sendStatus(500);
    });
});
FYI, this is kind of a dangerous implementation because it lists files on ANY path that the user passes in, so any outsider can see the entire file listing on your server's hard drive. It could even list network-attached drives.
You should be limiting the scope of req.body.path to a specific file hierarchy that is intended for public consumption.
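For example, a minimal sketch of one way to constrain the path (PUBLIC_ROOT and safeResolve are hypothetical names, not something from the original code):

const path = require('path');
const PUBLIC_ROOT = path.resolve(__dirname, 'public'); // hypothetical base directory

function safeResolve(userPath) {
    // Resolve relative to the public root, then verify the result stays inside it
    const resolved = path.resolve(PUBLIC_ROOT, userPath);
    if (resolved !== PUBLIC_ROOT && !resolved.startsWith(PUBLIC_ROOT + path.sep)) {
        throw new Error('path escapes the public directory');
    }
    return resolved;
}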
You need to pass a function to then.
As it stands, you are calling log and end immediately and passing their return values.
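To illustrate the difference with a tiny sketch (somePromise here is just a placeholder name):

somePromise.then(console.log('now'));           // console.log runs immediately; its return value (undefined) is what gets passed to then
somePromise.then(() => console.log('later'));   // the function runs only when the promise resolves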
Here is a working copy of your code. I have made a few changes that you can omit, since they are only there to give you runnable code:
var express = require('express');
var fs = require('fs');
var app = express();
app.post('/readList', function(req, res) {
    // Assuming sample data coming in req.body
    // Remove this when you run it on your side
    req.body = {
        path: 'temp_dir'
    };
    var cleared = false;
    var listedFiles = [];
    // Promising readList :)
    function readList(){
        return new Promise(function (resolve, reject) {
            // Suppose req.body.path is 'temp_dir' and it has 2 files
            fs.readdir(req.body.path, (err, files) => {
                console.log(files);
                // in the following commented code you can do what you need
                /*files.forEach(file => {
                    console.log(file);
                    var fileDetail = {
                        name: '',
                        local: true,
                        filetype: 'fas fa-folder-open',
                        filepath: '',
                        isFile: false
                    }
                    if (!cleared) {
                        listedFiles = [];
                        cleared = true;
                    }
                    // really? you can think of it later!!
                    fileDetail.name = file;
                    fileDetail.filepath = req.body.path + file
                    // I changed below to avoid surprises for you in data!
                    const stats = fs.statSync(req.body.path + file);
                    fileDetail.isFile = stats.isFile();
                    if (stats.isFile())
                        fileDetail.filetype = 'far fa-file-alt';
                    else
                        fileDetail.filetype = 'fas fa-folder-open'
                    listedFiles.push(fileDetail);
                });*/
                resolve(listedFiles);
            });
        });
    }
    readList().then((data) => {
        console.log('vorta');
        // data here will contain the same data as listedFiles, so choose your way of doing it; I would recommend going with data
        res.send(JSON.stringify(listedFiles)); // fine
        // res.send(JSON.stringify(data)); // better and recommended
    });
})
app.listen(process.env.PORT || 3000);
console.log('Listening on port 3000');
I want to take the message information from the user and save it in a JSON file, with new data constantly added. But with the following code, the data is constantly replaced instead. I don't want to replace the data, I want to append to it.
This is my code:
const fs = require("fs");
const { Client, Intents } = require("discord.js");
const client = new Client({
    intents: [Intents.FLAGS.GUILDS, Intents.FLAGS.GUILD_MESSAGES],
});
const now = new Date();
const obj = {
    table: [],
};
let confirm = false;
const { badWords } = require("../badWordList.json");
client.on("message", async (message) => {
    if (message.author.bot) return;
    for (let i = 0; i < badWords.length; i++) {
        if (message.content.toLowerCase().includes(badWords[i].toLowerCase()))
            confirm = true;
    }
    if (confirm) {
        obj.table.push({
            name: message.author.username,
            date: `${now.getFullYear()}/${now.getMonth()}/${now.getDate()}`,
            message: message.channel.messages.channel.lastMessage.cleanContent,
        });
        fs.writeFile("myjsonfile.json", JSON.stringify(obj), function (err) {
            if (err) throw err;
            console.log("complete");
        });
    }
});
fs.writeFile() replaces the content of the file, as written in the docs.
At first glance, you might want to use fs.write() instead; see the docs for usage.
But the Node.js docs say:
It is unsafe to use fs.write() multiple times on the same file without waiting for the callback. For this scenario, fs.createWriteStream() is recommended.
Since you are in asynchronous mode, you should probably define a write stream to the file and then write to it, which gives you something like this:
// -snip-
const whateverJsonFileStream = fs.createWriteStream("myjsonfile.json");
client.on("message", async (message) => {
    // -snip-
    if (confirm) {
        obj.table.push({
            name: message.author.username,
            date: `${now.getFullYear()}/${now.getMonth()}/${now.getDate()}`,
            message: message.channel.messages.channel.lastMessage.cleanContent,
        });
        whateverJsonFileStream.write(JSON.stringify(obj), function (err) {
            if (err) throw err;
            console.log("complete");
        });
    }
});
I'm trying to use stream-json to read a zip, unzip it, and then write it to file. I don't think I understand how to use the library.
Based on the link above, they have this example:
const {chain} = require('stream-chain');
const {parser} = require('stream-json');
const {pick} = require('stream-json/filters/Pick');
const {ignore} = require('stream-json/filters/Ignore');
const {streamValues} = require('stream-json/streamers/StreamValues');
const fs = require('fs');
const zlib = require('zlib');
const pipeline = chain([
    fs.createReadStream('sample.json.gz'),
    zlib.createGunzip(),
    parser(),
    pick({filter: 'data'}),
    ignore({filter: /\b_meta\b/i}),
    streamValues(),
    data => {
        const value = data.value;
        // keep data only for the accounting department
        return value && value.department === 'accounting' ? data : null;
    }
]);
let counter = 0;
pipeline.on('data', () => ++counter);
pipeline.on('end', () =>
    console.log(`The accounting department has ${counter} employees.`));
However I don't want to count anything, I just want to write to file. Here is what I have that works:
function unzipJson() {
    const zipPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json.zip');
    const jsonPath = Path.resolve(__dirname, 'resources', 'AllPrintings.json');
    console.info('Attempting to read zip');
    return new Promise((resolve, reject) => {
        let error = null;
        Fs.readFile(zipPath, (err, data) => {
            error = err;
            if (!err) {
                const zip = new JSZip();
                zip.loadAsync(data).then((contents) => {
                    Object.keys(contents.files).forEach((filename) => {
                        console.info(`Writing ${filename} to disk...`);
                        zip.file(filename).async('nodebuffer').then((content) => {
                            Fs.writeFileSync(jsonPath, content);
                        }).catch((writeErr) => { error = writeErr; });
                    });
                }).catch((zipErr) => { error = zipErr; });
                resolve();
            } else if (error) {
                console.log(error);
                reject(error);
            }
        });
    });
}
However I can't easily add any processing to this, so I wanted to replace it with stream-json. This is my partial attempt, as I don't know how to finish:
function unzipJson() {
    const zipPath = Path.resolve(__dirname, 'resources', 'myfile.json.zip');
    const jsonPath = Path.resolve(__dirname, 'resources', 'myfile.json');
    console.info('Attempting to read zip');
    const pipeline = chain([
        Fs.createReadStream(zipPath),
        zlib.createGunzip(),
        parser(),
        Fs.createWriteStream(jsonPath),
    ]);
    // use the chain, and save the result to a file
    pipeline.on(/*what goes here?*/)
Later on I intend to add extra processing of the json file(s), but I want to learn the basics before I start throwing in extra functionality.
I can't produce a minimal example unfortunately, as I don't know what goes into the pipeline.on function. I'm trying to understand what I should do, not what I've done wrong.
I also looked at the related stream-chain, which has an example that ends like so:
// use the chain, and save the result to a file
dataSource.pipe(chain).pipe(fs.createWriteStream('output.txt.gz'));
But at no point does the documentation explain where dataSource comes from, and I think my chain creates its own by reading the zip from file?
How am I supposed to use these streaming libraries to write to file?
I don't want to count anything, I just want to write to file
In that case, you'll need to convert the token/JSON data stream back into a text stream that you can write to a file. You can use the library's Stringer for that. Its documentation also contains an example that seems to be more in line with what you want to do:
chain([
    fs.createReadStream('data.json.gz'),
    zlib.createGunzip(),
    parser(),
    pick({filter: 'data'}), // omit this if you don't want to do any processing
    stringer(),
    zlib.createGzip(), // omit this if you want to write an unzipped result
    fs.createWriteStream('edited.json.gz')
]);
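If you also need to know when the output file has been completely written, one approach (a sketch, not something from the stream-json docs) is to keep a reference to the write stream and listen for its 'finish' event:

const out = fs.createWriteStream('edited.json.gz');
chain([
    fs.createReadStream('data.json.gz'),
    zlib.createGunzip(),
    parser(),
    stringer(),
    out
]);
out.on('finish', () => console.log('done writing'));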
I am working on a POST method on the server side to retrieve all files inside the requested directory (not recursively), and below is my code.
I am having difficulty sending the response back (res.json(pathContent);) with the updated pathContent without using setTimeout.
I understand that this is due to the asynchronous behavior of the file system methods used (readdir and stat), and that I need to use some sort of callback, async, or promise technique.
I tried to use async.waterfall with the entire body of readdir as one function and res.json(pathContent) as the other, but it didn't send the updated array to the client side.
I know that there have been thousands of questions regarding this kind of asynchronous operation, but I could not figure out how to solve my case after reading a number of posts.
Any comments would be appreciated. Thanks.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');
const app = express();          // added: app was not created in the original snippet
app.use(bodyParser.json());     // added: needed so req.body.path is populated
var pathName = '';
const pathContent = [];
app.post('/api/files', (req, res) => {
    const newPath = req.body.path;
    fs.readdir(newPath, (err, files) => {
        if (err) {
            res.status(422).json({ message: `${err}` });
            return;
        }
        // set the pathName and empty pathContent
        pathName = newPath;
        pathContent.length = 0;
        // iterate each file
        const absPath = path.resolve(pathName);
        files.forEach(file => {
            // get file info and store in pathContent
            fs.stat(absPath + '/' + file, (err, stats) => {
                if (err) {
                    console.log(`${err}`);
                    return;
                }
                if (stats.isFile()) {
                    pathContent.push({
                        path: pathName,
                        name: file.substring(0, file.lastIndexOf('.')),
                        type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
                    })
                } else if (stats.isDirectory()) {
                    pathContent.push({
                        path: pathName,
                        name: file,
                        type: 'Directory',
                    });
                }
            });
        });
    });
    setTimeout(() => { res.json(pathContent); }, 100);
});
The easiest and cleanest way would be to use async/await; that way you can make use of promises and the code will almost look like synchronous code.
You therefore need promisified versions of readdir and stat, which can be created with promisify from the util core lib.
const { promisify } = require('util')
const readdir = promisify(require('fs').readdir)
const stat = promisify(require('fs').stat)
async function getPathContent(newPath) {
    // move pathContent here, otherwise you can have conflicts with concurrent requests
    const pathContent = [];
    let files = await readdir(newPath)
    let pathName = newPath;
    // pathContent.length = 0; // not needed anymore because pathContent is new for each request
    const absPath = path.resolve(pathName);
    // iterate each file
    // replace forEach with (for ... of) because this makes it easier
    // to work with "async"
    // otherwise you would need to use files.map and Promise.all
    for (let file of files) {
        // get file info and store in pathContent
        try {
            let stats = await stat(absPath + '/' + file)
            if (stats.isFile()) {
                pathContent.push({
                    path: pathName,
                    name: file.substring(0, file.lastIndexOf('.')),
                    type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
                })
            } else if (stats.isDirectory()) {
                pathContent.push({
                    path: pathName,
                    name: file,
                    type: 'Directory',
                });
            }
        } catch (err) {
            console.log(`${err}`);
        }
    }
    return pathContent;
}
app.post('/api/files', (req, res, next) => {
    const newPath = req.body.path;
    getPathContent(newPath).then((pathContent) => {
        res.json(pathContent);
    }, (err) => {
        res.status(422).json({
            message: `${err}`
        });
    })
})
And you should not concatenate paths using + (absPath + '/' + file); use path.join(absPath, file) or path.resolve(absPath, file) instead.
And you should never write your code in a way that the code executed for a request relies on global variables like var pathName = ''; and const pathContent = [];. This might work in your testing environment, but will surely lead to problems in production, where two requests can work on the same variable at the same time.
Based on the initial comment I received and the reference, I used readdirSync and statSync instead and was able to make it work. I will review other answers as well and learn about other ways to implement this.
Thank you all for your kind inputs.
Here is my solution.
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');
const path = require('path');
const app = express();          // added: app was not created in the original snippet
app.use(bodyParser.json());     // added: needed so req.body.path is populated
var pathName = '';
const pathContent = [];
app.post('/api/files', (req, res) => {
    const newPath = req.body.path;
    // validate path
    let files;
    try {
        files = fs.readdirSync(newPath);
    } catch (err) {
        res.status(422).json({ message: `${err}` });
        return;
    }
    // set the pathName and empty pathContent
    pathName = newPath;
    pathContent.length = 0;
    // iterate each file
    let absPath = path.resolve(pathName);
    files.forEach(file => {
        // get file info and store in pathContent
        let fileStat = fs.statSync(absPath + '/' + file);
        if (fileStat.isFile()) {
            pathContent.push({
                path: pathName,
                name: file.substring(0, file.lastIndexOf('.')),
                type: file.substring(file.lastIndexOf('.') + 1).concat(' File'),
            })
        } else if (fileStat.isDirectory()) {
            pathContent.push({
                path: pathName,
                name: file,
                type: 'Directory',
            });
        }
    });
    res.json(pathContent);
});
There are different ways to do it:
You can promisify the functions yourself using new Promise(), then use async/await or .then() (see the sketch after the links below)
You can use the promisifyAll() function of the Bluebird package (https://www.npmjs.com/package/bluebird)
You can use the synchronous versions of the fs functions:
https://nodejs.org/api/fs.html#fs_fs_readdirsync_path_options
https://nodejs.org/api/fs.html#fs_fs_statsync_path_options
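A minimal sketch of the first option, hand-wrapping fs.readdir in a promise (readdirPromise is a made-up helper name):

const fs = require('fs');

function readdirPromise(dirPath) {
    return new Promise((resolve, reject) => {
        fs.readdir(dirPath, (err, files) => {
            if (err) reject(err);
            else resolve(files);
        });
    });
}

// usage inside an async function: const files = await readdirPromise(newPath);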
Here are some options:
Use the synchronous file methods (check the docs, but they usually end with Sync). Slower, but a fairly simple code change, and very easy to understand.
Use promises (or util.promisify) to create a promise for each stat, and Promise.all to wait for all the stats to complete. After that, you can use async functions and await as well, for easier-to-read code and simpler error handling. (Probably the largest code change, but it will make the async code easier to follow.)
Keep a counter of the number of stats you have done, and if that number is the size you expect, then call res.json from inside the stat callback (smallest code change, but very error prone; see the sketch below).
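A rough sketch of that counter approach, using the names from the question (empty-directory and error handling omitted):

let remaining = files.length;
files.forEach(file => {
    fs.stat(path.join(absPath, file), (err, stats) => {
        // ... push the entry onto pathContent exactly as before ...
        if (--remaining === 0) {
            res.json(pathContent); // the last stat callback to finish sends the response
        }
    });
});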
I want to refactor my function to return a readable stream that I will pipe to the http request module.
Currently I'm returning the archived file location and creating a read stream from it:
const filepath = yield archive.archiveFilesAsTargz('path', 'name.tar.gz');
fs.createReadStream(filepath).pipe(request(options)).then(body => {
    console.log(body);
});
The flow I'm seeking is:
get a directory location and archive it
get the archive as a stream and return it (resolve it)
invoke the function and pipe the read stream to request
My function is as follows:
exports.archiveFilesAsTargz = function (dest, archivedName) {
    return new Promise((resolve, reject) => {
        const name = slugify(archivedName);
        const filePath = path.join(dest, name + '.tar.gz');
        const output = fs.createWriteStream(filePath);
        const archive = archiver('tar', {
            gzip: true
        });
        archive.pipe(output);
        archive.directory(dest, name).finalize();
        output.on('close', () => resolve(filePath));
        archive.on('error', (err) => reject(err));
    });
};
OK, so after another reading session and some experimenting, I solved it...
function archiveFilesAsTargz (dest, name) {
    const archive = archiver('tar', {
        gzip: true
    });
    return archive.directory(dest, name).finalize();
}
The following will return a read stream:
archive.directory(dest, name).finalize();
So using it as follows worked great for me:
const pack = archiveFilesAsTargz(zippath, 'liron');
pack.pipe(request(options)).then(body => {
    console.log(body);
})
.catch(err => {
    console.log(err);
});
I am trying to loop through and pick up files in a directory, but I'm having some trouble implementing it. How do I pull in multiple files and then move them to another folder?
var dirname = 'C:/FolderwithFiles';
console.log("Going to get file info!");
fs.stat(dirname, function (err, stats) {
    if (err) {
        return console.error(err);
    }
    console.log(stats);
    console.log("Got file info successfully!");
    // Check file type
    console.log("isFile ? " + stats.isFile());
    console.log("isDirectory ? " + stats.isDirectory());
});
Older answer with callbacks
You want to use the fs.readdir function to get the directory contents and the fs.rename function to actually do the renaming. Both these functions have synchronous versions if you need to wait for them to finish before running the code afterwards.
I wrote a quick script that does what you described.
var fs = require('fs');
var path = require('path');
// In newer Node.js versions where process is already global this isn't necessary.
var process = require("process");
var moveFrom = "/home/mike/dev/node/sonar/moveme";
var moveTo = "/home/mike/dev/node/sonar/tome"
// Loop through all the files in the temp directory
fs.readdir(moveFrom, function (err, files) {
    if (err) {
        console.error("Could not list the directory.", err);
        process.exit(1);
    }
    files.forEach(function (file, index) {
        // Make one pass and make the file complete
        var fromPath = path.join(moveFrom, file);
        var toPath = path.join(moveTo, file);
        fs.stat(fromPath, function (error, stat) {
            if (error) {
                console.error("Error stating file.", error);
                return;
            }
            if (stat.isFile())
                console.log("'%s' is a file.", fromPath);
            else if (stat.isDirectory())
                console.log("'%s' is a directory.", fromPath);
            fs.rename(fromPath, toPath, function (error) {
                if (error) {
                    console.error("File moving error.", error);
                } else {
                    console.log("Moved file '%s' to '%s'.", fromPath, toPath);
                }
            });
        });
    });
});
Tested on my local machine.
node testme.js
'/home/mike/dev/node/sonar/moveme/hello' is a file.
'/home/mike/dev/node/sonar/moveme/test' is a directory.
'/home/mike/dev/node/sonar/moveme/test2' is a directory.
'/home/mike/dev/node/sonar/moveme/test23' is a directory.
'/home/mike/dev/node/sonar/moveme/test234' is a directory.
Moved file '/home/mike/dev/node/sonar/moveme/hello' to '/home/mike/dev/node/sonar/tome/hello'.
Moved file '/home/mike/dev/node/sonar/moveme/test' to '/home/mike/dev/node/sonar/tome/test'.
Moved file '/home/mike/dev/node/sonar/moveme/test2' to '/home/mike/dev/node/sonar/tome/test2'.
Moved file '/home/mike/dev/node/sonar/moveme/test23' to '/home/mike/dev/node/sonar/tome/test23'.
Moved file '/home/mike/dev/node/sonar/moveme/test234' to '/home/mike/dev/node/sonar/tome/test234'.
Update: fs.promises functions with async/await
Inspired by ma11hew28's answer (shown here), here is the same thing as above but with the async functions in fs.promises. As noted by ma11hew28, this may have memory limitations versus fs.promises.opendir added in v12.12.0.
Quick code below.
//jshint esversion:8
//jshint node:true
const fs = require( 'fs' );
const path = require( 'path' );
const moveFrom = "/tmp/movefrom";
const moveTo = "/tmp/moveto";
// Make an async function that gets executed immediately
(async () => {
    // Our starting point
    try {
        // Get the files as an array
        const files = await fs.promises.readdir( moveFrom );
        // Loop them all with the new for...of
        for( const file of files ) {
            // Get the full paths
            const fromPath = path.join( moveFrom, file );
            const toPath = path.join( moveTo, file );
            // Stat the file to see if we have a file or dir
            const stat = await fs.promises.stat( fromPath );
            if( stat.isFile() )
                console.log( "'%s' is a file.", fromPath );
            else if( stat.isDirectory() )
                console.log( "'%s' is a directory.", fromPath );
            // Now move async
            await fs.promises.rename( fromPath, toPath );
            // Log because we're crazy
            console.log( "Moved '%s'->'%s'", fromPath, toPath );
        } // End for...of
    }
    catch( e ) {
        // Catch anything bad that happens
        console.error( "We've thrown! Whoops!", e );
    }
})(); // Wrap in parenthesis and call now
fs.readdir(path[, options], callback) (which Mikey A. Leonetti used in his answer) and its variants (fsPromises.readdir(path[, options]) and fs.readdirSync(path[, options])) each reads all of a directory's entries into memory at once. That's good for most cases, but if the directory has very many entries and/or you want to lower your application's memory footprint, you could instead iterate over the directory's entries one at a time.
Asynchronously
Directories are async iterable, so you could do something like this:
const fs = require('fs')
async function ls(path) {
    const dir = await fs.promises.opendir(path)
    for await (const dirent of dir) {
        console.log(dirent.name)
    }
}
ls('.').catch(console.error)
Or, you could use dir.read() and/or dir.read(callback) directly.
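A minimal sketch of the dir.read() variant (lsOneAtATime is a made-up name, used here just for illustration):

const fs = require('fs')

async function lsOneAtATime(path) {
    const dir = await fs.promises.opendir(path)
    let dirent
    // each dir.read() resolves with the next entry, or null once the directory is exhausted
    while ((dirent = await dir.read()) !== null) {
        console.log(dirent.name)
    }
    await dir.close()
}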
Synchronously
Directories aren't sync iterable, but you could use dir.readSync() directly. For example:
const fs = require('fs')
const dir = fs.opendirSync('.')
let dirent
while ((dirent = dir.readSync()) !== null) {
    console.log(dirent.name)
}
dir.closeSync()
Or, you could make directories sync iterable. For example:
const fs = require('fs')
function makeDirectoriesSyncIterable() {
    const p = fs.Dir.prototype
    if (p.hasOwnProperty(Symbol.iterator)) { return }
    const entriesSync = function* () {
        try {
            let dirent
            while ((dirent = this.readSync()) !== null) { yield dirent }
        } finally { this.closeSync() }
    }
    if (!p.hasOwnProperty('entriesSync')) { p.entriesSync = entriesSync }
    Object.defineProperty(p, Symbol.iterator, {
        configurable: true,
        enumerable: false,
        value: entriesSync,
        writable: true
    })
}
makeDirectoriesSyncIterable()
And then, you could do something like this:
const dir = fs.opendirSync('.')
for (const dirent of dir) {
    console.log(dirent.name)
}
Note: "In busy processes, use the asynchronous versions of these calls. The synchronous versions will block the entire process until they complete, halting all connections."
References:
Node.js Documentation: File System: Class fs.Dir
Node.js source code: fs.Dir
GitHub: nodejs/node: Issues: streaming / iterative fs.readdir #583
Read all folders in a directory
const fs = require("fs"); // added: required for the calls below
const readAllFolder = (dirMain) => {
    const readDirMain = fs.readdirSync(dirMain);
    console.log(dirMain);
    console.log(readDirMain);
    readDirMain.forEach((dirNext) => {
        console.log(dirNext, fs.lstatSync(dirMain + "/" + dirNext).isDirectory());
        if (fs.lstatSync(dirMain + "/" + dirNext).isDirectory()) {
            readAllFolder(dirMain + "/" + dirNext);
        }
    });
};
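Called with a starting directory, e.g. readAllFolder('.') (the argument here is just an example), it logs every entry and recurses into each subdirectory.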
The answers provided are for a single folder. Here is an asynchronous implementation for multiple folders, where all the folders are processed simultaneously but the smaller folders or files get completed first.
Please comment if you have any feedback.
Asynchronously Multiple Folders
const fs = require('fs')
const util = require('util')
const path = require('path')
// Multiple folders list
const in_dir_list = [
    'Folder 1 Large',
    'Folder 2 Small', // small folder and files will complete first
    'Folder 3 Extra Large'
]
// BEST PRACTICES: (1) the fast folder-list for loop has to be outside the async capture-callback functions for async to make sense
// (2) slower read/write or I/O processes are best contained in async capture-callback functions, because these processes are slower than for-loop events and faster-completed items get called back first
for (let i = 0; i < in_dir_list.length; i++) {
    const in_dir = in_dir_list[i]
    // function is created (see below) so each folder is processed asynchronously for the readFile_async that follows
    readdir_async_capture(in_dir, function(files_path) {
        console.log("Processing folders asynchronously ...")
        for (let j = 0; j < files_path.length; j++) {
            const file_path = files_path[j]
            const file = file_path.substr(file_path.lastIndexOf("/") + 1, file_path.length)
            // function is created (see below) so all files are read simultaneously but the smallest file will be completed first and get called back first
            readFile_async_capture(file_path, file, function(file_string) {
                try {
                    console.log(file_path)
                    console.log(file_string)
                } catch (error) {
                    console.log(error)
                    console.log("System exiting first to catch error; if not, async will continue...")
                    process.exit()
                }
            })
        }
    })
}
// fs.readdir async capture function to deal with the asynchronous code above
function readdir_async_capture(in_dir, callback) {
    fs.readdir(in_dir, function(error, files) {
        if (error) { return console.log(error) }
        const files_path = files.map(function(x) { return path.join(in_dir, x) })
        callback(files_path)
    })
}
// fs.readFile async capture function to deal with the asynchronous code above
function readFile_async_capture(file_path, file, callback) {
    fs.readFile(file_path, function(error, data) {
        if (error) { return console.log(error) }
        const file_string = data.toString()
        callback(file_string)
    })
}