Node.js file system: Promise once read all files

I am using Node.js file system to build an array of file paths. I would like to know when all files have been read, so I could work further with my array.
Sequence of events:
Go into a folder
Get a path of each file
Put each path into an array
Let me know once you're done
Code:
'use strict';

const fs = require('fs');

function readDirectory(path) {
  return new Promise((resolve, reject) => {
    const files = [];
    fs.readdir(path, (err, contents) => {
      if (err) {
        reject(err);
      }
      contents.forEach((file) => {
        const pathname = `${ path }/${ file }`;
        getFilesFromPath(pathname).then(() => {
          console.log('pathname', pathname);
          files.push(pathname);
        });
        resolve(files);
      });
    });
  });
}
function getFilesFromPath(path) {
  return new Promise((resolve, reject) => {
    const stat = fs.statSync(path);
    if (stat.isFile()) {
      fs.readFile(path, 'utf8', (err, data) => {
        if (err) {
          reject(err);
        } else {
          resolve(data);
        }
      });
    } else if (stat.isDirectory()) {
      readDirectory(path);
    }
  });
}
getFilesFromPath('./dist');
It would be great to glue it together with:
Promise.all(files).then(() => {
  // do stuff
})

Your suggestion pretty much works - did you try it? Here's a typical way of doing it:
getFilesFromPath(path).then(files => {
  const filePromises = files.map(readFile);
  return Promise.all(filePromises);
}).then(fileContentsArray => {
  // do stuff - the array will contain the contents of each file
});
You'll have to write the "readFile()" function yourself, but it looks like you've got that covered.
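For reference, a minimal promise wrapper over fs.readFile could look like the sketch below; this is just one possible shape (newer Node versions also ship an equivalent as require('fs').promises.readFile):

const fs = require('fs');

// Sketch: wrap callback-style fs.readFile in a Promise that resolves
// with the file's contents as a UTF-8 string.
function readFile(path) {
  return new Promise((resolve, reject) => {
    fs.readFile(path, 'utf8', (err, data) => {
      if (err) return reject(err);
      resolve(data);
    });
  });
}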

Related

Using fs.createReadStream and fs.createWriteStream for multiple files

I'm trying to loop over multiple files with fs.createReadStream, and I cannot figure out how to read the second file.
const fs = require('fs')
const csv = require('csv-parse')

const parser = csv({
  ...
})

const files = process.argv.slice(2)

async function analyzeFiles () {
  for (const file of files) {
    const string = file
    console.log(`Analyzing ${file}.`)
    await runFile(file, string)
    console.log(`Analyzed ${file}.`)
  }
}

async function runFile (filepath, string) {
  return new Promise(function (resolve, reject) {
    const shimmedData = {}
    let fileName = ''
    fs.createReadStream(filepath)
      .pipe(parser)
      .on('data', (row) => {
        // ...
        fileName = 'something dynamic from row'
        shimmedData[index] = row // Or something similar, not sure this matters
      })
      .on('error', (e) => {
        console.log('BONK', e)
      })
      .on('end', () => {
        fs.writeFile(`${fileName}.json`, JSON.stringify(shimmedData), (err) => {
          if (err) {
            console.log(err)
            reject(err)
          } else {
            console.log('File written successfully.')
            resolve()
          }
        })
      })
  })
}

analyzeFiles()
And then I run node script.js file1.txt file2.txt file3.txt
When I run this, only the first file will ever be saved. Looking into it with console logs, it looks like for the second file, fs.createReadStream is never called.
➜ shimeBirdData git:(main) ✗ node stackoverflowExample.js sampleData/sample.txt sampleData/sample2.txt
Analyzing sampleData/sample.txt.
File written successfully.
Analyzed sampleData/sample.txt.
Analyzing sampleData/sample2.txt.
Execution stops there, and no second file is ever saved.
What am I missing?
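One likely culprit (an educated guess, not a verified answer): the parser is created once and shared by every file. pipe ends the destination stream when the first file finishes, and a stream that has already emitted 'end' cannot be piped into again, so the second file never gets processed. A minimal sketch of runFile that builds a fresh parser per file, with the elided parts kept as comments:

async function runFile (filepath, string) {
  return new Promise(function (resolve, reject) {
    // Build a new parser for every file; the one from the outer scope
    // has already ended after the first file and cannot be reused.
    const parser = csv({
      // ...same options as above
    })
    const shimmedData = {}
    let fileName = ''
    fs.createReadStream(filepath)
      .pipe(parser)
      .on('data', (row) => {
        // ...same row handling as above
      })
      .on('error', reject)
      .on('end', () => {
        // ...same fs.writeFile call as above, then resolve()
      })
  })
}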

How to remove image file/ literally a file from a node js-express server

I'm a beginner in the web development field, and as my first project I'm trying to create a basic e-commerce application. I've made it most of the way, but I'm now stuck on removing the image file of a product.
I'm using express-generator to create the server; I didn't change any default variables and only added the MongoDB API.
router.get('/deleteProduct/:id', (req, res) => {
  let productId = req.params.id;
  productsHelper.deleteProduct(productId).then((response) => {
    res.redirect('/admin/');
  });
});
module.exports.deleteProduct = (productId) => {
  return new Promise((resolve, reject) => {
    db.get().collection(collections.PRODUCT_COLLECTIONS).removeOne({ _id: objectId(productId) })
      .then((response) => {
        resolve(response);
      });
  });
};
I also tried fs.
This is my first time ever on Stack Overflow ...
You can remove the file after the database call succeeds:
const fs = require("fs");
module.exports.deleteProduct = (productId) => {
return new Promise((resolve, reject) => {
db.get()
.collection(collections.PRODUCT_COLLECTIONS)
.removeOne({ _id: objectId(productId) })
.then((response) => {
const path = "./file.png";
fs.unlink(path, (err) => {
if (err) {
console.error(err);
reject(err);
}
console.log("File removed");
resolve(response);
});
});
});
};
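If you're on a Node version with fs.promises (10+), the same flow reads a bit more naturally with async/await. A sketch, keeping the answer's hard-coded "./file.png" placeholder:

const fs = require("fs").promises;

module.exports.deleteProduct = async (productId) => {
  const response = await db.get()
    .collection(collections.PRODUCT_COLLECTIONS)
    .removeOne({ _id: objectId(productId) });
  // Only remove the image once the database delete has succeeded.
  await fs.unlink("./file.png"); // placeholder path, as above
  return response;
};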

ffmpeg - on end function probably not working correctly?

I've got this code to loop through some .mp4 files and create a screenshot of each:
files.forEach(file => {
  console.log(file);
  if (!fs.existsSync('img/' + file.replace('.mp4', '.png'))) {
    ffmpeg({ source: 'movies/' + file })
      .takeScreenshots({ timemarks: ['50'], size: '150x100' }, 'img/', function(err, filenames) {
      })
      .on('end', function() {
        fs.rename('img/tn.png', 'img/' + file.replace('.mp4', '.png'), function(err) { if (err) console.log('Error: ' + err) });
        sleep(1000);
      })
  }
});
Now I've got the problem that .on('end') sometimes seems not to fire. Error:
ENOENT: no such file or directory, rename
I think it's because the process of saving tn.png is slower than the renaming...
I wouldn't mix callbacks, sync calls, sleep, and loops together. You can use the promise version of fs to convert all your callback-style code to promise style, and you can run the work sequentially or in parallel.
I would also wrap the screenshot code in a promise.
Here is the code:
const fs = require("fs").promises;
function takeScreenshot(file) {
return new Promise((resolve, reject) => {
ffmpeg({"source": `movies/${file}`})
.takeScreenshots({"timemarks": ["50"], "size": "150x100"}, "img/", function (err, filenames) {
})
.on("end", function () {
resolve();
})
.on("error", err => {
reject(err);
});
});
}
// execute one by one
async function sequential(files) {
for (const file of files) {
const fileExists = await fs.stat(`img/${file.replace(".mp4", ".png")}`);
if (fileExists) {
await takeScreenshot(file);
await fs.rename("img/tn.png", `img/${ file.replace(".mp4", ".png")}`);
}
}
}
// execute in parallel
async function parallel(files) {
return Promise.all(files.map(async file => {
const fileExists = await fs.stat(`img/${file.replace(".mp4", ".png")}`);
if (fileExists) {
await takeScreenshot(file);
await fs.rename("img/tn.png", `img/${ file.replace(".mp4", ".png")}`);
}
}));
}
Hope this helps.
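For completeness, one possible way to invoke either variant (hypothetical usage; it assumes the videos live in movies/ as in the question, and fs is the promise-based module from the snippet above):

fs.readdir("movies")
  .then((names) => sequential(names.filter((name) => name.endsWith(".mp4"))))
  .catch(console.error);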

How should I download a file in Node? [duplicate]

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
  fs.readdir(markdownFolder, function(err, markdown) {
    if (err) throw err;
    markdown.forEach(function(file) {
      fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
        if (err) throw err;
        fileNoExtension = file.slice(0, file.indexOf('.'));
        if (req.params.markdown_file_noext == fileNoExtension) {
          res.json({
            'title': fileNoExtension,
            'markdown': marked(file_content)
          });
        }
      });
    });
  });
});
But I end up with a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as the promise library:

const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
  const promises = [];
  markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
  return Q.all(promises);
}).then(files => {
  // Do your magic.
}).catch(error => {
  // Do something with error.
});
You have different options:
Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
Use Promises; you will need a library such as Bluebird to wrap the fs module.
For a more advanced option, you can use generators and Promises to make your code look more synchronous. Take a look at co or bluebird.coroutine.
With Promises you could do it like this:
const path = require('path');

var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
  readdir(markdownFolder)
    .then((files) => {
      const tasks = files.map((file) => {
        const filePath = path.resolve(markdownFolder, file);
        // Pair each file name with its contents so both are available later
        return readFile(filePath).then((content) => ({ file, content }));
      });
      return Promise.all(tasks); // Read all files
    })
    .then((fileContents) => {
      return fileContents.map(({ file, content }) => {
        const fileNoExtension = file.slice(0, file.indexOf('.'));
        if (req.params.markdown_file_noext == fileNoExtension) {
          return {
            'title': fileNoExtension,
            'markdown': marked(content)
          };
        }
      });
    })
    .then((results) => {
      // It's better if you aggregate all results in one array and return it,
      // instead of calling res.json for each result
      res.json(results);
    })
    .catch((err) => {
      // All errors are caught here
      console.log(err);
    });
});
function readdir(folderPath) {
  return new Promise((resolve, reject) => {
    fs.readdir(folderPath, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files);
    });
  });
}

function readFile(filePath) {
  return new Promise((resolve, reject) => {
    fs.readFile(filePath, 'utf8', (err, file_content) => {
      if (err) {
        return reject(err);
      }
      resolve(file_content);
    });
  });
}
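As an aside, the two hand-written wrappers above don't have to be written by hand: util.promisify (Node 8+) generates them, and Node 10+ ships a promise-based fs directly.

const util = require('util');
const fs = require('fs');

// Generate promise-returning versions of the callback-style functions.
const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);

// Or, on Node 10+, use the built-in promise API:
// const { readdir, readFile } = require('fs').promises;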

Find amounts of files and get total line count with Node FS

I'm building a node script that asynchronously outputs a directory's file count and line count; however, I am having trouble with its asynchronous control flow.
// Import Dependencies
const fs = require('fs');

const get_dir_line_count = (dir) => {
  let output = { file_count: 0, file_line: 0, path: '' };
  new Promise( (resolve, reject) => {
    fs.readdir(dir, (err, dir_contents) => {
      resolve(dir_contents);
    });
  }).then( (promise_contents) => {
    Promise.all(promise_contents.map( (file) => {
      const file_path = dir + '/' + file;
      return new Promise( (resolve, reject) => {
        fs.stat(file_path, (err, stat) => {
          if(err || file[0] === '.') return err;
          if(stat.isDirectory() && file !== 'node_modules'){
            get_dir_line_count(file_path);
          }
          else if(stat.isFile()){
            promise_line_count(file_path)
              .then( (line_count) => {
                output.path = dir;
                output.file_line += line_count;
                output.file_count++;
                resolve(output);
              });
          }
        });
      }).then( (resolved_output) => {
        console.log(resolved_output)
        return resolved_output;
      });
    }));
  });
};

const promise_line_count = (pathToFile) => {
  let line_count = 0;
  return new Promise( (resolve, reject) => {
    fs.createReadStream(pathToFile)
      .on("data", (buffer) => {
        buffer.forEach( (chunk) => {
          if(chunk === 10) line_count++;
        });
      }).on("end", () => {
        resolve(line_count);
      });
  });
};

const directory = process.argv[2];
get_dir_line_count('./../' + directory);
My intention is to recursively go through the directories, outputting Promise.all arrays; each array is a collection of that directory's computed data. However, I am having an asynchronous control-flow issue with the Promise.all. If anyone can provide feedback, that would be helpful.
Output:
Project = 5 Files, 50 lines
Project/src = 10 Files, 60 lines
Project/apple = 20 Files, 200 lines
...etc
One issue is that you're not returning anything from the get_dir_line_count function itself:

const get_dir_line_count = (dir) => {
  let output = { file_count: 0, file_line: 0, path: '' };
  new Promise( (resolve, reject) => {
// ^---- missing a return statement
Another problem is that you forgot to return the result from Promise.all so the chain can be properly built:

// ...
}).then( (promise_contents) => {
  Promise.all(promise_contents.map( (file) => {
// ^---- missing a return
You've also forgotten to return (or resolve) the recursive call to get_dir_line_count:

if(err || file[0] === '.') return err;
if(stat.isDirectory() && file !== 'node_modules'){
  get_dir_line_count(file_path);
  // ^--- missing a return statement or resolve statement
}
Finally, since you're returning the output object from get_dir_line_count, you can check that things work by adding a then and passing the result to console.log:

const directory = process.argv[2];
get_dir_line_count('./../' + directory).then(console.log); // <-- get the output object and log it
As far as dealing with the complexity of asynchronous code in general, the main thing you can do to clean up the control flow is to extract individual pieces of logic into separate functions.
Below is a code example of one approach, along with embedded comments (I also preserved the underscored naming preference):
const fs = require('fs');
const path = require('path');

// resolves with the file names within the given directory
function get_file_names(dir) {
  return new Promise((resolve, reject) => {
    fs.readdir(dir, (err, fileNames) => {
      if (err) return reject(err);
      resolve(fileNames);
    });
  });
}

// resolves with an object containing the type ('file' or 'dir') for the
// given file path and the file path itself: { file_path, type }
function get_path_and_type(file_path) {
  return new Promise((resolve, reject) => {
    fs.stat(file_path, (err, stat) => {
      if (err) return reject(err);
      if (!stat.isDirectory() && !stat.isFile()) reject('Invalid Type');
      const type = stat.isDirectory() ? 'dir' : 'file';
      resolve({
        file_path,
        type
      });
    });
  });
}

// same as before, counts lines for the given file path
function count_lines(file_path) {
  return new Promise((resolve, reject) => {
    let lineCount = 0;
    fs.createReadStream(file_path)
      .on("data", (buffer) => {
        buffer.forEach((chunk) => {
          if (chunk === 10) lineCount++;
        });
      }).on("end", () => {
        resolve(lineCount);
      }).on("error", reject);
  });
}

function get_dir_line_count(dir) {
  const output = {
    file_count: 0,
    file_lines: 0,
    path: dir
  };
  // get all filenames in the given directory
  return get_file_names(dir)
    // filter out all file names that start with a '.' or include the string 'node_modules'
    .then((names) =>
      names.filter((name) =>
        !name.startsWith('.') && !name.includes('node_modules')
      )
    )
    // map every file name into a promise that resolves with the type for that file name within the given dir
    .then((names) =>
      names.map((name) =>
        get_path_and_type(path.join(dir, name))
          .catch(console.warn) // log invalid typed files if necessary
      )
    )
    .then((paths_and_types_promises) =>
      Promise.all(paths_and_types_promises.map((promise) =>
        promise.then(({ file_path, type }) => {
          if (type === 'dir') {
            // if the current file path corresponds to a directory,
            // recursively count its files and lines and add them to the overall output
            return get_dir_line_count(file_path)
              .then((recursive_output) => {
                output.file_count += recursive_output.file_count;
                output.file_lines += recursive_output.file_lines;
              });
          } else {
            // count the lines for the current file path, then update the overall output
            return count_lines(file_path)
              .then((file_lines) => {
                output.file_lines += file_lines;
                output.file_count += 1;
              });
          }
        })
      ))
    )
    // this last chain makes sure we wait for the promise to resolve
    // and populate the output object before resolving with it
    .then(() => output);
}

get_dir_line_count(process.argv[2])
  .then(console.log);
Another approach, using a pending counter instead of Promise.all:

const fs = require('fs');
const path = require('path');

let output = {};

const walk = (dir) => {
  return new Promise((resolve, reject) => {
    output[dir] = {
      files: 0,
      lines: 0,
      path: ''
    };
    fs.readdir(dir, (err, list) => {
      if (err) {
        return reject(err);
      }
      let pending = list.length;
      if (!pending) {
        return resolve(output);
      }
      list.forEach((file) => {
        file = path.resolve(dir, file);
        fs.stat(file, (err, stat) => {
          if (stat && stat.isDirectory()) {
            walk(file)
              .then((res) => {
                if (!--pending) {
                  resolve(output);
                }
              });
          }
          else {
            let lc = 0;
            fs.createReadStream(file)
              .on('data', (buffer) => {
                buffer.forEach((chunk) => {
                  if (chunk === 10) {
                    lc++;
                  }
                });
              })
              .on('end', () => {
                output[dir].files++;
                output[dir].lines += lc;
                output[dir].path = dir;
                if (!--pending) {
                  resolve(output);
                }
              });
          }
        });
      });
    });
  });
};

walk('.')
  .then(console.log)
  .catch(console.log);
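For comparison, here is a compact modern rewrite of the same walk. This is a sketch of my own, assuming Node 10+ (fs.promises and readdir's withFileTypes option), not code from either answer above:

const fsp = require('fs').promises;
const path = require('path');

// Count newline characters (byte 10) in a single file.
async function count_lines(file_path) {
  const data = await fsp.readFile(file_path);
  let lines = 0;
  for (const byte of data) {
    if (byte === 10) lines++;
  }
  return lines;
}

// Recursively collect { files, lines } per directory into `output`.
async function walk(dir, output = {}) {
  output[dir] = { files: 0, lines: 0, path: dir };
  const entries = await fsp.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      await walk(full, output);
    } else if (entry.isFile()) {
      output[dir].files++;
      output[dir].lines += await count_lines(full);
    }
  }
  return output;
}

walk('.').then(console.log).catch(console.error);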
