Converting callbacks with for loop and recursion to promises - javascript

I wrote a function that runs recursively to find files whose names include a given word. I do not understand how promises work and cannot find a way to write this function with promises despite trying hard.
I tried returning a promise inside findPath function but I couldn't use it since extractFiles calls findPath. I tried to create a list of promises and return all but couldn't succeed neither.
So how could I write these functions with promises?
const fs = require('fs');
const path = require('path');
// Recursively search `targetPath` for files whose path contains
// `targetWord`; `done` is invoked callback-style with (err, fileName).
function findPath(targetPath, targetWord, done) {
  // Silently bail out when the root path does not exist.
  if (!fs.existsSync(targetPath)) return;
  fs.readdir(targetPath, (err, allPaths) => {
    // NOTE(review): missing `return` — on error, execution continues
    // into the loop with `allPaths` undefined.
    if (err) done(err, null);
    // NOTE(review): `aPath` is never declared, so this creates an
    // implicit global (and would throw in strict mode).
    for (aPath of allPaths) {
      aPath = path.join(targetPath, aPath);
      extractFiles(aPath, targetWord, done);
    }
  });
  // Classify one entry: recurse into directories, report matching files.
  function extractFiles(aPath, targetWord, done) {
    fs.lstat(aPath, (err, stat) => {
      // NOTE(review): missing `return` here as well.
      if (err) done(err, null);
      if (stat.isDirectory()) {
        findPath(aPath, targetWord, done);
      }
      else if (aPath.indexOf(targetWord) >= 0) {
        // `done` may fire once per match — it is not a one-shot callback.
        let fileName = aPath.split('.')[0];
        done(null, fileName);
      }
    });
  }
}
// NOTE(review): this callback ignores the error argument — on failure
// `file` receives the error object instead of a filename.
findPath('../modules', 'routes', file => {
  console.log(file);
});

Firstly, to make the "core" code more readable, I'd promisify the fs functions
// Lift a single-argument, node-callback-style function into one that
// returns a Promise: rejects on `err`, resolves with the result otherwise.
const promisify1p = (fn) => (arg) =>
  new Promise((resolve, reject) => {
    fn(arg, (err, result) => (err ? reject(err) : resolve(result)));
  });
// Promise-returning flavours of the fs helpers used below.
const readdirAsync = promisify1p(fs.readdir);
const lstatAsync = promisify1p(fs.lstat);
Then, just chain the promises as you would with any other promises
const fs = require('fs');
const path = require('path');
// Recursively walk `targetPath`, resolving with the (extension-stripped)
// paths of every file whose path contains `targetWord`.
function findPath(targetPath, targetWord) {
  // Classify a single entry: recurse into directories, keep matching
  // files (minus everything after the first '.'), mark the rest false.
  const classifyEntry = (entryPath) =>
    lstatAsync(entryPath).then((stat) => {
      if (stat.isDirectory()) {
        return walkDirectory(entryPath);
      }
      if (entryPath.includes(targetWord)) {
        return entryPath.split('.')[0];
      }
      return false;
    });

  // List a directory, classify every entry in parallel, then strip the
  // `false` placeholders and flatten the nested arrays one level.
  const walkDirectory = (directory) =>
    readdirAsync(directory).then((entries) => {
      const classified = entries.map((entry) =>
        classifyEntry(path.join(directory, entry))
      );
      return Promise.all(classified)
        .then((results) => results.filter((result) => result))
        .then((results) => [].concat.apply([], results));
    });

  return walkDirectory(targetPath);
}
// Kick off the search and consume the matches once every branch settles.
findPath('../modules', 'routes')
  .then((matches) => {
    // matches: array of extension-stripped file paths containing the targetWord
  })
  .catch((error) => console.error(error));
Not much to it at all.

Related

creating asynchronous function with a loop inside

I have a little problem, I need the makeZip function to wait for the takeScreenshot function to take all the screenshots it needs, how do I do this while taking care of best practices?
(I know at this point "then" doesn't make sense with the post method, I just tried it my way before but it didn't work the way I wanted)
Function:
// Fire off one webshot per configured resolution for `url`.
// NOTE(review): returns undefined — the `.map` result is discarded and the
// webshot callbacks are never awaited, so callers cannot chain on this
// (this is the bug the answers below address).
const takeScreenshot = (url) => {
  const resolutionsArray = Object.values(resolutions);
  // `async` here is meaningless: nothing inside is awaited.
  resolutionsArray.map(async (mediaSize) => {
    webshot(url, setFileName(url, mediaSize), setOptions(mediaSize), (err) => {
      if (!err) {
        console.log("screenshot taken!");
      }
    });
  });
};
calling functions:
// POST / — screenshot the given url, zip the shots, email the archive.
app.post("/", async (req, res) => {
  const { url } = req.body;
  // NOTE(review): takeScreenshot returns undefined, so `.then` throws a
  // TypeError here — see the Promise-returning versions suggested below.
  takeScreenshot(url)
    .then((url) => makeZip(url))
    .then((url) => sendEmail(url))
    .then((message) => res.send(message))
    .catch((err) => console.log(err));
});
My suggestion is:
to use Promise.all or Promise.allSettled when you need to handle several promises
extract callback of map fn
// Wrap a single webshot call in a Promise ("argsHere" is a placeholder
// for whatever arguments you thread through, e.g. url and mediaSize).
const makeWebshot = (argsHere) => new Promise((resolve, reject) => {
  // Bug fix: the executor parameter was misspelled `reselove`, so the
  // `resolve()` call below threw a ReferenceError at runtime.
  webshot(url, setFileName(url, mediaSize), setOptions(mediaSize), (err) => {
    if (err) return reject(err);
    return resolve();
  });
});
Update takeScreenshot to
// Start one webshot per configured resolution and resolve when all finish.
const takeScreenshot = (url) => {
  const sizes = Object.values(resolutions);
  const shots = sizes.map((mediaSize) => makeWebshot(argsHere));
  return Promise.all(shots);
};
When dealing with a list of Promises you will want to use Promise.all to wait for them all to resolve. Here is a simple example:
// Minimal example: turn each number into a delayed promise, then wait
// for the whole batch with Promise.all.
const list = [1, 2, 3];
const all = list.map((value) =>
  new Promise((resolve) => {
    setTimeout(() => {
      console.log(value);
      resolve(value * 2); // each promise yields its doubled input
    }, 100);
  })
);
Promise.all(all).then(console.log);
In your case it would be something like this:
// Sketch: return the mapped array so the caller can Promise.all it.
const takeScreenshot = (url) =>
  Object.values(resolutions).map(async (mediaSize) => {
    // NOTE(review): webshot's callback is never awaited, so each mapped
    // promise resolves immediately — wrap webshot in a Promise (as in
    // makeWebshot earlier) for Promise.all to actually wait.
    webshot(url, setFileName(url, mediaSize), setOptions(mediaSize), (err) => {
      if (!err) {
        console.log("screenshot taken!");
      }
    });
  });
app.post("/", async (req, res) => {
  const { url } = req.body;
  // Pseudocode below — the `...` is left for the reader to fill in.
  Promise.all(takeScreenshot(url))
    .then((listOfUrls) => ...
});
But since I don't know what webshot returns, I can't tell you what the processing of the listOfUrls should look like.

How to get value inside foreach in nodejs

I'm trying to develop a simple app where, if you pass a parameter on the command line, the application will search inside a directory; if the text matches in some of the files, the file should be saved in a list. But when I add the console.log the value is not updated.
here is my code:
// Search every file under ./movies/data for "walt disney", counting
// matches. (Question code — see the answer below for the fix.)
const folder = "./movies/data";
const fs = require("fs");
var args = process.argv.slice(2);
console.log("myArgs: ", args);
var count = 0;
var list = [];
fs.readdir(folder, (err, files) => {
  files.forEach((file) => {
    // fs.readFile is asynchronous: these callbacks run after the
    // console.log at the bottom, which is why `count` prints as 0.
    fs.readFile(`movies/data/${file}`, "utf8", function (err, data) {
      // NOTE(review): missing `return` — on error, `data` is undefined
      // and `.includes` throws.
      if (err) console.log(err);
      if (data.includes("walt disney")) {
        count++;
        list.push(data);
        console.log("Found in: ", data);
      }
    });
  });
  // Runs before any readFile callback has fired.
  console.log(`Foram encontradas ${count} ocorrĂȘncias pelo termo ${args}.`);
});
any suggestions about what i'm doing wrong?
For your program to work, you will have to add some Promise / async/await logic. At the moment you try to read from the files, the directory listing is still pending, so fs.readdir() will not yet have provided the wanted result.
This should work:
const { resolve } = require('path');
// fs.promises exposes promise-returning versions of the fs API (Node 10+).
const { readdir } = require('fs').promises;
const fs = require("fs");
// CLI arguments after the node binary and the script path.
var args = process.argv.slice(2);
const pathToFiles = "./movies/";
// Recursively collect the absolute paths of every file under `dir`.
async function getFiles(dir) {
  const dirents = await readdir(dir, { withFileTypes: true });
  // Directories expand recursively; plain files resolve to their path.
  const expanded = await Promise.all(
    dirents.map((dirent) => {
      const fullPath = resolve(dir, dirent.name);
      return dirent.isDirectory() ? getFiles(fullPath) : fullPath;
    })
  );
  // Flatten one level: each recursive call contributed a nested array.
  return Array.prototype.concat(...expanded);
}
// List all files, then grep each one for the search term.
getFiles(pathToFiles)
  .then(files => {
    console.log(files)
    files.forEach((file) => {
      fs.readFile(file, 'utf8', (err, data) => {
        // Bug fix: without this `return`, a read error fell through and
        // `data.includes` threw on undefined.
        if (err) return console.log(err);
        // NOTE(review): `args` is an array; String#includes coerces it to
        // a comma-joined string — pass args[0] if a single term is meant.
        if (data.includes(args)) {
          console.log(`${args} found in ${file}.`);
        } else {
          console.log(`${args} not found.`);
        }
      });
    })
  })
  .catch(e => console.error(e));

Run line at node script end?

I've written a script that recursively asynchronously modifies js files in a directory. It's made up of a search(dirname) function which searches a directory for js files, and a modify(filename) which does the modifying.
// Number of files modify() has processed so far.
let totalFilesSearched = 0;
// Recursively search `dir`, calling modify() on every .js file.
// NOTE(review): entirely fire-and-forget — nothing signals completion,
// which is why the final console.log below prints 0.
const search = (dir) => {
  fs.readdir(dir, (err, list) => {
    if (err) return;
    list.forEach((filename) => {
      const filepath = path.join(dir, filename);
      if (filename.endsWith('.js')) {
        modify(filepath);
      } else if (fs.lstatSync(filepath).isDirectory()) {
        search(filepath);
      }
    })
  });
}
// Read and modify a single file, bumping the counter when done.
const modify = (filename) => {
  fs.readFile(filename, 'utf8', (err, data) => {
    // NOTE(review): missing `return` after the error log.
    if (err) console.log(err);
    // ... my modification code ...
    totalFilesSearched++;
  });
}
search(args[0])
// Runs immediately, before any async callback above has completed.
console.log(`Total files searched: ${totalFilesSearched}`);
I want to print out the totalFilesSearched at the end of my script but because my code is asynchronous, it just prints Total files searched: 0 right away.
Does anyone know how I'd wait until the script is about to end to print this out? I'm having trouble because both my search() and modify() functions are asynchronous.
Use Promises instead, and then call console.log when everything is resolved. Use promisify to turn the callbacks into promises:
const { promisify } = require('util');
const readFile = promisify(fs.readFile);
// Bug fix: this line read `util.promisify(fs.readdir)`, but only the
// destructured `promisify` binding exists — `util` itself was never
// imported, so it threw a ReferenceError.
const readDir = promisify(fs.readdir);
// Recursively walk `dir`, modifying every .js file; the returned promise
// resolves once the whole subtree has been processed.
const search = (dir) => (
  readDir(dir).then((list) => (
    Promise.all(list.map((filename) => {
      const filepath = path.join(dir, filename);
      if (filename.endsWith('.js')) {
        return modify(filepath); // recursively return the promise
      } else if (fs.lstatSync(filepath).isDirectory()) {
        return search(filepath); // recursively return the promise
      }
    }))
  ))
  // NOTE(review): deliberate best-effort — unreadable directories are
  // skipped, but the error is discarded entirely; consider logging `err`.
  .catch(err => void 0)
);
// Read one file, apply the modification, and count it.
const modify = (filename) => (
  readFile(filename, 'utf8')
    .then((data) => {
      // other code
      totalFilesSearched++;
    }).catch(err => console.log(err))
)
// Now the chain resolves only after every file has been visited.
search(args[0])
  .then(() => {
    console.log(`Total files searched: ${totalFilesSearched}`);
  });
Self answer:
Just use process.on('exit', callback_function_to_execute_at_end)
Its built into node, your callback will get executed right before the process exits.

How should I download a file in Node? [duplicate]

I have this code that serves every markdown file in the './markdown' folder. At '/api/markdown/filename'.
var apiRouter = express.Router();
// NOTE(review): missing `var`/`const` — implicit global.
markdownFolder = './markdown/';
// GET /api/markdown/:markdown_file_noext — serve the matching markdown
// file rendered to HTML. (Question code — see the answers below.)
apiRouter.get('/:markdown_file_noext', function(req, res) {
  fs.readdir(markdownFolder, function(err, markdown) {
    // NOTE(review): throwing inside an async callback cannot be caught
    // by Express — it crashes the process.
    if (err) throw err;
    markdown.forEach(function(file) {
      fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
        if (err) throw err;
        // NOTE(review): implicit global again.
        fileNoExtension = file.slice(0, file.indexOf('.'));
        // res.json may be called at most once; a second match would throw.
        if (req.params.markdown_file_noext == fileNoExtension) {
          res.json({
            'title': fileNoExtension,
            'markdown': marked(file_content)
          });
        };
      });
    });
  });
});
But I end up having a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as promise library:
// Promise-based rewrite using the Q library.
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
// Node-callback functions lifted into promise-returning ones.
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);
readdir(markdownFolder).then(markdown => {
  // Read every markdown file in parallel.
  const reads = markdown.map(file => readFile(markdownFolder + file, 'utf8'));
  return Q.all(reads);
}).then(files => {
  // Do your magic.
}).catch(error => {
  // Do something with error.
});
You have different option.
Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module.
For a more advance option, you can use generators and Promises to make your code look more like a sync way. Take a look at co or bluebird.coroutine.
With Promises you could do like this:
const path = require('path');
const apiRouter = express.Router();
// Bug fix: was an implicit global (no declaration keyword).
const markdownFolder = './markdown/';
// GET /:markdown_file_noext — find the markdown file whose basename
// matches the URL parameter and return its rendered content as JSON.
apiRouter.get('/:markdown_file_noext', function(req, res) {
  readdir(markdownFolder)
    .then((files) => {
      // Bug fix: keep the filename paired with its content — the original
      // dropped the filenames, so `file` was undefined in the next .then.
      const tasks = files.map((file) => {
        const filePath = path.resolve(markdownFolder, file);
        return readFile(filePath).then((content) => ({ file, content }));
      });
      return Promise.all(tasks); // Read all files
    })
    .then((fileContents) => {
      return fileContents
        .map(({ file, content }) => {
          const fileNoExtension = file.slice(0, file.indexOf('.'));
          if (req.params.markdown_file_noext === fileNoExtension) {
            return {
              'title': fileNoExtension,
              'markdown': marked(content)
            };
          }
          return null;
        })
        .filter(Boolean); // drop the files that did not match
    })
    .then((results) => {
      // Aggregate all results in one array and return it once
      res.json(results);
    })
    .catch((err) => {
      // All errors are caught here
      console.log(err);
    })
});
// Promise wrapper around fs.readdir: resolves with the folder's entries.
function readdir(folderPath) {
  return new Promise((resolve, reject) => {
    // Bug fix: the callback was written as `(err, files) {` — the missing
    // arrow made this a syntax error.
    fs.readdir(folderPath, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files);
    });
  });
}
// Promise wrapper around fs.readFile (utf8): resolves with the file text.
function readFile(filePath) {
  return new Promise((resolve, reject) =>
    fs.readFile(filePath, 'utf8', (err, file_content) =>
      err ? reject(err) : resolve(file_content)
    )
  );
}

Find amounts of files and get total line count with Node FS

I'm building a node script that asynchronously outputs a directory's file count and line count; however, I am having trouble with its asynchronous control flow.
// Import Dependencies
const fs = require('fs');
// Recursively tally file and line counts under `dir`.
// NOTE(review): question code — the promise chain is never returned at
// any level (see the answer below), so callers cannot await the result.
const get_dir_line_count = (dir) => {
  let output = { file_count: 0, file_line: 0, path: '' };
  // NOTE(review): missing `return` before `new Promise`.
  new Promise( (resolve, reject) => {
    fs.readdir(dir, (err, dir_contents) => {
      // `err` is ignored; a failed readdir resolves with undefined.
      resolve(dir_contents);
    });
  }).then( (promise_contents) => {
    // NOTE(review): missing `return` before `Promise.all`.
    Promise.all(promise_contents.map( (file) => {
      const file_path = dir + '/' + file;
      return new Promise( (resolve, reject) => {
        fs.stat(file_path, (err, stat) => {
          // Returning from a node callback does not settle the promise:
          // entries that take this branch leave their promise pending.
          if(err || file[0] === '.') return err;
          if(stat.isDirectory() && file !== 'node_modules'){
            // NOTE(review): recursive result is dropped; promise never resolves.
            get_dir_line_count(file_path);
          }
          else if(stat.isFile()){
            promise_line_count(file_path)
            .then( (line_count) => {
              output.path = dir;
              output.file_line += line_count;
              output.file_count++;
              resolve(output);
            });
          };
        });
      }).then( (resolved_output) => {
        console.log(resolved_output)
        return resolved_output;
      });
    }));
  });
};
// Resolve with the number of newline bytes (0x0A) in the file at
// `pathToFile`, streaming so large files are not buffered whole.
const promise_line_count = (pathToFile) => {
  let line_count = 0;
  return new Promise( (resolve, reject) => {
    fs.createReadStream(pathToFile)
      .on("data", (buffer) => {
        // Each `chunk` is one byte value of the buffer.
        buffer.forEach( (chunk) => {
          if(chunk === 10) line_count++;
        });
      }).on("end", () => {
        resolve(line_count);
      });
      // NOTE(review): no 'error' handler — a bad path crashes the process.
  });
};
const directory = process.argv[2];
// NOTE(review): get_dir_line_count returns undefined, so its result
// cannot be consumed here — see the answer below.
get_dir_line_count('./../' + directory);
My intention is to recursively go through the directories that outputs Promise.all arrays. Each array is a collection of the directory's computed data. However, I am having asynchronous control flow issue upon the Promise.all. If anyone can provide feedback, that would be helpful.
Output:
Project = 5 Files, 50 lines
Project/src = 10 Files, 60 lines
Project/apple = 20 Files, 200 lines
...etc
One issue is that you're not returning anything from get_dir_line_count function itself:
const get_dir_line_count = (dir) => {
let output = { file_count: 0, file_line: 0, path: '' };
new Promise( (resolve, reject) => {
// ^---- missing a return statement
Another problem is that you forgot to return the result from Promise.all so the chain can be properly built:
// ...
}).then( (promise_contents) => {
Promise.all(promise_contents.map( (file) => {
// ^---- missing a return
You've also forgotten to return (or resolve) the recursive call to get_dir_line_count:
if(err || file[0] === '.') return err;
if(stat.isDirectory() && file !== 'node_modules'){
get_dir_line_count(file_path);
// ^--- missing a return statement or resolve statement
}
Finally, since you're returning the output object from get_dir_line_count, you can check things work by adding a then and passing the result into console.log:
const directory = process.argv[2];
get_dir_line_count('./../' + directory).then(console.log) // <-- get the output object and the log it
As far as dealing with the complexity of asynchronous code in general, main thing you can do to clean up the control flow is to extract individual logic into separate functions.
Below you can find a code example of one approach along with embedded comments (I also preserved the underscored naming preference):
const fs = require('fs');
const path = require('path');
// resolves with the file names within the given directory
function get_file_names(dir) {
  return new Promise((resolve, reject) =>
    fs.readdir(dir, (err, fileNames) =>
      err ? reject(err) : resolve(fileNames)
    )
  );
}
// resolves with an object containing the type ('file' or 'dir') for the given file path and the file path itself: { file_path, type }
function get_path_and_type(file_path) {
  return new Promise((resolve, reject) => {
    fs.stat(file_path, (err, stat) => {
      if (err) return reject(err);
      // Bug fix: reject with an Error (not a bare string) AND return —
      // the original fell through and called resolve on a promise that
      // had already been rejected.
      if (!stat.isDirectory() && !stat.isFile()) {
        return reject(new Error('Invalid Type'));
      }
      const type = stat.isDirectory() ? 'dir' : 'file';
      resolve({
        file_path,
        type
      });
    });
  });
}
// same as before, counts lines for the given file path
function count_lines(file_path) {
  return new Promise((resolve, reject) => {
    let total = 0;
    const stream = fs.createReadStream(file_path);
    stream.on("data", (buffer) => {
      // Tally newline bytes (0x0A) in each buffered chunk.
      for (const byte of buffer) {
        if (byte === 10) total += 1;
      }
    });
    stream.on("end", () => resolve(total));
    stream.on("error", reject);
  });
};
// Recursively compute { file_count, file_lines, path } for `dir`.
function get_dir_line_count(dir) {
  const output = {
    file_count: 0,
    file_lines: 0,
    path: dir
  };
  // get all filenames in the given directory
  return get_file_names(dir)
    // filter all file names that start with a '.' or include the string 'node_modules'
    .then((names) =>
      names.filter((name) =>
        !name.startsWith('.') && !name.includes('node_modules')
      )
    )
    // map every file name into a promise that resolves with the type for that file name within the given dir
    .then((names) =>
      names.map((name) =>
        get_path_and_type(path.join(dir, name))
          // NOTE(review): a rejection resolves to undefined here, which the
          // destructuring below would then throw on — consider filtering.
          .catch(console.warn) // log invalid typed files if necessary
      )
    ).then((paths_and_types_promises) =>
      Promise.all(paths_and_types_promises.map((promise) =>
        promise.then(({
          file_path,
          type
        }) => {
          if (type === 'dir') {
            // if current file path corresponds to a directory
            // recursively count its files and lines and merge the totals
            return get_dir_line_count(file_path)
              .then((recursive_output) => {
                output.file_count += recursive_output.file_count;
                // Bug fix: this line added recursive_output.file_count,
                // silently discarding every subdirectory's line count.
                output.file_lines += recursive_output.file_lines;
              });
          } else {
            // count the lines for the current file path and then update the overall output
            return count_lines(file_path)
              .then((file_lines) => {
                output.file_lines += file_lines;
                output.file_count += 1;
              })
          }
        })
      ))
    // this last chain makes sure we wait for the promise to resolve
    // and populate the output object before resolving with it
    ).then(() => output);
}
// Walk the directory given on the command line and print the totals.
get_dir_line_count(process.argv[2]).then(console.log);
const fs = require('fs');
const path = require('path');
// Aggregated results keyed by directory path.
// NOTE(review): shared module-level state — two concurrent walks would
// clobber each other's entries.
let output = {};
// NOTE(review): never read or written below — appears unused.
let lastDir = '';
// Recursively walk `dir`, tallying per-directory file and line counts
// into the shared `output` map; resolves with `output` once every entry
// of this directory (and its subtrees) has been processed.
const walk = (dir) => {
  return new Promise((resolve, reject) => {
    output[dir] = {
      files: 0,
      lines: 0,
      path: ''
    };
    fs.readdir(dir, (err, list) => {
      if (err) {
        return reject(err);
      }
      // Countdown of entries still in flight; when it reaches zero this
      // directory's promise resolves.
      let pending = list.length;
      if (!pending) {
        return resolve(output);
      }
      list.forEach((file) => {
        file = path.resolve(dir, file);
        fs.stat(file, (err, stat) => {
          // NOTE(review): a stat error leaves `stat` undefined and falls
          // into the file branch below — the read stream then emits an
          // unhandled 'error' event. Consider handling `err` here.
          if (stat && stat.isDirectory()) {
            walk(file)
              .then((res) => {
                if (!--pending) {
                  resolve(output);
                }
              })
          }
          else {
            // Count newline bytes (0x0A) streamed from the file.
            let lc = 0;
            fs.createReadStream(file)
              .on('data', (buffer) => {
                buffer.forEach((chunk) => {
                  if (chunk === 10) {
                    lc++;
                  }
                })
              })
              .on('end', () => {
                output[dir].files++;
                output[dir].lines += lc;
                output[dir].path = dir;
                if (!--pending) {
                  resolve(output);
                }
              });
          }
        })
      })
    })
  });
};
// Walk the current directory and print (or log) the aggregated counts.
walk('.')
  .then(console.log)
  .catch(console.log);

Categories

Resources