Order of script execution with Promise - javascript

I know that this question is almost the same as this one: Execution order of Promises — but can someone explain to me where my mistake is?
I have the following functions:
// The main function
// The entry point: runs the folder checks and the DB connection as a promise chain.
// NOTE(review): a .then() only waits for steps that RETURN a promise —
// checkFolderContent and searchForImportFolder below return nothing, which is
// why connectToDB starts before searchForImportFolder has finished.
function startTesting() {
  console.info("--- Thanks! Testing is running... ---");
  checkFolderExistence(dirPath) // dirPath is defined elsewhere in the script
    .then(checkFolderContent)
    .then(searchForImportFolder)
    .then(connectToDB)
    .catch(err => console.error("*** ERROR *** " + err));
}
// Resolves with `path` once the directory can be listed.
// NOTE(review): the error branch only logs — it never calls reject(), so on
// failure the returned promise stays pending forever and the chain stalls
// silently (the answer below points this out).
function checkFolderExistence(path) {
  console.info('--- Checking the folder "' + path + '" existence... ---');
  let promise = new Promise(function(resolve, reject) {
    fs.readdir(path, (err) => {
      if(err) {
        console.error('*** ERROR **** The folder "C:\\For_testing" doesn\'t exist. Testing is stopped!!! ***');
      } else {
        console.info("--- The folder \"C:\\For_testing\" exists... ---");
        resolve(path);
      };
    });
  });
  return promise;
}
// Lists the folder's files synchronously and logs them.
// NOTE(review): `filesArray` is assigned without let/const (implicit global),
// the function returns undefined (so the next .then receives undefined), and
// an empty folder only logs an error — it does not stop the chain.
function checkFolderContent(path) {
  console.info('--- Checking the folder "' + path + '" content... ---');
  filesArray = fs.readdirSync(path);
  if(filesArray.length == 0) {
    console.error('*** ERROR *** There are no any files in ' + path + '. Testing is stopped!!! ***');
  } else {
    console.info('--- The folder is checked. It contains the next files: ---');
    for(let i = 0; i < filesArray.length; i++) {
      console.info(filesArray[i]);
    }
  };
}
// Looks for the Import folder: 64-bit location first, then the 32-bit one.
// NOTE(review): this is the root cause of the ordering problem described in
// the question — fs.readdir is asynchronous and nothing is returned, so the
// caller's .then() fires immediately and connectToDB runs before this search
// finishes. It also stores its result in the global `trueDestPath` instead of
// returning it.
function searchForImportFolder() {
  console.info('--- Searching for ".../Import" folder... ---');
  fs.readdir(destFolderPath64, (err) => {
    if(err) {
      fs.readdir(destFolderPath32, (err) => {
        if(err) {
          console.error('*** ERROR *** The folder ".../Import" was not found ***');
        } else {
          console.info('--- The folder ".../Import" was successfully found... ---');
          trueDestPath = destFolderPath32;
        }
      });
    } else {
      console.info('--- The folder "C:/Program Files (x86)/StoreLine/Office/Import" was successfully found... ---');
      trueDestPath = destFolderPath64;
    }
  });
}
// Connects to the database with `config`; on failure, retries with
// `configWithoutPassw`; on success, reads the needed data via readDB(pool).
// NOTE(review): nothing is returned, so startTesting's chain cannot wait for
// (or catch errors from) this step either.
function connectToDB() {
  console.info('--- Connecting to the database... ---');
  let pool = new sql.ConnectionPool(config);
  pool.connect()
    .then(pool => {
      console.info("--- Connected to the database! ---");
      readDB(pool)
        .then(function() {
          console.info("--- All needed information from DB was successfully received ---");
        })
        .catch(err => console.error("*** ERROR *** " + err));
    })
    .catch(err => {
      // fall back to a password-less configuration
      pool = new sql.ConnectionPool(configWithoutPassw);
      pool.connect()
        .then(pool => {
          console.info("--- Connected to the database without the password! ---");
          readDB(pool)
            .then(function() {
              console.info("--- All needed information from the DB was successfully received ---");
            })
            .catch(err => console.error("*** ERROR ***" + err));
        })
        .catch(err => {
          console.error("*** ERROR *** Can't connect to the DB ***")
          sql.close();
        });
    });
}
I need a strict order of execution of the functions: checkFolderContent => searchForImportFolder => connectToDB.
In fact the execution is as follows: checkFolderContent is executed fully, then searchForImportFolder starts executing (I can see the line "--- Searching for ".../Import" folder... ---" in the console), but right after that connectToDB starts and the line "--- Connecting to the database... ---" appears. And only after that line do I see "--- The folder ".../Import" was successfully found... ---" from the previous function.
What did I do wrong? I've read that in .then() function should return a promise. How can I do that?

searchForImportFolder doesn't return a promise, so the chain doesn't wait for that promise to complete. Do the same thing in searchForImportFolder that you've done in checkFolderExistence: Wrap the callback-style API in a promise.
A couple of notes:
checkFolderExistence should call reject in the error path; it doesn't currently.
Node provides a promisify function you can use to wrap callback-style API calls in promises, rather than doing it manually. Or you could use the promisify-fs npm module, or the promisify npm module that lets you promisify an entire API at once, or Node's own experimental promises API for fs.
You might want to make checkFolderContent async (again using promises) rather than using readdirSync, which holds up the main thread waiting on I/O.
If you're using any recent version of Node, you might want to switch to using async functions and the await keyword, as it lets you write your logical flow rather than writing a bunch of callbacks.
searchForImportFolder should return its result rather than setting a global.
So for instance, here are checkFolderExistence and searchForImportFolder using util.promisify (these assume searchForImportFolder should return its result, so you'll have to adjust code using it):
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);

// Resolves with the checked path; logs and rejects when it cannot be read.
function checkFolderExistence(path) {
  console.info('--- Checking the folder "' + path + '" existence... ---');
  return readdirPromise(path)
    .then(files => { // readdir resolves with the file list...
      console.info("--- The folder \"C:\\For_testing\" exists... ---");
      return path;   // ...but the next step in the chain expects the path
    })
    .catch(error => {
      console.error('*** ERROR **** The folder "C:\\For_testing" doesn\'t exist. Testing is stopped!!! ***');
      throw error;   // rethrow so the chain actually stops, as the message says
    });
}
// ...
// Resolves with whichever Import folder exists (64-bit location preferred);
// rejects (after logging) when neither can be read.
// The 32-bit fallback is NESTED inside the catch: with a flat
// .then/.catch/.then chain, the success branch for destFolderPath64 would
// fall straight into the following .then and always return destFolderPath32.
function searchForImportFolder() {
  console.info('--- Searching for ".../Import" folder... ---');
  return readdirPromise(destFolderPath64)
    .then(() => {
      console.info('--- The folder "C:/Program Files (x86)/StoreLine/Office/Import" was successfully found... ---');
      return destFolderPath64;
    })
    .catch(() => readdirPromise(destFolderPath32)
      .then(() => {
        console.info('--- The folder ".../Import" was successfully found... ---');
        return destFolderPath32;
      })
      .catch(error => {
        console.error('*** ERROR *** The folder ".../Import" was not found ***');
        throw error;
      })
    );
}
If you don't need all that logging, checkFolderExistence just becomes readdirPromise, and searchForImportFolder becomes:
Or if you don't need all that logging (presumably that was for debugging):
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);
// ...
// Resolves with the existing Import folder's PATH (64-bit preferred).
// Note the .then inside the catch: without it the fallback would resolve
// with readdir's array of directory entries instead of destFolderPath32.
function searchForImportFolder() {
  console.info('--- Searching for ".../Import" folder... ---');
  return readdirPromise(destFolderPath64)
    .then(() => destFolderPath64)
    .catch(() => readdirPromise(destFolderPath32).then(() => destFolderPath32));
}
And here they are using util.promisify and async/await:
Or using util.promisify and async/await:
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);
// ...
// async/await version: resolves with the first Import folder that can be
// read; if both lookups fail, it rejects with the 32-bit lookup's error.
async function searchForImportFolder() {
  try {
    await readdirPromise(destFolderPath64);
    return destFolderPath64;
  } catch (error) {
    await readdirPromise(destFolderPath32);
    return destFolderPath32;
  }
}
If you want to avoid searching for the two different folders repeatedly, a simple tactic is just to remember the promise from searchForImportFolder and then use then on it any time you need that value:
const importFolderPromise = searchForImportFolder();
...then when you need it:
importFolderPromise.then(folder => {/*...*/});
...or in an async function:
const folder = await importFolderPromise;
The search will only happen once.

Related

How to connect to a MongoDB asynchronously?

I have a MongoDB database. I'm using Javascript and Node.js (using Mangoose) to access this database. I need to return an array with the name of all the collections in the database. So I use a code that follows.
let connection = mongoose.createConnection(process.env.MONGODB_URI + "/" + dbname);
// Wait 10 seconds for Mongoose to establish the connection.
// NOTE(review): a fixed sleep only happens to work — nothing guarantees the
// connection is ready after 10 s; the answer below awaits the connect instead.
await new Promise(r => setTimeout(r, 10000));
return connection.db.collections()
  .then(stations=>stations.map(stations=>stations.collectionName))
  .catch(reason => {
    console.error("Error : "+reason);
    return null;
  });
The code above is working correctly. Now, I'm trying to do the same process asynchronously. For this, I am using the following code.
// Attempts to list collection names, wrapping createConnection in a Promise.
// NOTE(review): createConnection returns immediately, BEFORE the connection
// is established, so `connection.db` is still undefined when the next .then
// runs — that is the TypeError described below. The try/catch also cannot
// catch asynchronous connection failures.
async function station_list_all(dbname){
  return new Promise((resolve,reject) => {
    try {
      let connection = mongoose.createConnection(process.env.MONGODB_URI + "/" + dbname);
      resolve(connection);
    }catch(err){
      reject(new Error("DB not found!"));
    }
  })
  .then(connection => connection.db)
  .then(db => db.collections())
  .then(stations=>stations.map(station=>station.collectionName))
  .catch(err => {
    console.error("Error : "+err);
    return null;
  });
}
Unfortunately, instead of the collection names, I get the message: Error : TypeError: Cannot read property 'collections' of undefined.
I think db is returning undefined, but... Shouldn't the code wait until db has a valid value?
Try with async await:
// Await the connection itself so the database is ready before it is used.
try {
  await mongoose.connect('mongo url with db name');
  // other process
} catch (error) {
  handleError(error);
}
Or you can connect using callback:
// Or run the follow-up work inside a .then on the returned promise.
// NOTE(review): the surrounding try/catch only catches synchronous throws —
// a rejected connect() promise would need a .catch (or await) to be handled.
try {
  mongoose.connect('mongo url with db name').then(()=>{
    // other process
  });
} catch (error) {
  handleError(error);
}
In the same way, you can also try this with a plain promise. There is no need to use setTimeout.
For More Ref Please visit: Mongoose Connections

javascript outer function finishing execution before inner function can modify return variable - asynch callback problems

I'm trying to write a simple recursive file walker using node's fs API and am making a basic error with callbacks, but am having a hard time seeing it.
the final return the_files; executes and returns an empty array [] instead of one filled with the file objects
the debug statement does log a populated array to the console with Dirent objects in the array.
the testdir structure is:
/testdir/a.txt
/testdir/b.txt
/testdir/c.txt
/testdir/somedir/d.txt
what am i missing?
function getTheFiles(mydir){
  let the_files = [];
  // fs.readdir is asynchronous: this callback runs AFTER getTheFiles has
  // already returned, which is why the return below yields an empty array.
  fs.readdir(mydir, {withFileTypes: true}, function (err, files){
    if (err){
      return console.log('Unable to scan directory: ' + err);
    }
    files.forEach(function (file) {
      if (file.name[0] === '.') return; //skip dotfiles
      if (file.isDirectory()){
        // NOTE(review): the recursive call's results are discarded entirely
        getTheFiles(path.resolve(mydir, file.name));
      }
      if (file.isFile()){
        the_files.push(file);
        console.log(the_files); //debug
      }
    })
  })
  return the_files; // executes before the readdir callback populates the array
}
//output: []
I've gone through some excellent SO answers and blogs several times, but have not been able to massage my code into the right format:
Ref 1: Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
Ref 2: Get data from fs.readFile
Ref 3: forEach does not play well with async
Ref 4: Why is my variable unaltered after I modify it inside of a function? - Asynchronous code reference
Ref 5: How do you get a list of the names of all files present in a directory in Node.js? - doesn't help you do things with the result of the dir walk
Here's another attempt where I tried calling a function with the result of the async readdir -
// Second attempt: hand the listing to a helper instead of returning it.
function async_ver_getTheFiles(dir){
  fs.readdir(dir, {withFileTypes: true}, function read(err, files){
    if (err){
      throw err; // NOTE(review): thrown inside an async callback — the caller cannot catch this
    }
    filePacker(files);
  });
}
// Collects names from ONE directory listing.
// NOTE(review): the_files is rebuilt on each call, so recursive results are
// lost, and nothing ever flows back to async_ver_getTheFiles's caller.
function filePacker(files){
  let the_files = [];
  for (const file of files){
    if (file.name[0] === '.') continue; //skip dotfiles
    if (file.isFile()){
      the_files.push(file.name);
    }
    if (file.isDirectory()){
      // NOTE(review): recurses with just the bare name, not the full path
      async_ver_getTheFiles(file.name);
    }
  }
  console.log(the_files); //returns the files
  return the_files;
}
var result = async_ver_getTheFiles(dir);
console.log(result); //returns undefined — async_ver_getTheFiles has no return statement
//output: returns the undefined result before printing the correct result from within filePacker()
I do have a sync version working fine:
// Synchronous version: works for the test directory shown in the question.
function getTheFiles(dir){
  let the_files = [];
  let files = [];
  try {
    files = fs.readdirSync(dir, {withFileTypes: true});
  }
  catch (error) {
    console.error('failed calling fsreaddirSync(dir, {withFileTypes: true} on value dir: \'' + dir + '\'\n');
    console.error(error);
  }
  for (const file of files){
    // NOTE(review): `return` exits the WHOLE function at the first dotfile,
    // silently dropping the rest of the directory — should be `continue`
    // (it only works here because the test directory has no dotfiles).
    if (file.name[0] === '.') return; //skip dotfiles
    if (file.isFile()){
      let absolutePath = path.resolve(dir, file.name);
      the_files.push(absolutePath);
    }
    else if (file.isDirectory()){
      the_files.push(getTheFiles(path.resolve(dir, file.name)).toString()); //should be the_files.push(...getTheFiles(blah)) instead - see accepted answer
    }
  }
  return the_files;
}
//output:
<path>/a.txt
<path>/b.txt
<path>/c.txt
<path>/somedir/d.txt
Maybe the whole thing gets a bit easier if you switch from the callback form of the fs API to the promise based version of the API. Ie, instead of
const fs = require('fs');
use
const fs = require('fs').promises;
This way, your fs function won't have a signature like
fs.readdir(dir, (err, files) => {})
anymore but will look like the following
fs.readdir(dir) : Promise<string[]>
and you can use them like the following
let files = await fs.readdir("/foo/bar");
This way, you can (more or less) take your synchronous approach, and everywhere, where you now have a
let baz = fs.foobarSync(...)
you can then use
let baz = await fs.foobar();
Thus your code would be like (see also the comments in the code)
// require the promise based API
let fs = require('fs').promises;

// declared async so that await is available inside the body
async function getTheFiles(dir) {
  let entries = [];
  try {
    entries = await fs.readdir(dir, { withFileTypes: true });
  } catch (error) {
    console.error('...');
    console.error(error);
  }
  const collected = [];
  for (const entry of entries) {
    // continue (not return) so the remaining entries are still examined
    if (entry.name[0] === '.') continue;
    if (entry.isFile()) {
      collected.push(path.resolve(dir, entry.name));
    } else if (entry.isDirectory()) {
      // recurse first, then splice the nested array's elements into our list
      const nested = await getTheFiles(path.resolve(dir, entry.name));
      collected.push(...nested);
    }
  }
  return collected;
}

getTheFiles("/foo/bar")
  .then(files => {
    console.log(files);
  })
  .catch(e => {
    console.log(e);
  });
There are multiple ways to do this, in all of these the function that calls getTheFiles won't have the result immediately (synchronously), but will have to wait and react when the result is ready.
With a callback:
// Callback version: collects entries from mydir (recursing into
// subdirectories) and hands the finished array to `cb` exactly once.
// The original called getTheFiles recursively WITHOUT a callback — so `cb`
// was undefined inside the recursive call and subdirectory results were
// dropped — and it invoked cb before the recursions had finished.
function getTheFiles(mydir, cb){
  let the_files = [];
  fs.readdir(mydir, {withFileTypes: true}, function (err, files){
    if (err){
      return console.log('Unable to scan directory: ' + err);
    }
    // count outstanding work so cb fires only after every recursion is done
    let pending = 1; // 1 for this directory's own listing
    const done = () => {
      if (--pending === 0) cb(the_files);
    };
    files.forEach(function (file) {
      if (file.name[0] === '.') return; //skip dotfiles
      if (file.isDirectory()){
        pending++;
        // recurse WITH a callback and keep the subdirectory's files
        getTheFiles(path.resolve(mydir, file.name), subFiles => {
          the_files.push(...subFiles);
          done();
        });
      }
      if (file.isFile()){
        the_files.push(file);
      }
    });
    done();
  });
}
getTheFiles('some_path', files => {
  // use files here
});
With a Promise:
// Promise version: resolves with all non-dotfile entries, including those
// found in subdirectories; rejects if the directory cannot be read.
// Fixes over the earlier draft: `return` after reject (otherwise the forEach
// below throws on the undefined `files`), and the recursive promises are kept
// and awaited instead of being discarded and resolving too early.
function getTheFiles(mydir){
  // return a Promise
  return new Promise((resolve, reject) => {
    fs.readdir(mydir, {withFileTypes: true}, function (err, files){
      if (err){
        // reject on error — and bail out before touching `files`
        return reject('Unable to scan directory: ' + err);
      }
      const the_files = [];
      const subdirScans = [];
      files.forEach(function (file) {
        if (file.name[0] === '.') return; //skip dotfiles
        if (file.isDirectory()){
          // keep the recursive promise so we can wait for it below
          subdirScans.push(getTheFiles(path.resolve(mydir, file.name)));
        }
        if (file.isFile()){
          the_files.push(file);
        }
      });
      // resolve only after every subdirectory scan has finished
      Promise.all(subdirScans)
        .then(nested => resolve(the_files.concat(...nested)))
        .catch(reject);
    });
  });
}
getTheFiles('some_path').then(files => {
  // use files here
})
// NOTE(review): add a .catch here — the promise rejects if the directory
// cannot be read.
With a Promise and await:
// Same Promise version, shown for use with await below.
// Fixes: `return` after reject (otherwise the forEach would throw on the
// undefined `files`), and subdirectory scans are collected and awaited rather
// than discarded while resolving too early.
function getTheFiles(mydir){
  // return a Promise
  return new Promise((resolve, reject) => {
    fs.readdir(mydir, {withFileTypes: true}, function (err, files){
      if (err){
        // reject on error — and bail out before touching `files`
        return reject('Unable to scan directory: ' + err);
      }
      const the_files = [];
      const subdirScans = [];
      files.forEach(function (file) {
        if (file.name[0] === '.') return; //skip dotfiles
        if (file.isDirectory()){
          // keep the recursive promise so we can wait for it below
          subdirScans.push(getTheFiles(path.resolve(mydir, file.name)));
        }
        if (file.isFile()){
          the_files.push(file);
        }
      });
      // resolve only after every subdirectory scan has finished
      Promise.all(subdirScans)
        .then(nested => resolve(the_files.concat(...nested)))
        .catch(reject);
    });
  });
}
async function someAsyncFunction() {
  // wait until getTheFiles finished (but don't block other code from executing as sync would do)
  const files = await getTheFiles('some_path')
  // use files here
}
I think fs supports promises also natively with fs.promises or something similar, but I didn't look that up now. May be a good solution too.

chaining promises in functions

I have a small problem, how to create a promise chain in a sensible way so that the makeZip function will first add all the necessary files, then create the zip, and finally delete the previously added files? (The makeZip function also has to return a promise). In the example below I don't call deleteFile anywhere because I don't know exactly where to call it. when I tried to call it inside the add file function to delete the file immediately after adding it, for some unknown reason the console displayed the zip maked! log first and then file deleted.
// NOTE(review): the Promise is created but never returned (missing `return`
// before `new Promise`), so callers get undefined and cannot wait for the
// unlink — this is the bug the answer below describes.
const deleteFile = (file, result) => {
  new Promise((resolve, reject) => {
    fs.unlink(`./screenshots/${file}`, (err) => {
      if (err) return reject(err);
      console.log(`${file} deleted!`);
      return resolve();
    });
  });
};
// NOTE(review): same problem — the Promise is not returned, so the
// Promise.all in makeZip receives undefined and has nothing to wait on.
const addFile = (file) => {
  new Promise((resolve, reject) => {
    try {
      zip.addLocalFile(`./screenshots/${file}`);
      console.log(`${file} added`);
      return resolve();
    } catch {
      return reject(new Error("failed to add file"));
    }
  });
};
// NOTE(review): because addFile above returns undefined, Promise.all resolves
// immediately; makeZip also never returns the chain, so callers cannot
// sequence the delete step after the zip is written.
const makeZip = () => {
  Promise.all(fs.readdirSync("./screenshots").map((file) => addFile(file)))
    .then(() => {
      return new Promise((resolve, reject) => {
        try {
          zip.writeZip(`./zip_files/supername.zip`);
          console.log("zip maked!");
          resolve();
        } catch {
          return reject(new Error("failed making zip"));
        }
      });
    })
    .catch((err) => console.log(err));
};
The main cause of this is that you are not returning the promises you are instantiating in your function calls. Also, I have some suggestions that can improve your code's cleanliness.
[TIP]: Ever checked the promisify function in the NodeJS util package? It comes with Node and is very convenient for converting functions that take callbacks as arguments into promise-returning functions. I will demonstrate below anyhow.
// so I will work with one function because the problem resonates with the rest, so
// let us look at the add file function.
// so let us get the promisify function first
const promisify = require('util').promisify;
const addFile = (file) => {
  // if addLocalFile is async then you can just return it
  // NOTE(review): adm-zip's addLocalFile appears to be synchronous and return
  // undefined, which Promise.all still treats as resolved — TODO confirm.
  return zip.addLocalFile(`./screenshots/${file}`);
};
// okay so here is the promisify example, realized it wasn't applicable int the function
// above
const deleteFile = (file, result) => {
  // so we will return here a. So because the function fs.unlink, takes a second arg that
  // is a callback we can use promisify to convert the function into a promise
  // returning function.
  return promisify(fs.unlink)(`./screenshots/${file}`);
  // so from there you can do your error handling.
};
So now let us put it all together in your last function, that is, makeZip
// Builds the zip once every screenshot has been added, then cleans up.
// Fixes over the draft above: the stray `;` between the last .then and
// .catch was a SyntaxError, and the chain is now returned so callers can
// await it (matching this answer's own advice about returning promises).
const makeZip = () => {
  // good call on this, very interesting.
  return Promise.all(fs.readdirSync("./screenshots").map((file) => addFile(file)))
    .then(() => {
      return zip.writeZip(`./zip_files/supername.zip`);
    })
    .then(() => {
      //... in here you can then unlink your files.
    })
    .catch((err) => console.log(err));
};
Everything should be good with these suggestions, hope it works out...
Thank you all for the hints, the solution turned out to be much simpler, just use the fs.unlinkSync method instead of the asynchronous fs.unlink.
// Accepted solution: synchronous calls, so the order is guaranteed.
const deleteFile = (file) => {
  try {
    fs.unlinkSync(`./screenshots/${file}`);
    console.log(`${file} removed`);
  } catch (err) {
    console.error(err);
  }
};
// Adds the screenshot to the zip and deletes the source right away —
// NOTE(review): safe only if addLocalFile has fully read the file before
// returning (i.e. it is synchronous) — TODO confirm for this zip library.
const addFile = (file) => {
  try {
    zip.addLocalFile(`./screenshots/${file}`);
    console.log(`${file} added`);
    deleteFile(file);
  } catch (err) {
    console.error(err);
  }
};
// Adds every screenshot, then writes the archive.
const makeZip = () => {
  fs.readdirSync("./screenshots").map((file) => addFile(file));
  zip.writeZip(`./zip_files/supername.zip`);
  console.log("zip maked!");
};

How to resolve a list of dynamically created Promises?

I am writing a git pre-commit hook and I want to be able to pass it an array of commands to execute, for it to execute them, and if any fail throw an error. Examples of these commands might be to run a test suite or a build.
I am having problems dynamically doing this using the promisified version of Node's child_process exec command.
So far I have a config file with 2 example commands:
config.js
// Commands to run before each commit; each entry is executed via exec().
const config = {
  onPreCommit: ['git --version', 'node -v'],
};
module.exports = config;
If I pass in the values manually with this code I get the promise objects fulfilled with the correct values from the commands as I'd expect:
pre-commit hook
// Manual version: works, but only handles exactly two commands.
function preCommit() {
  if (config.onPreCommit && config.onPreCommit.length > 0) {
    Promise.allSettled([
      exec(config.onPreCommit[0]),
      exec(config.onPreCommit[1]),
    ]).then((results) => results.forEach((result) => console.log(result)));
  }
}
preCommit();
However, if I try and do this dynamically like below, this throws an error:
function preCommit() {
  if (config.onPreCommit && config.onPreCommit.length > 0) {
    const cmdPromises = config.onPreCommit.map((cmd, i) => {
      return new Promise((resolve, reject) => {
        // NOTE(review): `cmd[i]` indexes into the command STRING (a single
        // character such as 'o'), which produces the "'o' is not recognized"
        // errors below — it should be exec(cmd), as the accepted fix says.
        exec(cmd[i])
          .then((res) => {
            resolve(res);
          })
          .catch((err) => {
            reject(err);
          });
      });
    });
    Promise.allSettled(cmdPromises).then((results) =>
      results.forEach((result) => console.log(result))
    );
  }
}
preCommit();
Promises rejected with:
Error: Command failed: o
'o' is not recognized as an internal or external command,
operable program or batch file.
and
Error: Command failed: o
'o' is not recognized as an internal or external command,
operable program or batch file.
Thanks to the comment by mtkopone, the issue was in my map function.
Fixed by changing exec(cmd[i]) to exec(cmd)
Also updated function so hook works as intended:
// Runs every configured command; exits 1 on the first failure, 0 otherwise.
function preCommit() {
  if (config.onPreCommit && config.onPreCommit.length > 0) {
    // Loop through scripts passed in and collect a promise per command.
    // exec() already returns a promise, so wrapping it in `new Promise`
    // (as the earlier version did) was the promise-constructor anti-pattern.
    const cmdPromises = config.onPreCommit.map((cmd) => exec(cmd));
    // Make sure all scripts have been run; fail with the error if any rejected
    Promise.allSettled(cmdPromises)
      .then((results) =>
        results.forEach((result) => {
          if (result.status === 'rejected') {
            console.log(result.reason);
            process.exit(1);
          }
        })
      )
      .then(() => {
        // If no errors, exit with no errors - commit continues
        process.exit(0);
      });
  }
}
preCommit();

Node.js Async/Promise explanation with JIMP?

I'm working on a Discord bot that takes an uploaded picture attachment, saves it to a temporary file after editing, and then uploads it to the Discord server. Everything works, except for the timing. The sendImage function gets called after the generateImagePixel function, but tries to upload the temp image before jimp.write is finished, causing an ENOENT error. How would I fix my timing issue?
// Discord handler: on "<prefix>pixel", pixelate the attachments, then upload.
client.on('message', message => {
  if (message.content.includes(config.prefix + 'pixel')) {
    // NOTE(review): `d` is a Date created elsewhere; reusing the same Date
    // gives every upload in a session the same temp name — TODO confirm.
    var tempname = d.getTime();
    console.log(tempname);
    generateImagePixel(message.attachments, tempname).then(() => sendImage(tempname, message))
  }
});
function generateImagePixel(msg, name) {
  return new Promise((resolve, reject) => {
    msg.forEach(a => {
      try {
        // jimp.read is asynchronous: its callback runs later, after the
        // resolve(name) below has already fired — the cause of the ENOENT race.
        jimp.read(a.url, function (err, image) {
          const clone = image.clone();
          clone.pixelate(8)
            .rgba(true)
            .filterType(0)
            .write('./temp/' + name + '.png');
        });
      } catch(err) {
        console.log(err);
      }
    });
    resolve(name) // resolves before any image has actually been written
  })
}
// Uploads the previously written temp image to the message's channel.
function sendImage(tempname, msg) {
  msg.channel.send({
    file: './temp/'+ tempname + '.png' // Or replace with FileOptions object
  });
}
This is a typical example of executing asynchronous functions in a synchronous loop.
jimp.read is asynchronous, so each call returns immediately, before the actual work it does finishes. Therefore msg.forEach also finishes before jimp is done.
Once you are using something asynchronous, all you do has to be in asynchronous style:
// Start one jimp job per attachment and resolve with `name` only after
// every job has completed (or logged its error).
function generateImagePixel(msg, name) {
  const jobs = msg.map(async (attachment) => {
    try {
      const image = await jimp.read(attachment.url);
      return image
        .pixelate(8)
        .rgba(true)
        .filterType(0)
        .write('./temp/' + name + '.png');
    } catch (err) {
      console.error(err);
    }
  });
  return Promise.all(jobs).then(() => name);
}

Categories

Resources