Node.js Async/Promise explanation with JIMP? - javascript

I'm working on a Discord bot that takes an uploaded picture attachment, saves it to a temporary file after editing, and then uploads it to the Discord server. Everything works, except for the timing. The sendImage function gets called after the generateImagePixel function, but tries to upload the temp image before jimp.write is finished, causing an ENOENT error. How would I fix my timing issue?
client.on('message', message => {
    if (message.content.includes(config.prefix + 'pixel')) {
        var tempname = d.getTime();
        console.log(tempname);
        generateImagePixel(message.attachments, tempname).then(() => sendImage(tempname, message))
    }
});
function generateImagePixel(msg, name) {
    return new Promise((resolve, reject) => {
        msg.forEach(a => {
            try {
                jimp.read(a.url, function (err, image) {
                    const clone = image.clone();
                    clone.pixelate(8)
                        .rgba(true)
                        .filterType(0)
                        .write('./temp/' + name + '.png');
                });
            } catch (err) {
                console.log(err);
            }
        });
        resolve(name)
    })
}
function sendImage(tempname, msg) {
    msg.channel.send({
        file: './temp/' + tempname + '.png' // Or replace with FileOptions object
    });
}

This is a typical example of executing asynchronous functions in a synchronous loop.
jimp.read is asynchronous, so each call returns immediately, before the work it starts has actually finished. Therefore msg.forEach also finishes before jimp is done.
Once you use something asynchronous, everything that depends on it has to be written in asynchronous style:
function generateImagePixel(msg, name) {
    const promises = msg.map(attachment => {
        return jimp.read(attachment.url)
            .then(image => {
                return image
                    .pixelate(8)
                    .rgba(true)
                    .filterType(0)
                    .write('./temp/' + name + '.png');
            })
            .catch(console.error);
    });
    return Promise.all(promises)
        .then(() => name);
}
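One caveat: Jimp's write() is itself asynchronous (it takes a completion callback), so the chain above can still resolve before the file is fully flushed to disk. If your Jimp version exposes writeAsync() (recent releases do), you can wait for the write as well; otherwise wrap the write callback in a promise. A sketch under that assumption:
function generateImagePixel(msg, name) {
    const promises = msg.map(attachment => {
        return jimp.read(attachment.url)
            .then(image => {
                const edited = image
                    .pixelate(8)
                    .rgba(true)
                    .filterType(0);
                // writeAsync resolves once the file is actually written;
                // on older Jimp versions, wrap edited.write(path, callback) in a Promise instead
                return edited.writeAsync('./temp/' + name + '.png');
            })
            .catch(console.error);
    });
    return Promise.all(promises).then(() => name);
}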

Related

chaining promises in functions

I have a small problem: how do I create a promise chain in a sensible way so that the makeZip function will first add all the necessary files, then create the zip, and finally delete the previously added files? (The makeZip function also has to return a promise.) In the example below I don't call deleteFile anywhere because I don't know exactly where to call it. When I tried to call it inside the addFile function to delete the file immediately after adding it, for some unknown reason the console displayed the "zip maked!" log first and only then the "file deleted" log.
const deleteFile = (file, result) => {
    new Promise((resolve, reject) => {
        fs.unlink(`./screenshots/${file}`, (err) => {
            if (err) return reject(err);
            console.log(`${file} deleted!`);
            return resolve();
        });
    });
};
const addFile = (file) => {
    new Promise((resolve, reject) => {
        try {
            zip.addLocalFile(`./screenshots/${file}`);
            console.log(`${file} added`);
            return resolve();
        } catch {
            return reject(new Error("failed to add file"));
        }
    });
};
const makeZip = () => {
    Promise.all(fs.readdirSync("./screenshots").map((file) => addFile(file)))
        .then(() => {
            return new Promise((resolve, reject) => {
                try {
                    zip.writeZip(`./zip_files/supername.zip`);
                    console.log("zip maked!");
                    resolve();
                } catch {
                    return reject(new Error("failed making zip"));
                }
            });
        })
        .catch((err) => console.log(err));
};
The main cause of this is that you are not returning the promises you are instantiating inside your functions. I also have some suggestions that can improve your code's cleanliness.
[TIP]: Have you ever checked out the promisify function in the Node.js util package? It comes with Node and is very convenient for converting functions that take callbacks as arguments into promise-returning functions. I will demonstrate it below.
// so I will work with one function because the problem resonates with the rest, so
// let us look at the addFile function.
// so let us get the promisify function first
const promisify = require('util').promisify;
const addFile = (file) => {
    // if addLocalFile is async then you can just return it
    return zip.addLocalFile(`./screenshots/${file}`);
};
// okay so here is the promisify example, realized it wasn't applicable in the function
// above
const deleteFile = (file, result) => {
    // so we will return here. Because the function fs.unlink takes a second arg that
    // is a callback, we can use promisify to convert the function into a promise
    // returning function.
    return promisify(fs.unlink)(`./screenshots/${file}`);
    // so from there you can do your error handling.
};
So now let us put it all together in your last function, that is, makeZip
const makeZip = () => {
    // good call on this, very interesting.
    // return the chain so that makeZip itself returns a promise
    return Promise.all(fs.readdirSync("./screenshots").map((file) => addFile(file)))
        .then(() => {
            return zip.writeZip(`./zip_files/supername.zip`);
        })
        .then(() => {
            //... in here you can then unlink your files.
        })
        .catch((err) => console.log(err));
};
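For completeness, the empty .then above is where the screenshots could be deleted, e.g. by mapping the same file list over the promisified deleteFile. A rough sketch of the full chain, assuming the addFile and deleteFile versions shown above:
const makeZip = () => {
    const files = fs.readdirSync("./screenshots");
    return Promise.all(files.map((file) => addFile(file)))
        .then(() => {
            zip.writeZip(`./zip_files/supername.zip`);
            console.log("zip maked!");
        })
        // only delete the screenshots once the zip has been written
        .then(() => Promise.all(files.map((file) => deleteFile(file))))
        .catch((err) => console.log(err));
};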
Everything should be good with these suggestions, hope it works out...
Thank you all for the hints; the solution turned out to be much simpler: just use the fs.unlinkSync method instead of the asynchronous fs.unlink.
const deleteFile = (file) => {
    try {
        fs.unlinkSync(`./screenshots/${file}`);
        console.log(`${file} removed`);
    } catch (err) {
        console.error(err);
    }
};
const addFile = (file) => {
    try {
        zip.addLocalFile(`./screenshots/${file}`);
        console.log(`${file} added`);
        deleteFile(file);
    } catch (err) {
        console.error(err);
    }
};
const makeZip = () => {
    fs.readdirSync("./screenshots").map((file) => addFile(file));
    zip.writeZip(`./zip_files/supername.zip`);
    console.log("zip maked!");
};
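As a side note, the original requirement that makeZip return a promise can still be met without the synchronous unlink calls by using the promise-based fs.promises API. A minimal sketch of that variant (makeZipAsync is just an illustrative name):
const fsp = require("fs").promises;
const makeZipAsync = async () => {
    const files = await fsp.readdir("./screenshots");
    for (const file of files) {
        zip.addLocalFile(`./screenshots/${file}`); // the file content is added to the zip object here
        await fsp.unlink(`./screenshots/${file}`); // deleting right away mirrors the solution above
        console.log(`${file} added and removed`);
    }
    zip.writeZip(`./zip_files/supername.zip`);
    console.log("zip maked!");
};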

Why isn't my async function waiting for the promise to be fulfilled

I am using ldapjs to query users from an ldap server.
If I put all the code just in a single script without using functions, the query works and I get the results I need.
I am now trying to use expressjs to serve a rest endpoint to enable querying of the ldap server, so I moved the ldapjs client.search code into a async function with a promise surrounding the actual search code.
After the promise code, I have a line which exercises the promise using await and stores the results of the promise in a variable. I then return that variable to the calling function which will eventually send the results back as a json-formatted string to the requesting browser.
The problem I am seeing is that the console.log() of the returned results is undefined and appears before the console.log statements inside the promise code. So it looks like the async function is returning before the promise is fulfilled, but I don't see why because in all the examples of promises and async/await I have seen this scenario works correctly.
Below is a sample script without the expressjs part to just make sure everything works correctly.
// script constants:
const ldap = require('ldapjs');
const assert = require('assert');
const ldapServer = "ldap.example.com";
const adSuffix = "dc=example,dc=com"; // test.com
const client = getClient();
const fullName = "*doe*";
var opts = {
    scope: "sub",
    filter: `(cn=${fullName})`,
    attributes: ["displayName", "mail", "title", "manager"]
};
console.log("performing the search");
let ldapUsers = doSearch(client, opts);
console.log("Final Results: " + ldapUsers);
function getClient() {
    // Setup the connection to the ldap server
    ...
    return client;
}
async function doSearch(client, searchOptions) {
    console.log("Inside doSearch()");
    let promise = new Promise((resolve, reject) => {
        users = '{"users": [';
        client.search(adSuffix, searchOptions, (err, res) => {
            if (err) {
                console.log(err);
                reject(err)
            }
            res.on('searchEntry', function(entry) {
                console.log("Entry: " + users.length);
                if (users.length > 11) {
                    users = users + "," + JSON.stringify(entry.object);
                } else {
                    users = users + JSON.stringify(entry.object);
                }
            });
            res.on('error', function(err) {
                console.error("Error: " + err.message);
                reject(err)
            });
            res.on('end', function(result) {
                console.log("end:");
                client.unbind();
                users = users + "]}";
                resolve(users)
            });
        });
    });
    // resolve the promise:
    let result = await promise;
    console.log("After promise has resolved.");
    console.log(result);
    return result
}
The output from the console.log statements is as follows:
Setting up the ldap client.
ldap.createClient succeeded.
performing the search
Inside doSearch()
Final Results: [object Promise]
Entry: 11
end:
After promise has resolved.
{"users": [{"dn":"cn=john_doe"}]}
I did strip out the code which creates the ldapjs client and redacted the company name, but otherwise this is my code.
Any ideas on why the doSearch function is returning before the promise is fulfilled would be greatly appreciated.
As #danh mentioned in a comment, you're not awaiting the response from doSearch. Since doSearch is an async function it will always return a promise, and thus must be awaited.
As a quick and dirty way to do that you could wrap your call in an immediately invoked asynchronous function like so:
// ...
(async () => console.log(await doSearch(client, opts)))();
// ...
For more info you might check out the MDN docs on asynchronous functions
I think there are a few issues in the provided code snippet. As #danh pointed out, you need to await the doSearch call. However, you may not be able to do that at the top level if your environment doesn't support top-level await. In that case you'll want to wrap the call to doSearch in an async function and call that, assuming you need to wait for the search results.
// script constants:
const ldap = require('ldapjs');
const assert = require('assert');
const ldapServer = "ldap.example.com";
const adSuffix = "dc=example,dc=com"; // test.com
const client = getClient();
const fullName = "*doe*";
function getClient() {
    // Setup the connection to the ldap server
    ...
    return client;
}
async function doSearch(client, searchOptions) {
    console.log("Inside doSearch()");
    return new Promise((resolve, reject) => {
        users = '{"users": [';
        client.search(adSuffix, searchOptions, (err, res) => {
            if (err) {
                console.log(err);
                reject(err)
            }
            res.on('searchEntry', function(entry) {
                console.log("Entry: " + users.length);
                if (users.length > 11) {
                    users = users + "," + JSON.stringify(entry.object);
                } else {
                    users = users + JSON.stringify(entry.object);
                }
            });
            res.on('error', function(err) {
                console.error("Error: " + err.message);
                reject(err)
            });
            res.on('end', function(result) {
                console.log("end:");
                client.unbind();
                users = users + "]}";
                console.log(result);
                resolve(users)
            });
        });
    });
}
const opts = {
    scope: "sub",
    filter: `(cn=${fullName})`,
    attributes: ["displayName", "mail", "title", "manager"]
};
(async function runAsyncSearch () {
    console.log("performing the search");
    try {
        const ldapUsers = await doSearch(client, opts); // Await the async results
        console.log("After promise has resolved.");
        console.log("Final Results: " + ldapUsers);
    } catch (err) {
        console.error(err.message);
    }
})(); // Execute the function immediately after defining it.
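Since the original goal was an Express REST endpoint, the same awaited call drops straight into a route handler. This is only a sketch with a hypothetical /users route; note that doSearch above unbinds the client at the end, so a real endpoint would need a fresh client per request:
const express = require("express");
const app = express();
// Hypothetical endpoint; client, opts and doSearch are the ones defined above.
app.get("/users", async (req, res) => {
    try {
        const ldapUsers = await doSearch(client, opts);
        // doSearch builds a JSON string, so send it with the JSON content type
        res.type("application/json").send(ldapUsers);
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});
app.listen(3000, () => console.log("listening on port 3000"));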

Node.js how to synchronously read lines from stream.Readable

I'm interacting with a child process through stdio, and I need to wait for a line from childProcess.stdout each time I write some command to childProcess.stdin.
It's easy to wrap an asynchronous method for writing like below:
async function write(data) {
    return new Promise(resolve => {
        childProcess.stdin.write(data, () => resolve());
    })
}
However, it turns out to be quite difficult when it comes to reading, since data from stdout must be processed using listeners. I've tried the following:
const LineReader = require("readline")
const reader = LineReader.createInterface(childProcess.stdout);
async function read() {
    return new Promise(resolve => {
        reader.once("line", line => resolve(line));
    })
}
But it always returns the first line.
I know I could achieve this using setInterval, and I've already implemented the functionality that way. But it obviously has an impact on performance, so now I'm trying to optimize it by wrapping it into an asynchronous method.
Any suggestions and solutions will be appreciated!
Well, I ended up with something pretty similar to what you were trying. It makes some assumptions that are mentioned in the code and needs more complete error handling:
const cp = require('child_process');
const readline = require('readline');
const child = cp.spawn("node", ["./echo.js"]);
child.on('error', err => {
    console.log(err);
}).on('exit', () => {
    console.log("child exited");
});
const reader = readline.createInterface({ input: child.stdout });
// this will miss line events that occurred before this is called
// so this only really works if you know the output comes one line at a time
function nextLine() {
    return new Promise(resolve => {
        reader.once('line', resolve);
    });
}
// this does not check for stdin that is full and wants us to wait
// for a drain event
function write(str) {
    return new Promise(resolve => {
        let ready = child.stdin.write(str, resolve);
        if (!ready) {
            console.log("stream isn't ready yet");
        }
    });
}
async function sendCmd(cmd) {
    // get line reader event handler installed so there's no race condition
    // on missing the return event
    let p = nextLine();
    // send the command
    await write(cmd);
    return p;
}
// send a sequence of commands and get their results
async function run() {
    let result1 = await sendCmd("hi\n");
    console.log(`Got '${result1}'`);
    let result2 = await sendCmd("goodbye\n");
    console.log(`Got '${result2}'`);
    let result3 = await sendCmd("exit\n");
    console.log(`Got '${result3}'`);
}
run().then(() => {
    console.log("done");
}).catch(err => {
    console.log(err);
});
And, for testing purposes, I ran it with this echo app:
process.stdin.on("data", data => {
    let str = data.toString();
    let ready = process.stdout.write("return: " + str, () => {
        if (str.startsWith("exit")) {
            process.exit();
        }
    });
    if (!ready) {
        console.log("echo wasn't ready");
    }
});
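If the child can emit lines before nextLine() is called, or several lines per command, a small buffer of pending lines and pending readers avoids dropping anything. This is a sketch of that variant, not part of the answer above:
// Buffering reader: queue lines that arrive early, resolve waiting readers in order.
function createLineBuffer(rl) {
    const lines = [];   // lines received but not yet consumed
    const waiters = []; // resolve callbacks waiting for the next line
    rl.on("line", line => {
        const waiter = waiters.shift();
        if (waiter) {
            waiter(line);
        } else {
            lines.push(line);
        }
    });
    return function nextLine() {
        return new Promise(resolve => {
            if (lines.length > 0) {
                resolve(lines.shift());
            } else {
                waiters.push(resolve);
            }
        });
    };
}
// Usage with the reader above:
// const nextLine = createLineBuffer(reader);
// const line = await nextLine();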

Correct way to wait for an event before returning

I'm using a csv-parser npm module to read a csv file, process it, and then create a statistical model based on the data. The issue I'm having is that the other file that uses this isn't waiting for the model to finish before moving on, so it ends up trying to use values/methods that are not yet defined. Based on other posts, this is what I have:
this.read = async function() {
    return new Promise((resolve, reject) => {
        console.log("in mv read");
        fs.createReadStream("./assets/stats-csv.csv")
            .pipe(csv({
                mapValues: ({ header, index, value }) => this.toNumber(header, value)
            }))
            .on('data', (data) => this.process(data))
            .on('error', err => {
                reject(err);
            })
            .on('end', () => {
                this.model();
                console.log('finished mv model');
                resolve(true);
            });
    })
}
And then the other file uses the method the following way:
this.train_mv = async function() {
    console.log("in train mv wrapper")
    const success = await this.mvReg.read();
    return success;
    //console.log(success);
}
I added the "success" bit just to see if returning and using a value from the promise would help, but it doesn't. The function just moves on and doesn't even go to the "return success" line. Am I missing something about async/await? Shouldn't the train_mv function pause and wait until the promise resolves? I would appreciate any help. Thanks!
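One thing worth checking here: await only pauses the function it appears in, so whatever calls train_mv must also await it (or chain .then on it); otherwise that caller keeps going while the CSV is still being read. A minimal sketch with a hypothetical caller:
// Hypothetical caller: train_mv returns a promise, so it has to be awaited as well.
async function start(model) {
    const success = await model.train_mv(); // pauses here until read() resolves
    console.log("model ready:", success);
    // only use the model's values/methods after this point
}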

Order of script execution with Promise

I know that this question is almost the same as this one: Execution order of Promises, but can someone explain to me where my mistake is?
I have the next functions:
// The main function
function startTesting() {
    console.info("--- Thanks! Testing is running... ---");
    checkFolderExistence(dirPath)
        .then(checkFolderContent)
        .then(searchForImportFolder)
        .then(connectToDB)
        .catch(err => console.error("*** ERROR *** " + err));
}
function checkFolderExistence(path) {
    console.info('--- Checking the folder "' + path + '" existence... ---');
    let promise = new Promise(function(resolve, reject) {
        fs.readdir(path, (err) => {
            if (err) {
                console.error('*** ERROR **** The folder "C:\\For_testing" doesn\'t exist. Testing is stopped!!! ***');
            } else {
                console.info("--- The folder \"C:\\For_testing\" exists... ---");
                resolve(path);
            };
        });
    });
    return promise;
}
function checkFolderContent(path) {
    console.info('--- Checking the folder "' + path + '" content... ---');
    filesArray = fs.readdirSync(path);
    if (filesArray.length == 0) {
        console.error('*** ERROR *** There are no any files in ' + path + '. Testing is stopped!!! ***');
    } else {
        console.info('--- The folder is checked. It contains the next files: ---');
        for (let i = 0; i < filesArray.length; i++) {
            console.info(filesArray[i]);
        }
    };
}
function searchForImportFolder() {
    console.info('--- Searching for ".../Import" folder... ---');
    fs.readdir(destFolderPath64, (err) => {
        if (err) {
            fs.readdir(destFolderPath32, (err) => {
                if (err) {
                    console.error('*** ERROR *** The folder ".../Import" was not found ***');
                } else {
                    console.info('--- The folder ".../Import" was successfully found... ---');
                    trueDestPath = destFolderPath32;
                }
            });
        } else {
            console.info('--- The folder "C:/Program Files (x86)/StoreLine/Office/Import" was successfully found... ---');
            trueDestPath = destFolderPath64;
        }
    });
}
function connectToDB() {
    console.info('--- Connecting to the database... ---');
    let pool = new sql.ConnectionPool(config);
    pool.connect()
        .then(pool => {
            console.info("--- Connected to the database! ---");
            readDB(pool)
                .then(function() {
                    console.info("--- All needed information from DB was successfully received ---");
                })
                .catch(err => console.error("*** ERROR *** " + err));
        })
        .catch(err => {
            pool = new sql.ConnectionPool(configWithoutPassw);
            pool.connect()
                .then(pool => {
                    console.info("--- Connected to the database without the password! ---");
                    readDB(pool)
                        .then(function() {
                            console.info("--- All needed information from the DB was successfully received ---");
                        })
                        .catch(err => console.error("*** ERROR ***" + err));
                })
                .catch(err => {
                    console.error("*** ERROR *** Can't connect to the DB ***")
                    sql.close();
                });
        });
}
I need a strict order of execution of the functions: checkFolderContent => searchForImportFolder => connectToDB.
In fact the execution goes like this: checkFolderContent executes fully, then searchForImportFolder starts executing (I can see the line "--- Searching for ".../Import" folder... ---" in the console), but right after that connectToDB starts and the next line "--- Connecting to the database... ---" appears. And only after that line do I see "--- The folder ".../Import" was successfully found... ---" from the previous function.
What did I do wrong? I've read that a function used in .then() should return a promise. How can I do that?
searchForImportFolder doesn't return a promise, so the chain doesn't wait for that promise to complete. Do the same thing in searchForImportFolder that you've done in checkFolderExistence: Wrap the callback-style API in a promise.
A couple of notes:
checkFolderExistence should call reject in the error path; it doesn't currently.
Node provides a promisify function you can use to wrap callback-style API calls in promises, rather than doing it manually. Or you could use the promisify-fs npm module, or the promisify npm module that lets you promisify an entire API at once, or Node's own experimental promises API for fs.
You might want to make checkFolderContent async (again using promises) rather than using readdirSync, which holds up the main thread waiting on I/O.
If you're using any recent version of Node, you might want to switch to using async functions and the await keyword, as it lets you write your logical flow rather than writing a bunch of callbacks.
searchForImportFolder should return its result rather than setting a global.
So for instance, here are checkFolderExistence and searchForImportFolder using util.promisify (these assume searchForImportFolder should return its result, so you'll have to adjust code using it):
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);
function checkFolderExistence(path) {
    console.info('--- Checking the folder "' + path + '" existence... ---');
    return readdirPromise(path)
        .then(() => {
            console.info("--- The folder \"C:\\For_testing\" exists... ---");
            return path; // resolve with the original path, not the directory listing
        })
        .catch(error => {
            console.error('*** ERROR **** The folder "C:\\For_testing" doesn\'t exist. Testing is stopped!!! ***');
            throw error; // re-throw so the chain actually stops, as noted above
        });
}
// ...
function searchForImportFolder() {
    console.info('--- Searching for ".../Import" folder... ---');
    return readdirPromise(destFolderPath64)
        .then(() => {
            console.info('--- The folder "C:/Program Files (x86)/StoreLine/Office/Import" was successfully found... ---');
            return destFolderPath64;
        })
        .catch(() => readdirPromise(destFolderPath32)
            .then(() => {
                console.info('--- The folder ".../Import" was successfully found... ---');
                return destFolderPath32;
            })
            .catch(error => {
                console.error('*** ERROR *** The folder ".../Import" was not found ***');
                throw error;
            })
        );
}
Or, if you don't need all that logging (presumably it was for debugging), checkFolderExistence just becomes readdirPromise, and searchForImportFolder becomes:
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);
// ...
function searchForImportFolder() {
    console.info('--- Searching for ".../Import" folder... ---');
    return readdirPromise(destFolderPath64)
        .then(() => {
            return destFolderPath64;
        })
        .catch(() => readdirPromise(destFolderPath32).then(() => destFolderPath32));
}
And here they are using util.promisify and async/await:
const { promisify } = require("util");
const readdirPromise = promisify(fs.readdir);
// ...
async function searchForImportFolder() {
    try {
        await readdirPromise(destFolderPath64);
        return destFolderPath64;
    } catch (error) {
        await readdirPromise(destFolderPath32);
        return destFolderPath32;
    }
}
If you want to avoid searching for the two different folders repeatedly, a simple tactic is just to remember the promise from searchForImportFolder and then use then on it any time you need that value:
const importFolderPromise = searchForImportFolder();
...then when you need it:
importFolderPromise.then(folder => {/*...*/});
...or in an async function:
const folder = await importFolderPromise;
The search will only happen once.
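Putting the pieces together, the strict checkFolderExistence => checkFolderContent => searchForImportFolder => connectToDB order falls out naturally with async/await. A sketch, assuming the promisified helpers above and that connectToDB is adjusted to accept the folder and return its promise chain:
async function startTesting() {
    console.info("--- Thanks! Testing is running... ---");
    try {
        const path = await checkFolderExistence(dirPath);
        checkFolderContent(path);                    // synchronous in the original code
        const importFolder = await searchForImportFolder();
        await connectToDB(importFolder);
    } catch (err) {
        console.error("*** ERROR *** " + err);
    }
}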
