I am having a hard time understanding JavaScript Promises. I am searching one of my Mongoose models for objects that meet a certain condition and, if they exist, I want to convert each object into a plain JS object and add a property onto it.
Unfortunately, I am unable to wrap my head around how I can ensure my forEach loop will run completely before my promise ends up resolving. Please see my code.
// Called to check whether a user has participated in a given list of challenges
participationSchema.statics.getParticipation = function(user, challenges) {
return new Promise((resolve, reject) => {
challengesArray = [];
challenges.forEach((challenge) => {
// Model#findOne() is Async--how to ensure all these complete before promise is resolved?
Participation.findOne({user, challenge})
.then((res) => {
if (res) {
var leanObj = challenge.toObject();
leanObj.participation = true;
challengesArray.push(leanObj);
}
})
.catch(e => reject(e));
})
console.log("CHALLENGES ARRAY", challengesArray); // Challenges Array empty :(
resolve(challengesArray);
});
}
I've looked through similar questions, but am unable to get to an answer. Appreciate the help.
So, what is happening when you call getParticipation is that the forEach loop runs all the way through and all the individual promises for Participation.findOne are created but not yet resolved. The execution doesn't wait for them to resolve; it continues after the forEach and resolves the top-level promise with challengesArray, which is still empty at that point. Some time after that, the promises created in the forEach start resolving, but their results are lost.
Also, as Bergi mentioned in the comments, nesting promises is considered an anti-pattern; promises should be chained, not nested.
What you want is to use something like Promise.all to wait for all of your promises to finish first, then you filter out all non-existing results and finally return the array.
participationSchema.statics.getParticipation = function(user, challenges) {
  return Promise.all(challenges.map(challenge => {
    return Participation.findOne({user, challenge}).then(result => {
      if (result) {
        var leanObj = challenge.toObject();
        leanObj.participation = true;
        return leanObj;
      }
    });
  }))
  // at this point, results is an array of `leanObj` values and `undefined`s,
  // depending on whether the `findOne` call returned anything and the code
  // within the `if` above ran
  .then((results) => {
    return results.filter(result => !!result); // filter out `undefined` results so we only end up with lean objects
  });
}
Related
I have been struggling with this issue for a week and have researched myself close to death. I am a total newbie. I have managed to grasp the crux of promises, but I am failing to see how to include this in a loop.
I have an app that is looking through an array. There is some validation of the array against a mongoose database (which is taking time to run). I am trying to push items into a new array based on some of this validation. I know the validation is working because of the console log in the loop. However, my final array is not waiting for the loop to finish, which means I need to put the loop into a promise (or so I think), but the issue is that I don't know how to resolve it. The current output is a blank array instead of the validated array. Here is my code:
//dummy data of an array - this is originally extracted from a mongoose DB and works (it's my first promise).
const appArray = ["5f8f25d554f1e43f3089ea5d",
"5f8f25e854f1e43f3089ea5e",
"5f8f25f454f1e43f3089ea5f",
"5f8f314ab92c7f406f28b83a",
"5f8fe50a9d44694cad91a01b",
"5f92e8a75d848870e015dff3",
"5f92e8b35d848870e015dff4",
"5f92e8cb5d848870e015dff5",
"5f8fe51d9d44694cad91a01c"];
//the second promise takes the array above and validates it against another collection on mongoose
function myPromise2 (response){
return new Promise((resolve, reject) => {
let appoints = [];
response.forEach(e => {
//loop through each item of the array and look against Appointment collection
Appointment.findById(e, function(err, foundApp){
//some validation supposed to happen here and then pushed into a new array
appoints.push(foundApp);
console.log(appoints);
})
})
//once completed supposed to resolve and return
resolve(appoints);
})
};
myPromise2(appArray).then((response) => {console.log(response)});
Here is an example which should work. Add a promise for each element to the array, then return Promise.all so the outer promise resolves once all of the individual promises have resolved.
// dummy data of an array - this is originally extracted from a mongoose DB and works (it's my first promise).
const appArray = ["5f8f25d554f1e43f3089ea5d",
"5f8f25e854f1e43f3089ea5e",
"5f8f25f454f1e43f3089ea5f",
"5f8f314ab92c7f406f28b83a",
"5f8fe50a9d44694cad91a01b",
"5f92e8a75d848870e015dff3",
"5f92e8b35d848870e015dff4",
"5f92e8cb5d848870e015dff5",
"5f8fe51d9d44694cad91a01c"];
// the second promise takes the array above and validates it against another collection on mongoose
function myPromise2 (response) {
let appoints = [];
response.forEach(e => {
appoints.push(new Promise((resolve) => {
//loop through each item of the array and look against Appointment collection
Appointment.findById(e, function(err, foundApp) {
//some validation supposed to happen here and then pushed into a new array
resolve(foundApp);
})
}))
})
return Promise.all(appoints)
};
myPromise2(appArray).then((response) => {console.log(response)});
Points to address:
Use the promise that mongoose provides through the .exec() method. This way you don't need new Promise
Collect these individual promises in an array (use .map instead of .forEach), and pass this array to Promise.all
If you do this, the code for myPromise2 reduces to the following:
function myPromise2 (response){
return Promise.all(response.map(e => Appointment.findById(e).exec()));
}
here is my suggestion:
const appArray = [
"5f8f25d554f1e43f3089ea5d",
"5f8f25e854f1e43f3089ea5e",
"5f8f25f454f1e43f3089ea5f",
"5f8f314ab92c7f406f28b83a",
"5f8fe50a9d44694cad91a01b",
"5f92e8a75d848870e015dff3",
"5f92e8b35d848870e015dff4",
"5f92e8cb5d848870e015dff5",
"5f8fe51d9d44694cad91a01c"
];
function myPromise2 (response){
  return Promise.all(response.map(id => {
    return Appointment.findById(id).exec();
  }));
};

myPromise2(appArray)
  .then(console.log)                    // response array
  .catch(err => { /* handle errors */ });
// you can also async/await the calling part
you can also use one of:
Promise.allSettled
Promise.any (ES2021)
Promise.race
it just depends on how you would like to handle the responses/failures (see the allSettled sketch below).
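For example, here is a hedged sketch of the same lookup using Promise.allSettled, so that one failed findById doesn't reject the whole batch (it assumes the same Appointment model and appArray from above):

function myPromise2Settled(response) {
  return Promise.allSettled(response.map(id => Appointment.findById(id).exec()))
    .then(results => results
      .filter(r => r.status === 'fulfilled') // drop lookups that rejected
      .map(r => r.value));                   // keep only the found documents
}

myPromise2Settled(appArray).then(console.log);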
A good alternative to consider may be async/await; have a look at Async_await. This will hopefully answer all your issues.
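As a hedged sketch (assuming the same Appointment model and appArray as above), the async/await version could look like this:

async function getAppointments(ids) {
  const appoints = [];
  for (const id of ids) {
    // await pauses this loop until each individual lookup completes
    const found = await Appointment.findById(id).exec();
    appoints.push(found);
  }
  return appoints;
}

getAppointments(appArray).then(console.log);

Note that this version runs the lookups one after another; you can still combine async/await with Promise.all if you want them to run in parallel.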
It's also probably a good idea to look into how the JS event loop system works; see the guide here.
I'm trying to use Promise.all() with an array of Promises that is populated inside a forEach loop right before, but it seems like Promise.all() is not waiting for all the promises to complete before executing its callback.
What's wrong with the following code? (I tried to simplify it before posting, so parts of it might not make complete sense, but the promises and loops are all there.)
class test {
constructor(sql) {
Promise.all([this.sync(sql, 0), this.sync(sql, 1)]).then((data) => {
console.log(data);
});
}
sync(sql, id = 0) {
return new Promise((resolve, reject) => {
request.get('http://localhost/test/' + id, {
json: true
}, (req, res) => {
var promises = [];
res.body['items'].forEach(item => {
promises.push(new Promise((resolve, reject) => {
this.existingRecord(sql, item['id']).then(() => {
resolve(false);
}).catch(() => {
this.add(sql, item).then(resolve(id));
})
}))
});
Promise.all(promises).then((data) => resolve(data));
});
});
}
add(sql, data) {
return new Promise((resolve, reject) => {
console.log('Inserting ' + data['id']);
var request = new sql.Request();
var query = `INSERT INTO test (col1, col2) VALUES (${utils.prepareInsertdata(data)})`;
request.query(query, (err, result) => {
if (err) {
console.log('ERROR INSERTING: ' + data['id']);
console.log(err);
}
resolve();
});
});
}
}
First off, you're making it much harder to write good, clean, error-free code when you have a mix of promises and regular callbacks in your control flow. I find that the best way to write asynchronous code using promises is to first take any asynchronous operations that are not promise based and create promise-based wrappers for them and then write my logic and control flow using only promises. This makes a consistent path for flow of control and for error handling and it removes the mess of promisifying things from the actual main logic.
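For illustration, here is a hedged sketch of that "promisify first" step for the callback-based request.get used in the question (the request-promise module used below does this for you; this is only to show the wrapper technique):

function getJson(url) {
  return new Promise((resolve, reject) => {
    // request's callback signature is (error, response, body)
    request.get(url, { json: true }, (err, res, body) => {
      if (err) return reject(err);
      resolve(body);
    });
  });
}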
Then, I see several significant issues in your code.
Asynchronous operations in the constructor
It's almost never a good idea to put asynchronous operations in the constructor. This is because the constructor HAS to return the object itself, so that leaves no simple way to communicate back to the code that created your object when the asynchronous operations are actually done and whether they succeeded or failed. It is not entirely clear to me what you're trying to accomplish with those async operations, but this is likely a bad design pattern. I favor a factory function that returns a promise that resolves to the new object for combining the creation of an object with asynchronous operations. This gives you everything you need: a fully formed object, knowledge of when the async operations are done, and an ability to have error handling for the async operations. You can see more about this factory function option and some other design options here:
Asynchronous operations in constructor
Improve .then() handler construction
When you do this:
this.add(sql, item).then(resolve(id));
You are calling resolve(id) immediately and passing that to .then() rather than waiting for the .then() handler to be called before calling resolve(id). All of this is complicated because you're mixing regular callbacks and promises.
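In other words, the handler needs to be a function so that resolve(id) only runs after add() has finished; a minimal sketch of what was probably intended:

this.add(sql, item).then(() => resolve(id));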
Creating new wrapped promises rather than just returning existing promises
This is related to your mix of regular callbacks and regular promises, but you'd much rather just return an existing promise than wrap it in a new promise that you have to manually resolve and reject. More than half the time, you will miss proper error handling when manually wrapping things in a new promise and it just results in more code than is needed.
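A small hedged before/after sketch of that point, using a generic existingRecord(sql, id) call that already returns a promise:

// Wrapped (avoid): a manual promise around something that is already a promise
function checkRecordWrapped(sql, id) {
  return new Promise((resolve, reject) => {
    existingRecord(sql, id).then(resolve, reject); // redundant wrapper
  });
}

// Preferred: just return the promise you already have
function checkRecord(sql, id) {
  return existingRecord(sql, id);
}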
Race Conditions
In any sort of multi-user database environment, you can't write database code such as:
if (record exists) {
do one thing
} else {
create new record
}
This is a race condition. If some other database request comes in during the processing of this, it could change the database in the middle of this and you'd be trying to create a record that just got created by another piece of code.
The usual solution varies by database (and you don't say exactly which database library you're using). Usually, you want to let the database manage the creation of unique records making it so that a duplicate record (by whatever key you're managing uniqueness in this table by) isn't allowed in the database and the concurrency of that is managed by the database itself. Some databases have an atomic operation such as findOrCreate() that will find an existing record or create a new one in an atomic fashion. Other databases have other approaches. But, it's important to make sure that adding unique records to the database is an atomic operation that can't ever create unwanted duplicates.
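As a purely illustrative, hedged sketch of the "let the database enforce uniqueness" idea (the insert call and the duplicate-key check below are hypothetical; the real error shape depends on your database driver):

// Assumes a unique constraint/index on the item's key and a driver whose
// insert returns a promise that rejects on a constraint violation.
function addIfMissing(db, item) {
  return db.insert(item)
    .then(() => ({ created: true, item }))
    .catch(err => {
      if (isDuplicateKeyError(err)) {      // hypothetical helper: check your driver's error code
        return { created: false, item };   // someone else inserted it first; not a failure
      }
      throw err;                           // any other error should propagate
    });
}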
I'd suggest this implementation:
// use promise version of request library (already promisified for us)
const rp = require('request-promise');
class test {
constructor() {
}
init(sql) {
return Promise.all([this.sync(sql, 0), this.sync(sql, 1)]).then((data) => {
console.log(data);
// do something with the data here - probably store it in instance data
});
}
sync(sql, id = 0) {
return rp.get('http://localhost/test/' + id, {json: true}).then(body => {
// process all items (request-promise resolves with the parsed body directly)
return Promise.all(body.items.map(item => {
return this.existingRecord(sql, item.id).then(() => {
return false;
}).catch(() => {
// it's probably bad form here to do this for all possible database errors
// probably this should be looking for a specific error of id not found
// or something like that.
// This is also likely a race condition. You would typically avoid the race
// condition by making the item key unique in the database and just doing an add and letting
// the database tell you the add failed because the item already exists
// This will allow the database to control the concurrency and avoid race conditions
return this.add(sql, item);
});
}));
});
}
}
// factory function that returns promise that resolves to a new object
// don't use new test() elsewhere
function createTestObj(sql) {
let t = new test();
return t.init(sql).then(() => {
// resolve to our new object
return t;
});
}
For your add() method, I'd switch to using the promise interface in your sql database. There should either be one built-in or a 3rd party package that will add one on top of your database interface. This will prevent the manual creation of promises and the incomplete error handling in your add() method.
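If your driver doesn't expose a promise interface directly, here's a hedged sketch of what add() could look like using util.promisify, assuming request.query takes a standard (err, result) callback as in your current code:

const util = require('util');

function add(sql, data) {
  const request = new sql.Request();
  const query = `INSERT INTO test (col1, col2) VALUES (${utils.prepareInsertdata(data)})`;
  // promisify the callback-based query method (bind it so `this` stays correct)
  const queryAsync = util.promisify(request.query.bind(request));
  return queryAsync(query);
}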
I'm trying to export an array of data objects for later use. Though I can use promises to wait until all data has been added to the array before logging, I noticed that when I went to use that data I couldn't, because even with Promise.all the length of the array was still zero, as if nothing had changed.
I tried logging each time doc.data() was pushed to exportArray and I noticed that it logs after it outputs the array. So for example...
Expected Output
doc.data() // For Each doc
Array[] // Filled with data and length 54
Length: 54
Actual Output
Array[] // Filled with data and length 54
Length: 0
doc.data() // For Each doc
let exportArray = [];
let promises = [];
db.collection('lists').doc('List 1').collection("members")
.get().then(function(querySnapshot) {
querySnapshot.forEach(function(doc) {
promises.push(
new Promise(function (resolve, reject) {
exportArray.push(doc.data());
resolve();
console.log('before');
})
);
});
});
Promise.all(promises).then(function () {
console.log(exportArray); // Logs correctly with all data with length 54
console.log(exportArray.length); // Logs as 0 for some reason
});
Ideally this should output the exportArray with its data AND the length being 54. However, it does output the data but the length is output as 0. (And yes, I clicked on the data array in the console and it shows a length of 54.)
Why does the array get populated but I'm unable to use methods on it like exportArray.length correctly?
You must call Promise.all on promises after it has been filled with promises, which happens asynchronously in a then callback. Right now you execute it synchronously, when none of that has happened yet.
So do:
db.collection('lists').doc('List 1').collection("members").get().then(function(querySnapshot) {
let promises = querySnapshot.docs.map(function(doc) { // <-- use docs.map
return doc.data(); // <-- just return `data()`. No need for a new promise
});
// Must be here:
return Promise.all(promises).then(function (exportArray) { // <--- data arg!
console.log(exportArray);
console.log(exportArray.length);
});
});
Notes:
There is no need for new Promise when you have the value to resolve with readily available.
Instead of forEach, get the array from the query snapshot with .docs and the JS built-in .map().
The fact that you see the array in the console but with a length of 0 is the behaviour of the console: it only logs the reference to the array, but when you expand it in the console it has in the meantime been populated, so you see the data. But it was not there at the moment of the logging, which is what the length of 0 is telling you.
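You can see this console behaviour with a tiny standalone snippet: the array reference is logged while still empty and filled afterwards, so expanding it later shows data, while the length captured at log time is 0:

const arr = [];
console.log(arr);         // appears populated once you expand it later in the console
console.log(arr.length);  // 0 - the length was read at log time
setTimeout(() => arr.push(1, 2, 3), 0); // filled after the logs ran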
Simplification
According to the firebase documentation, doc.data() returns the data, not a promise, so there is no reason to use Promise.all, a simple map should suffice:
db.collection('lists').doc('List 1').collection("members").get().then(function(querySnapshot) {
return querySnapshot.docs.map(function(doc) {
return doc.data();
});
}).then(function (exportArray) {
console.log(exportArray);
console.log(exportArray.length);
});
Since db.collection('lists').doc('List 1').collection("members").get() returns immediately with a promise that resolves only after the query completes, your code will go on to execute Promise.all() against an empty list and also return immediately because there's nothing to wait on. Some time after that, your snapshots will be ready and promises will be populated.
You should call Promises.all() only after the entire array has been populated.
I'm not very familiar with the db you use, but just fixing the antipatterns in your code should solve your problem. I guess that's about what your code should look like.
db.collection('lists')
  .doc('List 1')
  .collection("members")
  .get()
  .then(querySnapshot => querySnapshot.docs.map(doc => doc.data()))
  .then(promises => Promise.all(promises))
  .then(exportArray => {
    console.log(exportArray);        // logs the full array
    console.log(exportArray.length); // logs the correct length
  });
About the antipattern:
Avoid using new Promise() and similar. It's rarely necessary; usually you already have a Promise chain you can derive from.
Promises are wrapping values that you don't have yet. Don't try to "unwrap" these values by doing something along the lines of somePromise.then(value => { externalVariable = value; }) or in your case it's an Array.
You now have a variable that will (at some point in the future) contain the value you want, but at the moment is empty/invalid. So now you have to implement your own state management to check when the value has become valid; basically duplicating most of the logic of the Promise ;)
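A hedged illustration of the difference, using a generic somePromise:

// Anti-pattern: unwrap into an external variable, then hope it's ready in time
let externalVariable = [];
somePromise.then(value => { externalVariable = value; });
console.log(externalVariable.length); // runs too early - still 0

// Preferred: stay inside the chain; the value is guaranteed to exist here
somePromise.then(value => {
  console.log(value.length);
});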
So basically I have a web application that retrieves data from Firebase, and since it takes a lot of time to retrieve data from Firebase, I used a promise to make my code populate at the right time. Here is my code:
var promise = getDataFirebase();
promise.then(function () {
console.log(Collect);
console.log("firsst");
return getDataFirebaseUser();
}).then(function () {
console.log("Second");
});
function getDataFirebase() {
return new Promise(function (resolve, reject) {
refReview.on("value", function (snap) {
var data = snap.val();
for (var key in data) {
Collect.push({
"RevieweeName": data[key].revieweeID.firstname.concat(" ", data[key].revieweeID.lastname),
"ReviewerName": data[key].reviewerID.firstname.concat(" ", data[key].reviewerID.lastname),
rating: data[key].rating,
content: data[key].content,
keyOfReviewee: data[key].revieweeID.userID
})
var getDataToUsers = firebase.database().ref("users").child(data[key].revieweeID.userID);
getDataToUsers.once("value", async function (snap) {
var fnLn = snap.val();
var first = fnLn.isTerminated;
console.log("terminateStatus", first);
});
}//end of for loop
resolve();
}); //end of snap
});
}
So in the function getDataFirebase, data is retrieved from Firebase and is pushed to an array called Collect. After pushing one row, it queries Firebase again for another table of data, then continues the loop. The problem here is that I want all processes to finish before the promise resolves.
The console output according to the code is as follows:
Collect (array)
first
Second
terminateStatus, 1
It should be:
Collect (array)
first
terminateStatus,1
second
I'm not 100% sure how your code is working but it looks like you're doing the right thing on refReview.on("value"): creating a promise before calling Firebase, and then resolving it afterwards. But you're not doing that on getDataToUsers.once("value"). There, you're firing the event and not waiting for it to return — the for-loop continues on, and all the callbacks are processed later, but resolve is at the end of the for-loop, so it's too late by then.
My guess is that you thought the async keyword would cause the for-loop to wait for that job to complete, but actually all it does here is cause the callback to return a promise — which is ignored by the on function.
You have a few options, but probably the best will be to use Promise.all, which accepts an array of promises and waits for them all to be resolved — but you should probably use Firebase's Promise API rather than bolting promises onto the event API. So it'll look something like:
refReview.once('value')
  .then(snap => Promise.all(
    // snap.val() is an object keyed by push IDs, so take its values first
    Object.values(snap.val()).map(item =>
      firebase.database()
        .ref("users")
        .child(item.revieweeID.userID)
        .once('value')
        .then(snap => {
          console.log('terminated');
        })
    )
  ));
(except with the actual functionality added in, natch)
I have a function that reads a directory and copies and creates a new file within that directory.
function createFiles (countryCode) {
fs.readdir('./app/data', (err, directories) => {
if (err) {
console.log(err)
} else {
directories.forEach((directory) => {
fs.readdir(`./app/data/${directory}`, (err, files) => {
if (err) console.log(err)
console.log(`Creating ${countryCode}.yml for ${directory}`)
fs.createReadStream(`./app/data/${directory}/en.yml`).pipe(fs.createWriteStream(`./app/data/${directory}/${countryCode}.yml`))
})
})
}
})
}
How do I do this using promises or Promise All to resolve when it's complete?
First, you need to wrap each file stream in a promise that resolves when the stream emits the finish event:
new Promise((resolve, reject) => {
fs.createReadStream(`./app/data/${directory}/en.yml`).pipe(
fs.createWriteStream(`./app/data/${directory}/${countryCode}.yml`)
).on('finish', resolve);
});
Then you need to collect these promises in an array. This is done by using map() instead of forEach() and returning the promise:
var promises = directories.map((directory) => {
...
return new Promise((resolve, reject) => {
fs.createReadStream( ...
...
});
});
Now you have a collection of promises that you can wrap with Promise.all() and use with a handler when all the wrapped promises have resolved:
Promise.all(promises).then(completeFunction);
In recent versions of Node (8.0.0 and later), there's a new util.promisify function you can use to get a promise. Here's how we might use it:
// Of course we'll need to require important modules before doing anything
// else.
const util = require('util')
const fs = require('fs')
// We use the "promisify" function to make calling promisifiedReaddir
// return a promise.
const promisifiedReaddir = util.promisify(fs.readdir)
// (You don't need to name the variable promisifiedXYZ - you could just do
// `const readdir = util.promisify(fs.readdir)` - but I call it
// promisifiedReaddir here for clarity.)
function createFiles(countryCode) {
// Since we're using our promisified readdir function, we'll be storing
// a Promise inside of the readdirPromise variable..
const readdirPromise = promisifiedReaddir('./app/data')
// ..then we can make something happen when the promise finishes (i.e.
// when we get the list of directories) by using .then():
return readdirPromise.then(directories => {
// (Note that we only get the parameter `directories` here, with no `err`.
// That's because promises have their own way of dealing with errors;
// try looking up on "promise rejection" and "promise error catching".)
// We can't use a forEach loop here, because forEach doesn't know how to
// deal with promises. Instead we'll use a Promise.all with an array of
// promises.
// Using the .map() method is a great way to turn our list of directories
// into a list of promises; read up on "array map" if you aren't sure how
// it works.
const promises = directories.map(directory => {
// Since we want an array of promises, we'll need to `return` a promise
// here. We'll use our promisifiedReaddir function for that; it already
// returns a promise, conveniently.
return promisifiedReaddir(`./app/data/${directory}`).then(files => {
// (For now, let's pretend we have a "copy file" function that returns
// a promise. We'll actually make that function later!)
return copyFile(`./app/data/${directory}/en.yml`, `./app/data/${directory}/${countryCode}.yml`)
})
})
// Now that we've got our array of promises, we actually need to turn them
// into ONE promise, that completes when all of its "children" promises
// are completed. Luckily there's a function in JavaScript that's made to
// do just that - Promise.all:
const allPromise = Promise.all(promises)
// Now if we do a .then() on allPromise, the function we passed to .then()
// would only be called when ALL promises are finished. (The function
// would get an array of all the values in `promises` in order, but since
// we're just copying files, those values are irrelevant. And again, don't
// worry about errors!)
// Since we've made our allPromise which does what we want, we can return
// it, and we're done:
return allPromise
})
}
Okay, but, there's probably still a few things that might be puzzling you..
What about errors? I kept saying that you don't need to worry about them, but it is good to know a little about them. Basically, in promise-terms, when an error happens inside of a util.promisify'd function, we say that that promise rejects. Rejected promises behave mostly the same way you'd expect errors to; they throw an error message and stop whatever promise they're in. So if one of our promisifiedReaddir calls rejects, it'll stop the whole createFiles function.
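For example (a hedged sketch, with 'fr' as a made-up country code), the caller can handle any rejection that bubbles out of createFiles with a single .catch:

createFiles('fr')
  .then(() => console.log('All files created'))
  .catch(err => console.error('Something went wrong:', err));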
What about that copyFile function? Well, we have two options:
Use somebody else's function. No need to re-invent the wheel! quickly-copy-file looks to be a good module (plus, it returns a promise, which is useful for us).
Program it ourselves.
Programming it ourselves isn't too hard, actually, but it takes a little bit more than simply using util.promisify:
function copyFile(from, to) {
// Hmm.. we want to copy a file. We already know how to do that in normal
// JavaScript - we'd just use a createReadStream and pipe that into a
// createWriteStream. But we need to return a promise for our code to work
// like we want it to.
// This means we'll have to make our own hand-made promise. Thankfully,
// that's not actually too difficult..
return new Promise((resolve, reject) => {
// Yikes! What's THIS code mean?
// Well, it literally says we're returning a new Promise object, with a
// function given to it as an argument. This function takes two arguments
// of its own: "resolve" and "reject". We'll look at them separately
// (but maybe you can guess what they mean already!).
// We do still need to create our read and write streams like we always do
// when copying files:
const readStream = fs.createReadStream(from)
const writeStream = fs.createWriteStream(to)
// And we need to pipe the read stream into the write stream (again, as
// usual):
readStream.pipe(writeStream)
// ..But now we need to figure out how to tell the promise when we're done
// copying the files.
// Well, we'll start by doing *something* when the pipe operation is
// finished. That's simple enough; we'll just set up an event listener:
writeStream.on('close', () => {
// Remember the "resolve" and "reject" functions we got earlier? Well, we
// can use them to tell the promise when we're done. So we'll do that here:
resolve()
})
// Okay, but what about errors? What if, for some reason, the pipe fails?
// That's simple enough to deal with too, if you know how. Remember how we
// learned a little on rejected promises, earlier? Since we're making
// our own Promise object, we'll need to create that rejection ourself
// (if anything goes wrong).
writeStream.on('error', err => {
// We'll use the "reject" argument we were given to show that something
// inside the promise failed. We can specify what that something is by
// passing the error object (which we get passed to our event listener,
// as usual).
reject(err)
})
// ..And we'll do the same in case our read stream fails, just in case:
readStream.on('error', err => {
reject(err)
})
// And now we're done! We've created our own hand-made promise-returning
// function, which we can use in our `createFiles` function that we wrote
// earlier.
})
}
..And here's all the finished code, so that you can review it yourself:
const util = require('util')
const fs = require('fs')
const promisifiedReaddir = util.promisify(fs.readdir)
function createFiles(countryCode) {
const readdirPromise = promisifiedReaddir('./app/data')
return readdirPromise.then(directories => {
const promises = directories.map(directory => {
return promisifiedReaddir(`./app/data/${directory}`).then(files => {
return copyFile(`./app/data/${directory}/en.yml`, `./app/data/${directory}/${countryCode}.yml`)
})
})
const allPromise = Promise.all(promises)
return allPromise
})
}
function copyFile(from, to) {
return new Promise((resolve, reject) => {
const readStream = fs.createReadStream(from)
const writeStream = fs.createWriteStream(to)
readStream.pipe(writeStream)
writeStream.on('close', () => {
resolve()
})
writeStream.on('error', err => {
reject(err)
})
readStream.on('error', err => {
reject(err)
})
})
}
Of course, this implementation isn't perfect. You could improve it by looking at other implementations - for example this one destroys the read and write streams when an error occurs, which is a bit cleaner than our method (which doesn't do that). The most reliable way would probably be to go with the module I linked earlier!
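A hedged sketch of that improvement, destroying both streams if either side errors so no half-finished file handles are left open:

function copyFileSafe(from, to) {
  return new Promise((resolve, reject) => {
    const readStream = fs.createReadStream(from)
    const writeStream = fs.createWriteStream(to)
    function fail(err) {
      // clean up both ends before rejecting
      readStream.destroy()
      writeStream.destroy()
      reject(err)
    }
    readStream.on('error', fail)
    writeStream.on('error', fail)
    writeStream.on('close', () => resolve())
    readStream.pipe(writeStream)
  })
}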
I highly recommend you watch funfunfunction's video on promises. It explains how promises work in general, how to use Promise.all, and more; and he's almost certainly better at explaining this whole concept than I am!
First, create a function that returns a promise:
function processDirectory(directory, countryCode) {
  return new Promise((resolve, reject) => {
    fs.readdir(`./app/data/${directory}`, (err, files) => {
      if (err) return reject(err);
      console.log(`Creating ${countryCode}.yml for ${directory}`);
      fs.createReadStream(`./app/data/${directory}/en.yml`)
        .pipe(fs.createWriteStream(`./app/data/${directory}/${countryCode}.yml`))
        .on('finish', resolve);
    });
  });
}
Then use Promise.all:
Promise.all(directories.map(directory => processDirectory(directory, countryCode)))
.then(...)
.catch(...);