How can I iterate through an array of data using Promises and return the data? I have seen the promises.push(asyncFunc) approach, but some of the entries in my array will fail, so from what I gather I can't use that.
var filesFromDisk = [
'41679_4_2015-09-06_17-02-12.mp4',
'41679_4_2015-09-06_17-02-12.smil',
'41680_4_2015-09-09_10-44-05.mp4'
];
start(filesFromDisk)
.then((data) => {
console.log(data); // Want my data here
});
I call start(dbFiles) from another file, which is why I want the data returned there.
function start(dbFiles) {
var listOfFiles = [],
promises = [];
return new Promise((fulfill, reject) => {
for (var i = 0; i < dbFiles.length; i++) {
getMp4(dbFiles[i])
.then((data) => {
listOfFiles = listOfFiles.concat(data);
console.log(listOfFiles);
})
}
fulfill(listOfFiles) // Need to happen AFTER for loop has filled listOfFiles
});
}
So for every entry in my array I want to check if the file with the new extension exists and, if so, read that file. If the file with the new extension does not exist, I fulfill with the original filename. My Promise.all chain works and all the data is returned in the for loop above (getMp4(dbFiles[i])).
function getMp4(filename) {
var mp4Files = [];
var smil = privateMethods.setSmileExt(localData.devPath + filename.toString());
return new Promise((fulfill, reject) => {
Promise.all([
privateMethods.fileExists(smil),
privateMethods.readTest(smil)
]).then(() => {
readFile(filename).then((files) => {
fulfill(files)
});
}).catch((err) => {
if (!err.exists) fulfill([filename]);
});
});
}
function readFile(filename){
var filesFromSmil = [];
return new Promise((fulfill, reject) => {
fs.readFile(localData.devPath + filename, function (err, res){
if (err) {
reject(err);
}
else {
xmlParser(res.toString(), {trim: true}, (err, result) => {
var entry = JSON.parse(JSON.stringify(result.smil.body[0].switch[0].video));
for (var i = 0; i < entry.length; i++) {
filesFromSmil.push(privateMethods.getFileName(entry[i].$.src))
}
});
fulfill(filesFromSmil);
}
});
});
};
Methods used in the Promise.all chain in getMp4 (I have no known problems with these):
var privateMethods = {
getFileName: (str) => {
var rx = /[a-zA-Z-1\--9-_]*.mp4/g;
var file = rx.exec(str);
return file[0];
},
setSmileExt: (videoFile) => {
return videoFile.split('.').shift() + '.smil';
},
fileExists: (file) => {
return new Promise((fulfill, reject) => {
try {
fs.accessSync(file);
fulfill({exists: true})
} catch (ex) {
reject({exists: false})
}
})
},
readTest: (file) => {
return new Promise((fulfill, reject) => {
fs.readFile(file, (err, res) => {
if (err) reject(err);
else fulfill(res.toString());
})
})
}
}
If you need them to run in parallel, Promise.all is what you want:
function start(dbFiles) {
return Promise.all(dbFiles.map(getMp4));
}
That starts the getMp4 operation for all of the files and waits until they all complete, then resolves with an array of the results. (getMp4 will receive multiple arguments: the value, its index, and a reference to the dbFiles array. But since it only uses the first, that's fine.)
Usage:
start(filesFromDisk).then(function(results) {
// `results` is an array of the results, in order
});
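Since the question mentions that some entries may fail: Promise.all rejects as soon as any input promise rejects. If you want to keep the successful results anyway, one option is a variant of start using Promise.allSettled (a sketch, assuming Node 12.9+ where allSettled is available):
function start(dbFiles) {
  return Promise.allSettled(dbFiles.map(getMp4)).then((outcomes) =>
    // keep only the values from the calls that succeeded
    outcomes
      .filter((o) => o.status === 'fulfilled')
      .map((o) => o.value)
  );
}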
Just for completeness, if you needed them to run sequentially, you could use the reduce pattern:
function start(dbFiles) {
return dbFiles.reduce(function(p, file) {
return p.then(function(results) {
return getMp4(file).then(function(data) {
results.push(data);
return results;
});
});
}, Promise.resolve([]));
}
Same usage. Note how we start with a promise resolved with [], then queue up a series of then handlers, each of which receives the array, does the getMp4 call, pushes the result onto the array when it arrives, and returns it; the final resolution value is the filled array.
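For what it's worth, the same sequential flow reads a bit more directly with async/await (a sketch, assuming an environment with async function support):
async function start(dbFiles) {
  const results = [];
  for (const file of dbFiles) {
    // each getMp4 call starts only after the previous one has finished
    results.push(await getMp4(file));
  }
  return results;
}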
Related
I'm seeking guidance on how best to handle the following scenario. I'm fairly new to JS and async development, so I'm trying to understand the best way to handle this. I call one API (callAPI), which returns an array of items. I need to take those items and send them to another API (callAPI2), but that API doesn't have a bulk method, so I have to call it once for each item in the array. Below is how I have it structured: the getArray promise returns the first array, and I send that array to the second promise (getIndividualData), where I loop and call the second API. I'm wondering if there are better ways to structure this. If I pass in a large array, I may need to pace the calls to the API so I don't get throttled... so maybe I need some version of Promise.all?
let getArray = function() {
return new Promise(function(resolve,reject) {
callApi.get().on('success', function(result, response) {
resolve(result);
});
});
}
let getIndividualData = function(arrayOfItems) {
return new Promise(function(resolve,reject) {
var responseArray = [];
for(var i = 0; i < arrayOfItems.length; i++) {
callApi2.get(arrayOfItems[i]).on('success', function(result, response) {
responseArray.push(result);
});
}
resolve(responseArray);
});
}
let failureCallback = function() {
return "Error!";
}
getArray().then(function(response) {
return getIndividualData(response);
}).then(function(finalArray) {
console.log(`The final array is ${JSON.stringify(finalArray)}`);
}).catch(failureCallback);
You can make a request for each item in a large array without getting throttled by implementing a concurrency throttler with a Set of Promises and async/await syntax. I've duplicated your code below, modifying the implementation of getIndividualData and passing in concurrency as an option.
let getArray = function() {
return new Promise(function(resolve,reject) {
callApi.get().on('success', function(result, response) {
resolve(result);
});
});
}
let getIndividualData = async function(arrayOfItems, { concurrency }) {
var promiseSet = new Set(),
responseArray = [],
i = 0;
while (i < arrayOfItems.length) {
if (promiseSet.size >= concurrency) {
// wait for one of the in-flight requests to settle before starting another
await Promise.race(promiseSet)
}
const promise = new Promise(function(resolve,reject) {
callApi2.get(arrayOfItems[i]).on('success', function(result, response) {
resolve(result)
})
})
// track the request so the concurrency check above has promises to race
promiseSet.add(promise)
responseArray.push(promise.then(result => {
promiseSet.delete(promise)
return result
}))
i += 1
}
return Promise.all(responseArray)
}
let failureCallback = function() {
return "Error!";
}
getArray().then(function(response) {
return getIndividualData(response, { concurrency: 10 });
}).then(function(finalArray) {
console.log(`The final array is ${JSON.stringify(finalArray)}`);
}).catch(failureCallback);
Reformulating this with a helper that promisifies an object exposing an .on('success') handler, plus an async function for the top-level .then()-ish code, gives us something like this...
To pace the API calls, add in p-limit or similar to getIndividualData.
function promisifyOnSuccessObj(onSuccessObj) {
return new Promise((resolve) => {
onSuccessObj.on("success", (result, response) => resolve(result));
// TODO: what about `response`?
// TODO: onSuccessObj.on('error')..?
});
}
function getIndividualData(arrayOfItems) {
// Returns an array of promises
return arrayOfItems.map((item) =>
promisifyOnSuccessObj(callApi2.get(item)),
);
}
async function doThings() {
const result = await promisifyOnSuccessObj(callApi.get());
const individualDatas = await Promise.all(getIndividualData(result));
console.log(`The final array is ${JSON.stringify(individualDatas)}`);
}
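As noted above, p-limit (or a similar limiter) can be dropped into getIndividualData to pace the calls. A sketch, assuming a CommonJS-compatible version of p-limit is installed (newer releases are ESM-only):
const pLimit = require("p-limit");

function getIndividualData(arrayOfItems) {
  // at most 10 calls to callApi2 are in flight at any one time
  const limit = pLimit(10);
  return arrayOfItems.map((item) =>
    limit(() => promisifyOnSuccessObj(callApi2.get(item))),
  );
}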
You could combine Promise.all, map and async...await syntax to end up with one array of resolved individual results, based on the previously resolved array promise.
const mockApi = {
request: (response) => {
return new Promise((resolve, reject) => {
setTimeout(() => resolve(response), 1000)
})
},
getArray() {
return this.request(['foo', 'bar', 'baz'])
},
getItem(item) {
return this.request(`Resolved: ${item}`)
}
}
async function getData() {
const array = await mockApi.getArray();
const final = await Promise.all(array.map(e => mockApi.getItem(e)));
console.log(final)
}
getData()
I have an array of chunked data that I need to upload one chunk at a time. My current implementation encapsulates the logic in a Promise.all(), since I need to return the result of the promise.
The problem with this approach is that all the uploads run concurrently, resulting in a Timeout error because the server can't process all the requests at the same time. How can I modify this method so that the upload is done one chunk at a time?
My code:
var chunks = _.chunk(variableRecords, 30);
return Promise.all(
chunks.map(chunk => this.portalService.updateDataForChart(variableId, chunk)))
.then((updateRes: boolean[]) => {
if (updateRes.every(updateStatus => updateStatus)) {
return this.executeRequest<HealthDataSource, boolean>({
path: `/variable/user/datasources/${dataSource.identifier}`,
method: 'PUT',
body: {
libelle: dataSource.datasource.libelle,
type: dataSource.datasource.type,
lastSyncDate: Math.max(maxDate, dataSource.datasource.lastSyncDate)
},
headers: this.getHeaders()
});
} else {
return false;
}
});
You need them to run in SEQUENCE; for...of is the way to go:
async function chunksSequence(chunks) {
for(const chunk of chunks) {
await // your other code here
}
};
If you need to return something
async function chunksSequence(chunks) {
let results = []
for(const chunk of chunks) {
let result = await // your other code here
results.push(result)
}
return results
};
Per the comment, if you need it wrapped in a promise on return:
async function chunksSequence(chunks) {
  // note: an async function already returns a promise, so the explicit
  // wrapper below is only needed if you really want to construct one yourself
  return new Promise(async (resolve, reject) => {
    let results = []
    for(const chunk of chunks) {
      let result = await // your other code here
      results.push(result)
    }
    resolve(results)
  })
};
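Filling the placeholder in with the call from the question, a concrete sketch might look like this (uploadChunksSequentially is a made-up name, written as a method of the same class as the question's code so this.portalService is available; untested):
async uploadChunksSequentially(variableId, variableRecords) {
  const chunks = _.chunk(variableRecords, 30);
  const results = [];
  for (const chunk of chunks) {
    // each upload starts only after the previous one has finished
    results.push(await this.portalService.updateDataForChart(variableId, chunk));
  }
  return results;
}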
You can do this with the help of Array.reduce()
const chunks = _.chunk(variableRecords, 30);
// wrap each chunk in a function that starts its upload only when called
const tasks = chunks.map(chunk => () => this.portalService.updateDataForChart(variableId, chunk));
return tasks.reduce((promiseChain, currentTask) => {
  return promiseChain.then(chainResults =>
    currentTask().then(currentResult =>
      [ ...chainResults, currentResult ]
    )
  );
}, Promise.resolve([])).then(arrayOfResults => {
  // Do something with all results
});
Source : https://decembersoft.com/posts/promises-in-serial-with-array-reduce/
If you don't / can't use await you could use something like this
function runSequenceItem(chunks, index) {
  // assumes each entry in chunks is a function that returns a promise
  return Promise.resolve(chunks[index]())
    .then(res => {
      index++
      if (index < chunks.length) {
        return runSequenceItem(chunks, index)
      } else {
        // this is not needed actually
        return 'done'
      }
    })
}
function runInSequence(chunks) {
  return runSequenceItem(chunks, 0)
}
If you also need the results, you can collect them in an array and return it at the end of the recursion:
function runSequenceItem(chunks, index, results) {
  // assumes each entry in chunks is a function that returns a promise
  return Promise.resolve(chunks[index]())
    .then(res => {
      results.push(res)
      index++
      if (index < chunks.length) {
        return runSequenceItem(chunks, index, results)
      } else {
        return results
      }
    })
}
function runInSequence(chunks) {
  return runSequenceItem(chunks, 0, [])
}
and then retrieve them at the end (runInSequence returns a promise, so read the results in a .then):
runInSequence(chunks).then(results => console.log(results))
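As with the reduce version, each entry passed to runInSequence is expected to be a function that returns a promise. Adapting the question's code, that array could be built like this (a sketch; chunkTasks is a made-up name):
const chunkTasks = _.chunk(variableRecords, 30)
  .map(chunk => () => this.portalService.updateDataForChart(variableId, chunk));

runInSequence(chunkTasks).then(results => {
  // results holds the value returned for each chunk upload, in order
});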
I think my problem is with the asynchronous nature of JS. I'm trying to push items into an array, but it doesn't seem to be updating... I added console.log statements inside the for loop and can see the array being populated with numbers, but when I console.log the array outside the loop, I get an empty array. I am using Mongoose.
Any suggestions?
Here's the code:
let collections = [];
return Promise.all(courts.map(court => {
return new Promise((resolve, reject) => {
return Promise.all(court.users.map(async user => {
let tempPromise = new Promise((resolve, reject) => {
setTimeout(() => {
resolve();
}, 5000);
});
return SignDetail.find({
userName: user.userName,
signStatus: "signIn",
}).then(function(sign) {
if (user.userName.endsWith('zs')) {
let signCount = 0;
if (sign.length > 1) {
for (let j = 0; j < sign.length; j++) {
let courtObj = {courtName: sign[j].userName}; //make court object
signCount++; //increment each time there's a signature
if (j === sign.length - 1) { //only push object in array when all signatures have been counted
courtObj.signCount = signCount;
collections.push(courtObj);
console.log(collections)
}
}
}
} //end here
});
return tempPromise;
})).then(_ => resolve(collections));
})
})).then(collections => {
// HERE you will your collection and you can use this promise where this function is being called.
console.log(collections);
});
SignDetail.find() is an async function, so you cannot return res.render synchronously. You need to return a promise from this function which resolves to the desired output.
You can do something like this.
let collections = [];
return Promise.all(courts.map(court => {
  return Promise.all(court.users.map(oUser => {
    return new Promise((resolve, reject) => {
      if (oUser.userName.endsWith('zs')) {
        SignDetail.find(
          { userName: oUser.userName, signStatus: 'signIn' },
          function(err, sign) {
            if (err) return reject(err);
            collections.push(sign.length);
            resolve();
          });
      } else {
        resolve();
      }
    });
  }));
})).then(_ => {
  // HERE you will have your collections array, and you can use this promise where this function is being called.
  console.log(collections);
});
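For comparison, since Mongoose queries are thenable, the same collection step can be written without any explicit new Promise wrapper. A sketch (collectSignCounts is a made-up name; adjust to the surrounding code):
async function collectSignCounts(courts) {
  const collections = [];
  for (const court of courts) {
    for (const user of court.users) {
      if (!user.userName.endsWith('zs')) continue;
      // await the Mongoose query directly; no callback needed
      const sign = await SignDetail.find({ userName: user.userName, signStatus: 'signIn' });
      collections.push(sign.length);
    }
  }
  return collections;
}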
I have searched high and low but can't get my head around promises. What I do understand is how to define one promise and use its result by using .then.
What I do not understand is how I can create a loop to query the database for different blocks of records. This is needed due to a limit set on the number of records to query.
The predefined promise api call is used like this:
let getRecords = (name) => {
return new Promise((resolve,reject) => {
xyz.api.getRecords(name, 1000, 1000, function(err, result){
// result gets processed here.
resolve(/* some variables here */);
});
});
};
going with what I am used to, I tried:
for (let i=1; i<90000; i+=500) {
xyz.api.getRecords('james', i, 500, function(err, result){
// result gets processed here.
});
}
But then I can't access the information (could be my wrong doing)
Also tried something like this:
function getRecords(name,i){
xyz.api.getRecords(name, i, 500, function(err, result){
// result gets processed here.
});
};
for (let i=1; i<90000; i+=500) {
var someThing = getRecords('james', i);
}
All tutorials only seem to use one query, and process the data.
How do I call the api function multiple times with different arguments, collect the data and process it once everything is retrieved?
The only thing I can think of is to write the info to a file, which seems like a terrible idea.
Using async/await
(async () => {
function getRecords(name,i){
// create new Promise so you can await for it later
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, function(err, result){
if(err) {
return reject(err);
}
resolve(result);
});
});
}
for (let i = 1; i < 90000; i += 500) {
// wait for the result in every loop iteration
const someThing = await getRecords('james', i);
}
})();
To handle errors you need to use a try/catch block:
try {
const someThing = await getRecords('james', i);
} catch(e) {
// handle somehow
}
Using only Promises
function getRecords(name, i) {
// create Promise so you can use Promise.all
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, function (err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
const results = [];
for (let i = 1; i < 90000; i += 500) {
// push Promise's to array without waiting for results
results.push(getRecords("james", i));
}
// wait for all pending Promise's
Promise.all(results).then((results) => {
console.log(results);
});
let count = 0;
function getRecords(name, i) {
return new Promise((resolve, reject) => {
setTimeout(() => {
// example results
resolve((new Array(10)).fill(0).map(() => ++count));
}, 100);
});
}
const results = [];
for (let i = 1; i < 9000; i += 500) {
results.push(getRecords("james", i));
}
Promise.all(results).then((results) => {
console.log("Results:", results);
console.log("Combined results:",[].concat(...results));
});
To handle errors you need to use a .catch() block:
Promise.all(results).then((results) => { ... }).catch((error) => {
// handle somehow
});
By returning a promise and calling your asynchronous function inside, you can resolve the result and then use it this way:
function getRecords (name, i) {
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, (err, result) => {
if (err) {
reject(err);
} else {
resolve(result);
}
});
});
}
for (let i = 1; i < 90000; i += 500) {
getRecords('james', i)
.then(result => {
// do stuff with result
})
}
Or, using async / await syntax:
async function getRecords (name, i) {
return new Promise((resolve, reject) => {
xyz.api.getRecords(name, i, 500, (err, result) => {
if (err) {
reject(err);
} else {
resolve(result);
}
});
});
}
// this needs to happen inside a function, node does not allow top level `await`
for (let i = 1; i < 90000; i += 500) {
const result = await getRecords('james', i);
// do stuff
}
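For example, the loop above can live inside an async IIFE (a sketch, reusing the getRecords defined just above):
(async () => {
  const allResults = [];
  for (let i = 1; i < 90000; i += 500) {
    // sequential: each batch is fetched only after the previous one resolves
    allResults.push(await getRecords('james', i));
  }
  // do stuff with allResults
})().catch(console.error);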
Get all of your records at once
const requests = [];
for (let i = 1; i < 90000; i += 500) {
requests.push(getRecords('james', i));
}
const results = await Promise.all(requests);
I have a promise function that executes an async function in a loop a few times for different data. I want to wait until all the async functions have executed and then call resolve() (or call a callback function in the non-promise version):
var readFiles = ()=>{
return new Promise((resolve,reject)=>{
var iterator = 0;
var contents = {};
for(let i in this.files){
iterator++;
let p = path.resolve(this.componentPath,this.files[i]);
fs.readFile(p,{encoding:'utf8'},(err,data)=>{
if(err){
reject(`Could not read ${this.files[i]} file.`);
} else {
contents[this.files[i]] = data;
iterator--;
if(!iterator) resolve(contents);
}
});
}
if(!iterator) resolve(contents); //in case of !this.files.length
});
};
I increase the iterator on every loop repetition, then decrease it in each async function's callback and check whether all async functions are done (iterator === 0); if so, I call resolve().
It works great, but seems not elegant and readable. Do you know any better way for this issue?
Following up the comment with some code and more detail!
Promise.all() takes an iterator, and waits for all promises to either resolve or reject. It will then return the results of all the promises. So instead of keeping track of when all promises resolve, we can create little promises and add them to an array. Then, use Promise.all() to wait for all of them to resolve.
const readFiles = () => {
const promises = [];
for(let i in files) {
const p = path.resolve(componentPath, files[i]);
promises.push(new Promise((resolve, reject) => {
fs.readFile(p, {encoding:'utf8'}, (err, data) => {
if(err) {
reject(`Could not read ${files[i]} file.`);
} else {
resolve(data);
}
});
}));
}
return Promise.all(promises);
};
const fileContents = readFiles().then(contents => {
console.log(contents)
})
.catch(err => console.error(err));
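One difference from the question's version: this resolves with an array of file contents in order, rather than an object keyed by filename. If you need that shape back, you can rebuild it from the results (a sketch, assuming files is the same array used above):
readFiles().then(results => {
  const contents = {};
  files.forEach((file, i) => {
    contents[file] = results[i];
  });
  console.log(contents);
});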
You only need to push all the Promises into an array and then pass it as an argument to Promise.all(arrayOfPromises).
Try something like this:
var readFiles = () => {
  var promises = [];
  let contents = {};
  var keys_files = Object.keys(this.files);
  if (keys_files.length <= 0) {
    var promise = new Promise((resolve, reject) => {
      resolve(contents);
    });
    promises.push(promise);
  }
  keys_files.forEach((key) => {
    var file = this.files[key];
    var promise = new Promise((resolve, reject) => {
      const currentPath = path.resolve(this.componentPath, file);
      fs.readFile(currentPath, {encoding:'utf8'}, (err, data) => {
        if (err) {
          return reject(`Could not read ${file} file.`);
        }
        contents[file] = data;
        resolve(contents);
      });
    });
    promises.push(promise);
  });
  return Promise.all(promises);
}
Then you should use the function like so:
// this will return a promise that resolves with an array of results
var readAllFiles = readFiles();
// the then block will only run if all promises resolved; if any of them
// rejects, the whole thing rejects automatically
readAllFiles.then((results) => {
  results.forEach((result) => {
    console.log(result);
  });
}).catch((error) => error);
If you don't care whether some of the promises are rejected, you could do the following:
var readFiles = () => {
  var promises = [];
  let contents = {};
  var keys_files = Object.keys(this.files);
  if (keys_files.length <= 0) {
    var promise = new Promise((resolve, reject) => {
      resolve(contents);
    });
    promises.push(promise);
  }
  keys_files.forEach((key) => {
    var file = this.files[key];
    var promise = new Promise((resolve, reject) => {
      const currentPath = path.resolve(this.componentPath, file);
      fs.readFile(currentPath, {encoding:'utf8'}, (err, data) => {
        // create an object with the information
        let info = { completed: true };
        if (err) {
          info.completed = false;
          info.error = err;
          return resolve(info);
        }
        info.data = data;
        contents[file] = info;
        resolve(contents);
      });
    });
    promises.push(promise);
  });
  return Promise.all(promises);
}
Copied from comments:
Also - you might want to use fs-extra, a drop-in replacement for fs, but with promise support added.
Here's how that goes:
const fs = require('fs-extra');
var readFiles = () => {
  let promises = files
    .map(file => path.resolve(componentPath, file))
    .map(path => fs.readFile(path, 'utf8'));
  return Promise.all(promises);
};
Nice and clean. You can then get contents like this:
readFiles()
.then(contents => { ... })
.catch(error => { ... });
This will fail on first error though (because that's what Promise.all does). If you want individual error handling, you can add another map line:
.map(promise => promise.catch(err => err));
Then you can filter the results:
let errors = contents.filter(content => content instanceof Error)
let successes = contents.filter(content => !(content instanceof Error))