JS nested promises in a for loop

I have something like:
let promises = [];
fetch("list.of.names").then(names => {
  for (var n of names) {
    promises.push(fetch("names/" + n));
  }
  Promise.all(promises).then(all => {
    for (var item of all) {
      // item and the name `n` are both needed here:
      element.innerText += n + ": " + item.info;
    }
  });
});
And the thing is, I need both at the end, but obviously n is just the last value, because the for loop has already finished by the time the promises resolve. Any idea how I can do this nicely? Is there a way to attach extra data to a promise?

I guess you can make fetch(name) resolve into something like [result, name]:
fetch("list.of.names").then(names => {
let promises = [];
for (let name of names) {
promises.push(
fetch("names/" + name).then(item => [item, name])
);
}
return Promise.all(promises);
}).then(all => {
for (let [item, name] of all) {
// do stuff
}
})

This is just an improvement (IMO) of @basickarl's code:
(async () => {
  // this needs to be done first:
  const names = await fetch('list.of.names');
  // this part, we'd like to happen in parallel:
  const promises = names.map(async (name) => [name, await fetch(`names/${name}`)]);
  // there are no guarantees that the requests will resolve in sequence; nor do we care.
  // but this, we'd want to be executed in sequence:
  for await (const [name, item] of promises) {
    element.innerText += name + ': ' + item.info;
  }
})();
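A note on the `for await` line: iterating an array of promises with `for await...of` awaits each element in array order, so the DOM updates happen in the original sequence even if the fetches resolve out of order. A tiny self-contained illustration (the delays are made up):
const ps = [50, 10].map(ms =>
  new Promise(res => setTimeout(() => res(ms), ms))
);
(async () => {
  for await (const v of ps) {
    console.log(v); // logs 50, then 10 - array order, not resolution order
  }
})();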

Try this:
(async () => {
  const names = await fetch('list.of.names');
  const namesAndItemsPromises = names.map(async (name) => {
    const item = await fetch(`names/${name}`);
    element.innerText += name + ': ' + item.info;
    return [name, item];
  });
  const namesAndItems = await Promise.all(namesAndItemsPromises);
})();
My advice would be to get away from "then" as much as possible; it causes a lot of confusion.
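For illustration, the same two-step flow written both ways (getNames and getItem are hypothetical helpers standing in for the fetches above):
// with .then chaining:
getNames().then(names => getItem(names[0])).then(item => console.log(item));

// with async/await - same behaviour, flatter to read:
(async () => {
  const names = await getNames();
  const item = await getItem(names[0]);
  console.log(item);
})();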

Related

How can I get the final result from an array of async calls?

get-video-duration is an npm module that gets the duration of a video.
const { getVideoDurationInSeconds } = require('get-video-duration')
// From a local path...
getVideoDurationInSeconds('video.mov').then((duration) => {
  console.log(duration)
})
I want to use this module to get the total duration of all videos from an array of video paths.
function getTotals(video_Array) {
  let total_duration = 0;
  video_Array.forEach(video => {
    getVideoDurationInSeconds(video).then(duration => {
      total_duration += duration;
    })
  })
}
The thing is, getVideoDurationInSeconds is asynchronous, so I can't simply return the result:
function getTotals(video_Array) {
  let total_duration = 0;
  video_Array.forEach(video => {
    getVideoDurationInSeconds(video).then(duration => {
      total_duration += duration;
    })
  })
  return total_duration;
}
How can I get the final result? Thank you in advance!
Create a function which returns a promise, then use it to calculate the total duration:
function getTotals(video_Array) {
  let video_ArrayPromises = video_Array.map(video =>
    getVideoDurationInSeconds(video));
  return Promise.all(video_ArrayPromises).then((durations) => {
    // Calculate total duration
    return durations.reduce((accumulator, currentValue) => accumulator + currentValue);
  });
}
getTotals(['movie1.mov', 'movie2.mov']).then(totalDuration => {
  // use total duration
});
Create an array of getVideoDurationInSeconds promises with map, then reduce over the values returned by Promise.all to get your final total.
Additional documentation: async/await
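The same idea in a compact async/await form (a sketch, using the getVideoDurationInSeconds import from above):
async function getTotals(video_Array) {
  // map to promises, wait for all, then sum
  const durations = await Promise.all(video_Array.map(v => getVideoDurationInSeconds(v)));
  return durations.reduce((sum, d) => sum + d, 0);
}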
// Mock function that returns a promise.
// When it's resolved it will return a random number
// multiplied by the element passed in through the function arguments
function getVideoDurationInSeconds(el) {
  const rnd = Math.floor(Math.random() * (10 - 1) + 1);
  return new Promise((res, rej) => {
    setTimeout(() => {
      console.log(el * rnd);
      res(el * rnd);
    }, 1000);
  });
}
async function getTotals(videoArray) {
  // `map` over the elements of the video array
  // and create a new array of promises
  const promises = videoArray.map(el => getVideoDurationInSeconds(el));
  // Wait until all the promises have resolved
  const data = await Promise.all(promises);
  // Then return the sum of each total in the data array
  return data.reduce((acc, c) => acc + c, 0);
}
(async function main() {
  console.log(`Total: ${await getTotals([1, 2, 3, 4])}`);
}());
Return an array of requests and use reduce to get the total time.
// Array
function getTotalTime(videoList) {
  return videoList.map(video => getVideoDurationInSeconds(video));
}
// Just invoke wherever you want (inside an async function)...
const totalTime = await Promise.all(getTotalTime(videoList))
  .then(result => result.reduce((acc, cv) => acc + cv, 0)); // 0 = initial value

JavaScript map/reduce not working when implemented within object method

Based on the answer from this question, I implemented the map/reduce code within an object method.
this.displayValueGraph = async () => {
  let scaleData = [];
  this.positions.forEach(async (pos, i) => {
    scaleData[i] = [];
    let gdata = await pos.graphData;
    gdata.option.forEach((d) => {
      scaleData[i].push(d.map((x) => x * pos.size));
    });
  });
  let out;
  if (scaleData.length == 1) {
    out = scaleData[0];
  } else {
    out = scaleData.reduce((a, b) => b.map((x, j) => x.map((v, k) => a[j][k] + v)));
  }
};
The code by itself works fine: if I take the input data (scaleData above) and run it through the map/reduce function on its own, the output is as expected. But if I include it as part of this method, it does nothing. It doesn't throw any errors; it simply returns an empty array.
I have tried adding an empty array as an "initial value", but it doesn't help.
The root cause of the problem was the first forEach loop, where I included an await: forEach does not wait for the promises its async callbacks return, so the reduce ran before any data had arrived (see the short demonstration after the corrected code). I replaced the forEach with for...in and that solved the problem.
this.displayValueGraph = async () => {
  let scaleData = [];
  for (const i in this.positions) {
    const pos = this.positions[i];
    scaleData[i] = [];
    let gdata = await pos.graphData;
    gdata.option.forEach((d) => {
      scaleData[i].push(d.map((x) => x * pos.size));
    });
  }
  let out;
  if (scaleData.length == 1) {
    out = scaleData[0];
  } else {
    out = scaleData.reduce((a, b) => b.map((x, j) => x.map((v, k) => a[j][k] + v)));
  }
};
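For reference, here is a short demonstration of why the forEach version returned an empty array (the delays are made up):
const items = [10, 20];
const results = [];
items.forEach(async (ms) => {
  await new Promise((res) => setTimeout(res, ms)); // forEach does not wait for this
  results.push(ms);
});
console.log(results); // [] - this line runs before either callback finishes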

How to wait for iteration to complete before returning

I am trying to loop through an array of JSON objects (var requestArray = req.body;, specifically requestArray['filter']), persisting each object into a database. After each persistence, I pull the persisted record and push it onto an array (let responseDataArray = [];) via responseDataArray.push(result);. This array is then returned as the request response.
app.post('/sound', function (req, res) {
  var requestArray = req.body;
  let responseDataArray = [];
  for (var i = 0; i < requestArray['filter'].length; i++) {
    if (i > 3)
      break;
    var revEl = requestArray['filter'][i];
    // console.log('GUID >>> ' + i + ' : ' + revEl['_revEntityGUID'] + ' >>> ' + JSON.stringify(revEl));
    persistSingleItemPromise(revEl).then(function (result) {
      responseDataArray.push(result);
      console.log(JSON.stringify(responseDataArray));
    });
  }
  console.log(responseDataArray);
  res.send(responseDataArray);
});
The problem is in the for loop: the response is sent before the iteration's promises complete, so I only return an empty array responseDataArray = [].
I have tried using a Promise, persistSingleItemPromise:
let persistSingleItemPromise = function (revData) {
  return new Promise(function (resolve, reject) {
    revPersSaveRevEntity.revPersSaveRevEntity(revData, function (result) {
      resolve(result);
    });
  });
};
This doesn't help. How can I resolve this?
Thank you all in advance.
I was thinking of something like this.
I didn't test it, so please let me know if it works ;-)
Keep in mind that your callback needs the async prefix too.
const resultPromise = requestArray['filter'].reduce(async (accPromise, revEl) => {
  const acc = await accPromise
  const result = await persistSingleItemPromise(revEl)
  acc.push(result)
  return acc
}, Promise.resolve([]))
const responseDataArray = await resultPromise
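A sketch of how this could sit inside the route handler, assuming the handler is declared async (Express accepts an async callback):
app.post('/sound', async function (req, res) {
  const responseDataArray = await req.body['filter']
    .slice(0, 4) // first 4 items, matching the original break at i > 3
    .reduce(async (accPromise, revEl) => {
      const acc = await accPromise;
      acc.push(await persistSingleItemPromise(revEl));
      return acc;
    }, Promise.resolve([]));
  res.send(responseDataArray);
});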
You could use Promise.all and store the promises, then wait for all of them to resolve, like this:
app.post("/sound", function(req, res) {
var requestArray = req.body;
let responsePromises = [];
for (var i = 0; i < requestArray["filter"].length; i++) {
if (i > 3) break;
var revEl = requestArray["filter"][i];
// console.log('GUID >>> ' + i + ' : ' + revEl['_revEntityGUID'] + ' >>> ' + JSON.stringify(revEl));
responsePromises.push(persistSingleItemPromise(revEl));
}
Promise.all(responsePromises).then(result => res.send(result));
});
Here is an example simulation:
const promises = [];
for (let i = 1; i < 4; i++) {
  promises.push(new Promise(resolve => {
    // Simulate asynchronous request
    setTimeout(() => {
      resolve("Resolved " + i);
    }, 100 * i);
  }));
}
// Notice how the result takes some time.
// It's basically waiting for all the promises to resolve
Promise.all(promises).then(results => console.log(results));
I think you should add all your promises from persistSingleItemPromise to an array, perform Promise.all(list).then() on them, and await the result before returning.
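A minimal sketch of that suggestion, reusing persistSingleItemPromise from the question and assuming an async handler:
app.post('/sound', async function (req, res) {
  const promises = req.body['filter'].map(revEl => persistSingleItemPromise(revEl));
  res.send(await Promise.all(promises));
});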

Run concurrent HTTP requests in an async function

I am working on a project that needs an async function that's roughly equivalent to the following
async function task(url) {
  var r1 = await fetch(url).then(resp => resp.text());
  var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
  // r2 is an array of urls
  var total = 0;
  for (var u of r2) {
    var tmp = await fetch(u).then(resp => resp.text());
    total += parseInt(tmp);
  }
  return total;
}
The issue is that there are hundreds of elements in r2, and each element is a URL. If I do it sequentially, this function will take a very long time to complete. I would like to run 10 URLs concurrently (the number could be adjusted); how would I rewrite the async function?
Chunk the initial array into pieces of 10, then wait for each chunk to complete with Promise.all before starting the next one:
async function getTotal(subArr) {
  const resps = await Promise.all(subArr.map(url =>
    fetch(url).then(resp => resp.text())
  ));
  return resps.reduce((a, b) => a + parseInt(b), 0);
}

async function task(url) {
  const r1 = await fetch(url).then(resp => resp.text());
  const r2 = await fetch(url + "/" + r1).then(resp => resp.json());
  const chunks = [];
  const { length } = r2;
  for (let i = 0; i < length; i += 10) {
    chunks.push(r2.slice(i, i + 10));
  }
  let total = 0;
  for (const subArr of chunks) {
    total += await getTotal(subArr);
  }
  return total;
}
Here's some code I created years ago that allows you to create a "parallel" queue
const makeQueue = length => {
  length = (isNaN(length) || length < 1) ? 1 : length;
  const q = Array.from({ length }, () => Promise.resolve());
  let index = 0;
  const add = cb => {
    index = (index + 1) % length;
    return (q[index] = q[index].then(() => cb()));
  };
  return add;
};
This will allow up to 10 simultaneous requests (or whatever you pass in as the argument)
In your code, I guess you could use it like
async function task(url) {
  const q = makeQueue(10); // 10 requests at a time
  var r1 = await fetch(url).then(resp => resp.text());
  var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
  return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()))))
    // note: .map(s => parseInt(s, 10)) rather than .map(parseInt),
    // which would pass the array index as the radix
    .then(v => v.map(s => parseInt(s, 10)).reduce((a, b) => a + b));
}
The return can also be written as:
return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()).then(parseInt)))).then(v => v.reduce((a, b) => a + b));
(Here .then(parseInt) is safe because then passes a single argument.) Broken down, that is equivalent to:
const fetch1 = u => fetch(u).then(resp => resp.text()).then(parseInt);
const promises = r2.map(u => q(() => fetch1(u)));
return Promise.all(promises).then(v => v.reduce((a, b) => a + b));
The benefit of this method is that there should be 10 requests "on the go" for the maximum amount of time.
Note: browsers tend to limit the number of simultaneous requests per host, so you may not see any improvement with a queue size greater than 6 (I think that's the most common limit).
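For illustration, a minimal standalone run of makeQueue with fake jobs (the names and delays are made up); at most two jobs are in flight at any time:
const q = makeQueue(2);
const job = (name, ms) => () =>
  new Promise(res => setTimeout(() => { console.log(name, 'done'); res(name); }, ms));
Promise.all([q(job('a', 300)), q(job('b', 100)), q(job('c', 50))])
  .then(results => console.log(results)); // ['a', 'b', 'c'] - Promise.all keeps input order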
I appreciate all the good answers here! I studied them and came up with the following solution, which I think is slightly simpler (for many of us beginners) :-)
This solution doesn't divide all the url-fetching jobs up front, because it's uncertain how much time each fetch will take.
Instead, each worker goes through all the urls; if a url is already assigned to another worker, it just moves on to the next one.
var tasks
var total = 0
var gId = 0
var workerId = 0

manager(4)

async function manager(numOfWorkers) {
  var workers = []
  // r2 is the array of urls from the question
  tasks = r2.map(function (u) { return { id: gId++, assigned: -1, url: u } })
  for (var i = 0; i < numOfWorkers; i++) { workers.push(worker()) }
  await Promise.all(workers)
  console.log(total)
}

async function worker() {
  var wid = workerId; workerId++;
  var tmp;
  for (var u of tasks) {
    if (u.assigned == -1) {
      u.assigned = wid;
      console.log("unit " + u.id + " assigned to " + wid)
      tmp = await fetch(u.url).then(r => r.text())
      total += parseInt(tmp);
    }
  }
}
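A self-contained sketch of the same claim-a-task pattern with a mocked fetch (fakeFetch and the sample urls are stand-ins); note that a unit is claimed synchronously before the await, so two workers can never grab the same one:
const fakeFetch = url => new Promise(res =>
  setTimeout(() => res(String(url.length)), Math.random() * 200));
const tasks = ['u1', 'u22', 'u333'].map((url, id) => ({ id, assigned: -1, url }));
let total = 0;
async function worker(wid) {
  for (const t of tasks) {
    if (t.assigned === -1) {
      t.assigned = wid; // claimed before the await, so no double-work
      total += parseInt(await fakeFetch(t.url), 10);
    }
  }
}
Promise.all([worker(0), worker(1)]).then(() => console.log(total)); // 2 + 3 + 4 = 9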
In short, ditch the await. By using await, you are literally telling it to wait here until it is done with this one thing.
If you want to parallelize them, make use of Promise.all(). Any async function returns a Promise which can still be used like a normal Promise. Promise.all() accepts an array of Promise objects, and will call then() once all of those requests are done, giving you an array of the results from each.
You could do something like this:
const urls = [/* bunch of URLs */];
Promise.all(
  urls.map(url =>
    fetch(url).then(res => res.text())
  )
).then(results => /* do something with results */);
In this case, results will be an array of the results from your various requests, in the same order as they were passed in.
Now, if you want to be able to have a specific number of them running at a time, you'd want to change it up a bit and have some limits on what's going on.
I usually use a technique which just uses a simple counter to keep track of how many are active, and then fires off more when they are done.
You can do something like this:
// dummy fetch for example purposes, resolves between .2 and 3 seconds
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));
const inputUrls = ['a', 'b', 'c', 'd', 'e', 'f', 'g'];
const limit = 2; // this sets the limit of how many can run at once, set to 10 to run 10 concurrently
const delay = 100; // delay in ms between each batch starting

function fetchAll(urls) {
  let active = 0;
  let queue = urls.slice(0); // clone urls
  // inner function so urls and results can be shared with all calls
  function fetchAllInner() {
    if (active < limit && queue.length) {
      const count = Math.min(limit - active, queue.length);
      const urlsThisBatch = queue.slice(0, count);
      queue = queue.slice(count); // remaining
      return Promise.all(
        urlsThisBatch.map(url => {
          active++; // increment active
          console.log('start', url);
          return fakeFetch(url)
            .then(r => {
              console.log('done', url);
              active--; // decrement active
              return new Promise(resolve => // new Promise to promisify setTimeout
                setTimeout(() =>
                  resolve(fetchAllInner() // kicks off run again when one finishes
                    .then(fetchR => [].concat(r, fetchR)) // combine them
                  ), delay
                )
              );
            });
        })
      ).then(r => r.reduce((a, u) => [].concat(u, a), [])); // flatten from Promise.all()
    }
    return Promise.resolve([]); // final resolve
  }
  return fetchAllInner();
}

fetchAll(inputUrls)
  .then(results => console.log('all done', results));
In a nutshell, this creates a Promise.all() for a batch (however many we can start before hitting the limit). Then, when one finishes, it sets a timeout to start up another batch by recursively calling the same function. It's wrapped in another function simply to avoid having some variables be global.
This also has an optional delay, so you can throttle how fast requests are made and not hammer the system too badly. If you don't want a delay, set it to 0 or remove the new Promise(resolve => setTimeout wrapper.
The above version is a bit verbose to make it easier to understand. Here is a more "production-ready" version (be sure to switch fakeFetch to fetch and handle calling res.text()):
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));

function fetchAll(urls, limit = 10, delay = 200) {
  let active = 0;
  const queue = urls.slice(0); // clone so the caller's array isn't emptied
  function fetchAllInner() {
    if (active >= limit || !queue.length) {
      return Promise.resolve([]);
    }
    const count = Math.min(limit - active, queue.length);
    active = limit;
    return Promise.all(
      queue.splice(0, count)
        .map(url => fakeFetch(url)
          .then(r => {
            active--;
            return new Promise(resolve =>
              setTimeout(() => resolve(
                fetchAllInner().then(fetchR => [].concat(r, fetchR))
              ), delay)
            );
          })
        )
    ).then(r =>
      r.reduce((a, u) => [].concat(u, a), []));
  }
  return fetchAllInner();
}
console.log('give it a few seconds');
fetchAll(['a', 'b', 'c', 'd', 'e', 'f', 'g'])
.then(r => console.log('all done', r))

Waiting for a Promise in a loop

Using AngularJS, when using a forEach loop, the variable outside the loop is still always 0.
To explain my problem, this is my code:
var totald = 0;
children.forEach(function (child) {
  fetchdata(child['id']).then(function (resp) {
    totald += resp.total;
    domaines.push({'id': child['id'], 'total': resp.total, 'details': resp.details});
  });
});
After the forEach, when I do console.log(totald), I get 0, but when I put the console.log inside the forEach, the variable totald is incremented.
How can I resolve the problem and get the correct value of totald after the forEach has finished?
You can map each promise as a list and await all of them using $q.all.
Something like this:
var totald = 0;
var promises = children.map(function (child) {
  return fetchdata(child['id']).then(function (response) {
    return { id: child['id'], response: response };
  });
});
$q.all(promises).then(function (results) {
  results.forEach(function (result) {
    totald += result.response.total;
    domaines.push({'id': result.id, 'total': result.response.total, 'details': result.response.details});
  });
});
You should consider rewriting this code in a functional style; it will be much more readable:
const promises = children.map(async (child) => {
  const response = await fetchdata(child['id']);
  return {
    id: child['id'],
    response
  };
});
const results = await Promise.all(promises); // must run inside an async function
const total = results.map(result => result.response.total)
  .reduce((x, y) => x + y, 0);
const domains = results.map(result => ({
  id: result.id,
  total: result.response.total,
  details: result.response.details
}));
The most significant change is using map instead of forEach. There is rarely a real reason to use forEach: when you want side effects, the for (... of ...) construct signals them more clearly, and when you want a new array, map is more compact:
const ys = xs.map(x => x + 1);
vs...
const ys = [];
xs.forEach(x => {
  ys.push(x + 1);
});
If you are concerned about browser support for async/await, you can use Babel + Webpack.
You can use Promise.all:
var total = 0;
Promise.all(
  children.map(function (c) { return fetchdata(c['id']); })
).then(function (datas) {
  datas.forEach(function (data, i) {
    total += data.total;
    // Promise.all preserves input order, so index i lines up with children[i]
    domaines.push({'id': children[i]['id'], 'total': data.total, 'details': data.details});
  });
});
