get-video-duration is an npm module that gets the duration of a video.
const { getVideoDurationInSeconds } = require('get-video-duration')
// From a local path...
getVideoDurationInSeconds('video.mov').then((duration) => {
  console.log(duration)
})
I want to use this module to get the total duration of all videos in an array of video paths:
function getTotals(video_Array) {
  let total_duration = 0;
  video_Array.forEach(video => {
    getVideoDurationInSeconds(video).then(duration => {
      total_duration += duration;
    })
  })
}
The thing is, getVideoDurationInSeconds is asynchronous, so I can't simply return the result:
function getTotals(video_Array) {
  let total_duration = 0;
  video_Array.forEach(video => {
    getVideoDurationInSeconds(video).then(duration => {
      total_duration += duration;
    })
  })
  return total_duration; // returns 0, before any of the promises has resolved
}
How can I get the final result? Thank you in advance!
Create a function which returns a promise, and then use it to calculate the total duration:
function getTotals(video_Array) {
  let video_ArrayPromises = video_Array.map(video =>
    getVideoDurationInSeconds(video));
  return Promise.all(video_ArrayPromises).then((durations) => {
    // Calculate the total duration
    return durations.reduce((accumulator, currentValue) => accumulator + currentValue, 0);
  });
}
getTotals(['movie1.mov', 'movie2.mov']).then(totalDuration => {
  // use total duration
});
Create an array of getVideoDurationInSeconds promises with map, then reduce over the values returned by Promise.all to get your final total.
Additional documentation: async/await
// Mock function that returns a promise.
// When it's resolved it will return a random number
// multiplied by the element passed in through the function arguments
function getVideoDurationInSeconds(el) {
  const rnd = Math.floor(Math.random() * (10 - 1) + 1);
  return new Promise((res, rej) => {
    setTimeout(() => {
      console.log(el * rnd);
      res(el * rnd);
    }, 1000);
  });
}
async function getTotals(videoArray) {
  // `map` over the elements of the video array
  // and create a new array of promises
  const promises = videoArray.map(el => getVideoDurationInSeconds(el));
  // Wait until all the promises have resolved
  const data = await Promise.all(promises);
  // Then return the sum of each total in the data array
  return data.reduce((acc, c) => acc + c, 0);
}

(async function main() {
  console.log(`Total: ${await getTotals([1, 2, 3, 4])}`);
}());
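One caveat worth noting: Promise.all rejects as soon as any one of its promises rejects, so a single unreadable file makes the whole total fail. If you would rather skip failures, you could hedge each promise before collecting them (a sketch, counting a failed probe as 0 seconds):
const promises = videoArray.map(el =>
  getVideoDurationInSeconds(el).catch(() => 0) // count a failed probe as 0 seconds
);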
Return an array of requests and use reduce to get the total time.
// Build an array of duration promises
function getTotalTime(videoList) {
  return videoList.map(video => getVideoDurationInSeconds(video));
}

// Just invoke it wherever you want (inside an async function)
await Promise.all(getTotalTime(videoList)).then(result => {
  let totalTime = result.reduce((acc, cv) => acc + cv, 0); // 0 = initial value
})
Related
There are 100 promises in an array and we need to process 5 at a time in JS. How do we achieve this?
(Asked in a Microsoft interview)
Use a pool. There are a number of implementations in JS, such as this one that has a nice-looking API:
const PromisePool = require("async-promise-pool");
// concurrency is the only option for PromisePool and enables you to
// choose how many promises will run at once
const pool = new PromisePool({ concurrency: 3 });
// elsewhere add functions to the pool that produce promises. We use
// functions here to prevent the promises from immediately executing.
pool.add(() => thingThatReturnsAPromise());
// you can await pool.all to ensure that all promises in the pool are
// resolved before continuing.
await pool.all();
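If you would rather not pull in a dependency, a minimal hand-rolled pool is only a few lines. This is a sketch (promisePool and the shape of its arguments are my own, not from any library): it takes an array of functions that each return a promise, and runs at most concurrency of them at once.
async function promisePool(tasks, concurrency) {
  const results = new Array(tasks.length);
  let next = 0;
  // Each worker claims the next unclaimed index. This is race-free because
  // JavaScript is single-threaded and next++ happens with no await in between.
  async function worker() {
    while (next < tasks.length) {
      const i = next++;
      results[i] = await tasks[i]();
    }
  }
  // Start `concurrency` workers and wait for all of them to drain the task list
  await Promise.all(Array.from({ length: concurrency }, worker));
  return results;
}
// e.g. promisePool(hundredTaskFns, 5).then(results => console.log(results))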
I would use a function to execute promises in sequence instead of in parallel. Once done, create an array of groups of 5 to resolve in parallel using Promise.all:
const PROMISES_AMOUNT = 100
const GROUP_AMOUNT = 5

// Function to divide the array into chunks of similar size
function chunkArray(myArray, chunk_size) {
  let tempArray = [];
  for (let index = 0; index < myArray.length; index += chunk_size) {
    const myChunk = myArray.slice(index, index + chunk_size);
    // Do something if you want with the group
    tempArray.push(myChunk);
  }
  return tempArray;
}
// The promise we will use
function interval(index) {
  return new Promise(function (resolve, reject) {
    const time = index * 100
    setTimeout(function () {
      console.log(`Waited ${time}!`)
      resolve(index);
    }, time)
  })
};
// Our array of 100 tasks (wrapped in functions so the promises
// don't all start executing immediately)
const tasks = new Array(PROMISES_AMOUNT).fill(null).map((_, index) => () => interval(index))

// The array of 100 tasks divided into groups of 5 elements
const groupedPromises = chunkArray(tasks, GROUP_AMOUNT).map((taskGroup) => () => Promise.all(taskGroup.map((task) => task())))
// A function to execute promises in sequence
const promisesInSequence = (arrayOfTasks) => {
  let results = []
  return new Promise((resolve, reject) => {
    const resolveNext = (arrayOfTasks) => {
      // If all tasks are already resolved, return the final array of results
      if (arrayOfTasks.length === 0) return resolve(results)
      // Extract the first task and execute it
      const first = arrayOfTasks.shift()
      first().then((res) => {
        console.log('Solved a group in parallel: ', res)
        results.push(res)
        resolveNext(arrayOfTasks)
      }).catch((err) => {
        reject(err)
      })
    }
    resolveNext(arrayOfTasks)
  })
}
promisesInSequence(groupedPromises)
.then((result) => console.log(result))
I am working on a project that needs an async function that's roughly equivalent to the following
async function task(url) {
  var r1 = await fetch(url).then(resp => resp.text());
  var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
  // r2 is an array of urls
  var total = 0;
  for (var u of r2) {
    var tmp = await fetch(u).then(resp => resp.text());
    total += parseInt(tmp)
  }
  return total
}
The issue is that there are hundreds of elements in r2, and each element is a URL. If I do it sequentially, this function will take a loooong time to complete. I would like to run 10 URLs concurrently (this could be adjusted to other numbers); how would I rewrite the async function?
Chunk the initial array into pieces of 10, then wait for each chunk to complete with Promise.all before starting the next one:
async function getTotal(subArr) {
  const resps = await Promise.all(subArr.map(url =>
    fetch(url).then(resp => resp.text())
  ))
  return resps.reduce((a, b) => a + parseInt(b), 0);
}

async function task(url) {
  const r1 = await fetch(url).then(resp => resp.text());
  const r2 = await fetch(url + "/" + r1).then(resp => resp.json());

  const chunks = [];
  const { length } = r2;
  for (let i = 0; i < length; i += 10) {
    chunks.push(r2.slice(i, i + 10));
  }

  let total = 0;
  for (const subArr of chunks) {
    total += await getTotal(subArr);
  }
  return total;
}
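One thing to be aware of with fixed chunks: each batch waits for its slowest request before the next batch starts, so the pipeline can sit partially idle. A sliding-window queue (like the one in the next answer) keeps all 10 slots busy continuously.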
Here's some code I created years ago that allows you to create a "parallel" queue
const makeQueue = length => {
  length = (isNaN(length) || length < 1) ? 1 : length;
  const q = Array.from({length}, () => Promise.resolve());
  let index = 0;
  const add = cb => {
    index = (index + 1) % length;
    return (q[index] = q[index].then(() => cb()));
  };
  return add;
};
This will allow up to 10 simultaneous requests (or whatever you pass in as the argument)
In your code, I guess you could use it like
async function task(url) {
  const q = makeQueue(10); // 10 requests at a time
  var r1 = await fetch(url).then(resp => resp.text());
  var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
  return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()))))
    .then(v => v.map(s => parseInt(s)).reduce((a, b) => a + b));
}
The return can also be:
return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()).then(parseInt)))).then(v => v.reduce((a, b) => a+b));
Broken down, that is the equivalent of:
const fetch1 = u => fetch(u).then(resp => resp.text()).then(parseInt);
const promises = r2.map(u => q(() => fetch1(u)));
return Promise.all(promises).then(v => v.reduce((a, b) => a+b));
The benefit of this method is that there should be 10 requests "on the go" for as much of the time as possible.
Note: browsers tend to limit the number of simultaneous requests per host, so you may not see any improvement with a queue size greater than 6 (I think that's the most common limit).
Appreciate all the good answers here! I studied them and came up with the following solution, which I think is slightly simpler (for many of us beginners) :-)
This solution doesn't divide all the url-fetching jobs at the beginning, because it's uncertain how much time each url fetch will take.
Instead it makes each worker go through all the urls; if a url is already assigned to another worker, it just moves on to the next one.
var tasks
var total = 0
var gId = 0
var workerId = 0

manager(4)

async function manager(numOfWorkers) {
  var workers = []
  tasks = r2.map(function (u) { return { id: gId++, assigned: -1, url: u } })
  for (var i = 0; i < numOfWorkers; i++) { workers.push(worker()) }
  await Promise.all(workers)
  console.log(total)
}

async function worker() {
  var wid = workerId; workerId++;
  var tmp;
  for (var u of tasks) {
    if (u.assigned == -1) {
      u.assigned = wid;
      console.log("unit " + u.id + " assigned to " + wid)
      tmp = await fetch(u.url).then(r => r.text())
      total += parseInt(tmp);
    }
  }
}
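Worth spelling out why this works without locks: JavaScript is single-threaded, and there is no await between checking u.assigned == -1 and setting u.assigned = wid, so the check-and-claim is effectively atomic and no two workers can grab the same task.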
In short, ditch the await. By using await, you are literally telling it to wait here until it is done with this one thing.
If you want to parallelize them, make use of Promise.all(). Any async function returns a Promise which can still be used like a normal Promise. Promise.all() accepts an array of Promise objects, and will call then() once all of those requests are done, giving you an array of the results from each.
You could do something like this:
const urls = [/* bunch of URLs */];

Promise.all(
  urls.map(url =>
    fetch(url).then(res => res.text())
  )
).then(results => { /* do something with results */ })
In this case, results will be an array of the results from your various requests, in the same order as they were passed in.
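For instance (with hypothetical URLs), destructuring the result makes that ordering guarantee concrete:
Promise.all([
  fetch('https://example.com/a').then(res => res.text()),
  fetch('https://example.com/b').then(res => res.text())
]).then(([aText, bText]) => {
  // aText always comes from /a and bText from /b,
  // even if the request to /b finished first
});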
Now, if you want to be able to have a specific number of them running at a time, you'd want to change it up a bit and have some limits on what's going on.
I usually use a technique which just uses a simple counter to keep track of how many are active, and then fires off more when they are done.
You can do something like this:
// dummy fetch for example purposes, resolves between .2 and 3 seconds
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));

const inputUrls = ['a', 'b', 'c', 'd', 'e', 'f', 'g'];
const limit = 2; // this sets the limit of how many can run at once, set to 10 to run 10 concurrently
const delay = 100; // delay in ms between each batch starting

function fetchAll(urls) {
  let active = 0;
  let queue = urls.slice(0); // clone urls

  // inner function so urls and results can be shared with all calls
  function fetchAllInner() {
    if (active < limit && queue.length) {
      const count = Math.min(limit - active, queue.length);
      const urlsThisBatch = queue.slice(0, count);
      queue = queue.slice(count); // remaining

      return Promise.all(
        urlsThisBatch.map(url => {
          active++; // increment active
          console.log('start', url);
          return fakeFetch(url)
            .then(r => {
              console.log('done', url);
              active--; // decrement active
              return new Promise(resolve => // new Promise to promisify setTimeout
                setTimeout(() =>
                  resolve(fetchAllInner() // kicks off run again when one finishes
                    .then(fetchR => [].concat(r, fetchR)) // combine them
                  ), delay
                )
              );
            })
        })
      ).then(r => r.reduce((a, u) => [].concat(u, a), [])); // flatten from Promise.all()
    }
    return Promise.resolve([]); // final resolve
  }
  return fetchAllInner();
}

fetchAll(inputUrls)
  .then(results => console.log('all done', results));
In a nutshell, this creates a Promise.all() for a batch (however many we can start until we hit our limit). Then, when one finishes, it sets a timeout to start up another batch by recursively calling the same function. It's wrapped in another function simply to avoid having some variables be global.
This also has an added delay if you want, so you can throttle how many requests you'll make and not hammer the system too badly. If you don't want to use a delay, you can just set it to 0 or remove the new Promise(resolve => setTimeout bit.
The above version is a bit verbose to make it easier to understand. Here is a more "production-ready" version (be sure to switch fakeFetch to fetch and handle calling res.text()):
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));

function fetchAll(urls, limit = 10, delay = 200) {
  let active = 0;
  const queue = urls.slice(0); // clone so the caller's array isn't mutated
  function fetchAllInner() {
    if (active >= limit || !queue.length) {
      return Promise.resolve([]);
    }
    const count = Math.min(limit - active, queue.length);
    active += count;
    return Promise.all(
      queue.splice(0, count)
        .map(url => fakeFetch(url)
          .then(r => {
            active--;
            return new Promise(resolve =>
              setTimeout(() => resolve(
                fetchAllInner().then(fetchR => [].concat(r, fetchR))
              ), delay)
            );
          })
        )
    ).then(r =>
      r.reduce((a, u) => [].concat(u, a), []));
  }
  return fetchAllInner();
}
console.log('give it a few seconds');
fetchAll(['a', 'b', 'c', 'd', 'e', 'f', 'g'])
.then(r => console.log('all done', r))
Using AngularJS, when using a forEach loop, a variable outside the loop always stays 0.
To explain my problem, this is my code:
var totald = 0;
children.forEach(function (child) {
  fetchdata(child['id']).then(function (resp) {
    totald += resp.total;
    domaines.push({'id': child['id'], 'total': resp.total, 'details': resp.details});
  });
});
After the forEach, when I do console.log(totald), I get 0, but when I put the console.log inside the forEach, the variable totald is incremented.
How can I resolve the problem and get the correct value of totald after the forEach has finished?
You can map each promise as a list and await all of them using $q.all.
Something like this:
var totald = 0;
var promises = children.map(function (child) {
  return fetchdata(child['id']).then(function (response) {
    return { id: child['id'], response: response };
  });
});

$q.all(promises).then(function (results) {
  results.forEach(function (result) {
    totald += result.response.total;
    domaines.push({'id': result.id, 'total': result.response.total, 'details': result.response.details});
  });
});
You should consider rewriting this code in a functional style; it will be much more readable:
const promises = children.map(async (child) => {
  const response = await fetchdata(child['id']);
  return {
    id: child['id'],
    response
  };
});

const results = await Promise.all(promises);

const total = results.map(result => result.response.total)
  .reduce((x, y) => x + y, 0);

const domains = results.map(result => ({
  id: result.id,
  total: result.response.total,
  details: result.response.details
}));
The most significant change is using map instead of forEach. There is never really a reason to use forEach, because the for (... of ...) construct signals side effects more clearly. map is also more compact:
const ys = xs.map(x => x + 1);
vs...
const ys = [];
xs.forEach(x => {
  ys.push(x + 1);
})
If you are concerned about browser support for async/await, you can use Babel + Webpack.
You can use Promise.all:
var total = 0;
Promise.all(
  children.map(function (c) { return fetchdata(c['id']); })
).then(function (datas) {
  datas.forEach(function (data, i) {
    total += data.total;
    domaines.push({'id': children[i]['id'], 'total': data.total, 'details': data.details});
  });
});
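One AngularJS-specific caveat: native Promise callbacks run outside the digest cycle, so view bindings updated there may not refresh until something else triggers a digest; $q.all (as in the previous answer) avoids that.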
Short questions: Why is there no Promise.chain in JavaScript (comparable to Promise.all)? And is my implementation OK?
My 'codec' behaved incorrectly:
1. Reading a graph from an XML file
2. Creating all nodes (the creation method returns a promise)
3. Waiting for all node creations to finish
4. Creating all edges between the nodes
The problem: the order of the database calls for the node creation (step 2) got mixed up at execution time.
Solution: I had to chain the database calls in the correct order before the methods were executed.
/**
* chains a list of functions (that return promises) and executes them in the right order
* [function() {return Promise.resolve();}, function() {return Promise.resolve();}]
*
* @param funcs is an array of functions returning promises
* @returns {Promise}
*/
function chain_promises(funcs) {
  if (funcs.length < 1) {
    return Promise.resolve();
  }
  var i = 0;
  return chain_executor(funcs, i);
}
/**
* Recursive help method for chain_promises
* 1) executes a function that returns a promise (no params allowed)
* 2) chains itself to the success resolve of the promise
*
* @param funcs is an array of functions returning promises
* @param i is the current working index
*/
function chain_executor(funcs, i) {
  var promise = funcs[i]();
  return promise.then(function () {
    console.log(i);
    if (funcs.length > i + 1) {
      return chain_executor(funcs, i + 1);
    } else {
      return Promise.resolve();
    }
  })
}
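For reference, the implementation above can be exercised like this (a sketch; makeTask is a hypothetical helper that builds a function returning a delayed promise):
const makeTask = (name, ms) => () =>
  new Promise(resolve => setTimeout(() => {
    console.log(name, 'done');
    resolve(name);
  }, ms));

chain_promises([makeTask('first', 300), makeTask('second', 100)])
  .then(() => console.log('all done, in order'));
// 'first' completes before 'second' starts, despite its longer delay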
Using Array#reduce, you can create this function:
const chain_promises = arrayOfFn => arrayOfFn.reduce((promise, fn) =>
  promise.then(results =>
    fn().then(result =>
      results.concat(result)
    )
  ), Promise.resolve([])
);
Or, if you're into one-liners:
const chain_promises = arrayOfFn => arrayOfFn.reduce((promise, fn) => promise.then(results => fn().then(result => results.concat(result))), Promise.resolve([]));
These have the added benefit that the resolved values are all available in the .then, e.g.
const chain_promises = arrayOfFn => arrayOfFn.reduce((promise, fn) =>
  promise.then(results =>
    fn().then(result =>
      results.concat(result)
    )
  ), Promise.resolve([])
);

const wait_promise = (time, result) => new Promise(resolve => setTimeout(resolve, time, result));

var funcs = [
  () => wait_promise(300, 'p1').then(value => ({value, date: new Date()})),
  () => wait_promise(400, 'p2').then(value => ({value, date: new Date()})),
  () => wait_promise(100, 'p3').then(value => ({value, date: new Date()}))
];

const start = new Date();

chain_promises(funcs)
  .then(results =>
    results.reduce((a, b) => {
      console.log(b.value, b.date - a);
      return b.date;
    }, start)
  );
Also, passing an empty array to this function won't break - you'll simply end up with an empty array as the resolved value.
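For example, this resolves immediately:
chain_promises([]).then(results => console.log(results)); // logs []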
I'm creating an array that iterates asynchronously (for fun). This works fine:
class AsyncArray extends Array {
  constructor() {
    super();
    this.x = 0;
  }
  [Symbol.iterator]() {
    return {
      next: () => {
        let promise = new Promise(resolve => setTimeout(
          () => resolve(this.x++), 1000)
        );
        return {done: this.x >= this.length, value: promise};
      }
    };
  }
}
(async () => {
  for (let x of AsyncArray.of(1, 2, 3)) {
    let value = await x;
    console.log(value);
  }
})();
However, this prints out 0...1...2 because I'm keeping track of the current counter on my own, initializing it to 0, and resolving with the counter itself rather than the array element.
Is there any way to get the current iterator value internal to the Array? I would also need to be able to properly determine the done value.
I guess you don't want the counter internal to your array, but rather to your iterator. Use a local variable in the method for that:
[Symbol.iterator]() {
  var x = 0;
  return {
    next: () => {
      let promise = new Promise(resolve =>
        setTimeout(() => resolve(this[x++]), 1000)
      );
      return {done: x >= this.length, value: promise};
    }
  };
}
The easiest way to write iterators though is by using a generator function:
*[Symbol.iterator]() {
  for (let x = 0; x < this.length; x++)
    yield new Promise(resolve =>
      setTimeout(() => resolve(this[x]), 1000)
    );
}
That will take care of the correct done value as well (and won't "return" a promise that resolves with undefined).
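To see it in action, the consuming loop from the question works unchanged with the generator version and now logs the elements themselves (assuming the AsyncArray class above, with the generator replacing the hand-written iterator):
(async () => {
  for (const p of AsyncArray.of(1, 2, 3)) {
    console.log(await p); // 1, then 2, then 3, roughly a second apart
  }
})();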
An alternative that would completely avoid tracking state in a local variable or instance property would be to make use of the standard array iterator:
[Symbol.iterator]() {
  var vals = super[Symbol.iterator]();
  var it = Object.create(Object.getPrototypeOf(vals)); // an array iterator
  it.next = () => {
    var res = vals.next();
    if (!res.done)
      return {done: false, value: new Promise(resolve =>
        setTimeout(() => resolve(res.value), 1000)
      )};
    return res;
  };
  return it;
}