I am working on a project that needs an async function that's roughly equivalent to the following
async function task(url) {
var r1 = await fetch(url).then(resp => resp.text());
var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
//r2 is an array of urls
var total = 0;
for (var u of r2) {
var tmp = await fetch(u).then(resp => resp.text());
total += parseInt(tmp)
}
return total
}
The issue is that there are hundreds of elements in r2, and each element is a URL. If I do it sequentially, this function will take a long time to complete. I would like to run 10 URLs concurrently (the number could be adjusted), and I wonder how I would rewrite the async function.
Chunk the initial array into pieces of 10, then wait for each chunk to complete with Promise.all before starting the next one:
async function getTotal(subArr) {
const resps = await Promise.all(subArr.map(url =>
fetch(url).then(resp => resp.text())
))
return resps.reduce((a, b) => a + parseInt(b, 10), 0);
}
async function task(url) {
const r1 = await fetch(url).then(resp => resp.text());
const r2 = await fetch(url + "/" + r1).then(resp => resp.json());
const chunks = [];
const { length } = r2
for (let i = 0; i < length; i += 10) {
chunks.push(r2.slice(i, i + 10));
}
let total = 0;
for (const subArr of chunks) {
total += await getTotal(subArr);
}
return total;
}
Here's some code I created years ago that allows you to create a "parallel" queue
const makeQueue = length => {
length = (isNaN(length) || length < 1) ? 1 : length;
const q = Array.from({length}, () => Promise.resolve());
let index = 0;
const add = cb => {
index = (index + 1) % length;
return (q[index] = q[index].then(() => cb()));
};
return add;
};
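For intuition, here's a tiny made-up illustration of how the queue behaves (the job labels and timings are invented for the demo): tasks are handed out to the internal slots round-robin, and each new task waits for the previous task in its own slot to settle.
const q = makeQueue(2);
const job = (label, ms) => () =>
new Promise(res => setTimeout(() => { console.log(label, 'done'); res(label); }, ms));
q(job('a', 300)); // goes to slot 1
q(job('b', 100)); // goes to slot 0
q(job('c', 50)); // slot 1 again, so it only starts once 'a' has settled
// logs: "b done" (~100ms), "a done" (~300ms), "c done" (~350ms)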
This will allow up to 10 simultaneous requests (or whatever you pass in as the argument)
In your code, I guess you could use it like
async function task(url) {
const q = makeQueue(10); // 10 requests at a time
var r1 = await fetch(url).then(resp => resp.text());
var r2 = await fetch(url + "/" + r1).then(resp => resp.json());
return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()))))
.then(v => v.map(s => parseInt(s, 10)).reduce((a, b) => a + b));
}
the return can also be
return Promise.all(r2.map(u => q(() => fetch(u).then(resp => resp.text()).then(parseInt)))).then(v => v.reduce((a, b) => a+b));
Broken down, that is the equivalent of:
const fetch1 = u => fetch(u).then(resp => resp.text()).then(parseInt);
const promises = r2.map(u => q(() => fetch1(u)));
return Promise.all(promises).then(v => v.reduce((a, b) => a+b));
The benefit of this method is that it should keep 10 requests "on the go" for as much of the time as possible
Note, browsers tend to limit the number of simultaneous requests per host, so you may not see any improvement with queue size greater than 6 (I think that's the most common limit)
Appreciate all the good answers here! I studied them and came up with the following solution, which I think is slightly simpler (for many of us beginners) :-)
This solution doesn't divide all the URL-fetching jobs up front, because it's uncertain how much time each fetch will take.
Instead, each worker goes through all the URLs; if a URL is already assigned to another worker, it just moves on to the next one.
var tasks
var total = 0
var gId = 0
var workerId = 0
manager(4)
async function manager(numOfWorkers) {
var workers = []
tasks = r2.map(function(u) {return {id: gId++, assigned: -1, url: u }})
for (var i=0; i<numOfWorkers; i++) { workers.push(worker()) }
await Promise.all(workers)
console.log(total)
}
async function worker() {
var wid = workerId; workerId ++;
var tmp;
for (var u of tasks) {
if (u.assigned == -1) {
u.assigned = wid;
console.log("unit " + u.id + " assigned to " + wid)
tmp = await fetch(u.url).then(r=>r.text())
total += parseInt(tmp);
}
}
}
In short, ditch the await. By using await, you are literally telling it to wait here until it is done with this one thing.
If you want to parallelize them, make use of Promise.all(). Any async function returns a Promise which can still be used like a normal Promise. Promise.all() accepts an array of Promise objects, and will call then() once all of those requests are done, giving you an array of the results from each.
You could do something like this:
const urls = [/* bunch of URLs */];
Promise.all(
urls.map(url =>
fetch(url).then(res => res.text())
)
).then(results => /* do something with results */)
In this case, results will be an array of the results from your various requests, in the same order as they were passed in.
Now, if you want to be able to have a specific number of them running at a time, you'd want to change it up a bit and have some limits on what's going on.
I usually use a technique which just uses a simple counter to keep track of how many are active, and then fires off more when they are done.
You can do something like this:
// dummy fetch for example purposes, resolves between .2 and 3 seconds
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));
const inputUrls = ['a', 'b', 'c', 'd', 'e', 'f', 'g'];
const limit = 2; // this sets the limit of how many can run at once, set to 10 to run 10 concurrently
const delay = 100; // delay in ms between each batch starting
function fetchAll(urls) {
let active = 0;
let queue = urls.slice(0); // clone urls
// inner function so urls and results can be shared with all calls
function fetchAllInner() {
if (active < limit && queue.length) {
const count = Math.min(limit - active, queue.length);
const urlsThisBatch = queue.slice(0, count);
queue = queue.slice(count); // remaining
return Promise.all(
urlsThisBatch.map(url => {
active++; // increment active
console.log('start', url);
return fakeFetch(url)
.then(r => {
console.log('done', url);
active--; // decrement active
return new Promise(resolve => // new Promise to promisify setTimeout
setTimeout(() =>
resolve(fetchAllInner() // kicks off run again when one finishes
.then(fetchR => [].concat(r, fetchR)) // combine them
), delay
)
);
})
})
).then(r => r.reduce((a, u) => [].concat(u, a), [])); // flatten from Promise.all()
}
return Promise.resolve([]); // final resolve
}
return fetchAllInner();
}
fetchAll(inputUrls)
.then(results => console.log('all done', results));
In a nutshell, what this is doing is it'll create a Promise.all() for a batch (however many we can start up until we hit our limit). Then, when one finishes, it'll set a timeout to start up another batch by recursively calling the same function. It's wrapped in another function simply to avoid having to have some variables be global.
This also has an added delay if you want, so you can throttle how many requests you make and not hammer the system too badly. If you don't want a delay, you can just set it to 0 or remove the new Promise(resolve => setTimeout(...)) part.
The above version is a bit verbose to make it easier to understand. Here is a more "production-ready" version (be sure to switch fakeFetch to fetch and handle calling res.text())
const fakeFetch = url => new Promise(resolve => setTimeout(() => resolve(url), Math.random() * 2800 + 200));
function fetchAll(urls, limit = 10, delay = 200) {
let active = 0;
const queue = urls.slice(0); // clone urls so the caller's array isn't modified
function fetchAllInner() {
if (active >= limit || !queue.length) {
return Promise.resolve([]);
}
const count = Math.min(limit - active, queue.length);
active += count;
return Promise.all(
queue.splice(0, count)
.map(url => fakeFetch(url)
.then(r => {
active--;
return new Promise(resolve =>
setTimeout(() => resolve(
fetchAllInner().then(fetchR => [].concat(r, fetchR))
), delay)
);
})
)
).then(r =>
r.reduce((a, u) => [].concat(u, a), []));
}
return fetchAllInner();
}
console.log('give it a few seconds');
fetchAll(['a', 'b', 'c', 'd', 'e', 'f', 'g'])
.then(r => console.log('all done', r))
Related
get-video-duration is an npm module that gets the duration of a video.
const { getVideoDurationInSeconds } = require('get-video-duration')
// From a local path...
getVideoDurationInSeconds('video.mov').then((duration) => {
console.log(duration)
})
I want to use this module to get the total duration of all videos from an array of video paths.
function getTotals(video_Array) {
let total_duration = 0;
video_Array.forEach(video => {
getVideoDurationInSeconds(video).then(duration => {
total_duration += duration;
})
})
}
The thing is, getVideoDurationInSeconds is asynchronous, so I can't simply return the result:
function getTotals(video_Array) {
let total_duration = 0;
video_Array.forEach(video => {
getVideoDurationInSeconds(video).then(duration => {
total_duration += duration;
})
})
return total_duration;
}
How can I get the final result? Thank you in advance!
Create a function which returns a promise, and then use it to calculate the total duration:
function getTotals(video_Array) {
let video_ArrayPromises=video_Array.map(video=>
getVideoDurationInSeconds(video));
return Promise.all(video_ArrayPromises).then((durations) => {
//Calculate the total duration
return durations.reduce((accumulator, currentValue) => accumulator + currentValue);
});
}
getTotals(['movie1.mov','movie2.mov']).then(totalDuration => {
//use total duration
});
Create an array of getVideoDurationInSeconds promises with map, then reduce over the values returned by Promise.all to get your final total.
Additional documentation: async/await
// Mock function that returns a promise.
// When it's resolved it will return a random number
// multiplied by the element passed in through the function arguments
function getVideoDurationInSeconds(el) {
const rnd = Math.floor(Math.random() * (10 - 1) + 1);
return new Promise((res, rej) => {
setTimeout(() => {
console.log(el * rnd);
res(el * rnd);
}, 1000);
});
}
async function getTotals(videoArray) {
// `map` over the elements of the video array
// and create a new array of promises
const promises = videoArray.map(el => getVideoDurationInSeconds(el));
// Wait until all the promises have resolved
const data = await Promise.all(promises);
// Then return the sum of each total in the data array
return data.reduce((acc, c) => acc += c, 0);
}
(async function main() {
console.log(`Total: ${await getTotals([1, 2, 3, 4])}`);
}());
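For completeness, here is roughly how the same pattern looks wired up to the real get-video-duration module from the question (a sketch; the file paths are placeholders and the module is assumed to be installed):
const { getVideoDurationInSeconds } = require('get-video-duration')

async function getTotals(videoArray) {
// Start all duration lookups at once, then wait for all of them
const durations = await Promise.all(
videoArray.map(video => getVideoDurationInSeconds(video))
);
// Sum the resolved durations
return durations.reduce((acc, d) => acc + d, 0);
}

getTotals(['video1.mov', 'video2.mov']) // placeholder paths
.then(total => console.log(`Total duration: ${total} seconds`));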
Return an array of requests and use reduce to get the total time.
// Build the array of duration promises
function getTotalTime(videoList) {
return videoList.map((video) => getVideoDurationInSeconds(video));
}
// Just invoke wherever you want (inside an async function)..
await Promise.all(getTotalTime(videoList)).then(result => {
let totalTime = result.reduce((acc, cv) => acc + cv, 0); // 0 = initial value
})
My goal is to make a video playlist generator that runs at specific times, along with a running clock and many other things that run periodically, but I'm stuck at the part where it generates the playlist at the times I define.
I'm using FileHound to walk a folder, which works whenever I call the find() method, but as it's Promise driven, its results aren't readily available to be used by the following console.log(), for example. Same thing happens with MediaInfo(), but this time I was able to work around it with async/await, which as far as I know is the actual way to use a Promise based function.
Now, as far as I can understand, the .each() and .then() method chaining are the ways to use the results from a Promise driven function, but that would quickly result in code repetition for every time I want to do the same thing in different places.
All that being said, I think I'm way off track here, and despite my efforts I can't seem to find a clear way to achieve what I want, so I'm asking for help. This is the code I have so far, and I'm using the following npm packages:
node-mediainfo
filehound
moment
const MediaInfo = require("node-mediainfo");
const Path = require("path");
const FileHound = require("filehound");
const Moment = require("moment");
const START_TIME = Moment();
const END_TIME = null;
const VIDEO_PATH = "videos";
let files = FileHound.create()
.path(VIDEO_PATH)
.depth(0);
let playlist = [];
let start_time = START_TIME.add(10, "seconds");
const ArrayShuffle = (array) => {
for (let i = array.length - 1; i > 0; i--) {
let j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
};
const MakePlaylist = async (file) => {
let fileinfo = await MediaInfo(Path.resolve(file));
let duration = fileinfo.media.track[0].Duration;
let title = fileinfo.media.track[0].Title || Path.basename(file, Path.extname(file));
let artist = fileinfo.media.track[0].Performer || null;
playlist.push({
path : Path.resolve(file),
title : title,
artist : artist,
duration: duration
});
};
console.log(start_time.toLocaleString());
/* Main Loop */
setInterval(() => {
if (Moment().isSame(start_time)) {
console.log("First Run");
files.find().each(MakePlaylist).then(ArrayShuffle);
console.log(playlist);
}
if (Moment().isSame(Moment(start_time).add(30, "seconds"))) {
console.log("Second Run");
playlist = [];
files.find().each(MakePlaylist).then(ArrayShuffle);
console.log(playlist);
}
}, 1);
I would be tempted to put your logic into an async function, making use of await, and then simply re-call your function later, instead of trying to do it all inside setInterval.
Assuming files.find() returns a promise....
const GeneratePlaylist = async () => {
const foundFiles = await files.find(); // renamed so we don't shadow the outer `files`
var playlist = [];
for(var i=0;i<foundFiles.length;i++){
playlist.push(await MakePlaylist(foundFiles[i]));
}
ArrayShuffle(playlist);
return playlist;
}
Then you can use that from another async function, and you can recall it 1ms later with setTimeout (Note, you should make MakePlaylist return the new item, not push to a global array):
const doMyThing = async () => {
if (Moment().isSame(start_time)) {
console.log("First Run");
playlist = await GeneratePlaylist();
console.log(playlist);
}
if (Moment().isSame(Moment(start_time).add(30, "seconds"))) {
console.log("Second Run");
playlist = [];
playlist = await GeneratePlaylist();
console.log(playlist);
}
setTimeout(doMyThing,1);
}
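As noted above, MakePlaylist should return the new item instead of pushing to a global array; here's a minimal sketch of that change, reusing the MediaInfo call from your original code:
const MakePlaylist = async (file) => {
let fileinfo = await MediaInfo(Path.resolve(file));
let track = fileinfo.media.track[0];
return {
path : Path.resolve(file),
title : track.Title || Path.basename(file, Path.extname(file)),
artist : track.Performer || null,
duration: track.Duration
};
};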
Below is a working example, where I've just faked up some of your functionality using Promises to simulate asynchronous work like finding files and loading the media info:
const ArrayShuffle = (array) => {
for (let i = array.length - 1; i > 0; i--) {
let j = Math.floor(Math.random() * (i + 1));
[array[i], array[j]] = [array[j], array[i]];
}
};
const MakePlaylist = async (file) => {
let fileinfo = await new Promise(resolve => setTimeout(() => resolve({media:{track:[{Duration:1,Title:"Title",Performer:"Foo"}]}}),1000));
let duration = fileinfo.media.track[0].Duration;
let title = fileinfo.media.track[0].Title || Path.basename(file, Path.extname(file));
let artist = fileinfo.media.track[0].Performer || null;
return{
path : file,
title : title,
artist : artist,
duration: duration
};
};
const findFileMockup = () => {
return new Promise(resolve => setTimeout(() => {
resolve(["file1.txt","file2.txt","file3.txt"]);
},500));
}
const GeneratePlaylist = async () => {
var files = await findFileMockup(); // await files.find() in your code
var playlist = [];
for(var i=0;i<files.length;i++){
playlist.push(await MakePlaylist(files[i]));
}
ArrayShuffle(playlist);
return playlist;
}
const test = async () => {
var playlist = await GeneratePlaylist();
console.log(playlist);
}
test();
I need to implement the setInterval function within my program in order to create an interval of 500ms between each value appearing on screen. The program code I am currently using takes an input value from the user and calculates and writes out the factorial answer.
function factorizeFunction(number, numberArray = []) { //this is the function that does the factorization calculations
if (number == 0 || number == 1) numberArray.push(number);
else {
numberArray.push(number);
factorizeFunction(number - 1, numberArray);
}
return numberArray.join(' * ') + ' = ' + numberArray.reduce((a, b) => a * b, 1);
}
document.getElementById("factorialTest").innerHTML = factorizeFunction(number);
I need to have the answer, e.g. 6 * 5 * 4 * 3 * 2 * 1 = 720, displayed on my webpage one value at a time, with a 500ms delay between each one. So, 6 (500ms delay) * (500ms delay) 5 (500ms delay)... and so forth. I am unsure how I would go about doing this.
Basically, everything you want printed on your screen, the operators and the numbers, needs to appear with a 500ms delay. That means it's best to first obtain an array of these elements:
function getFactorElements(factor) {
// get array like [1, 2, 3, 4]
const factorArray = Array.from({ length: factor }, (_, i) => i + 1);
// calculate result
const result = factorArray.reduce((a, b) => a * b, 1);
// insert math operators
const parts = factorArray
.reverse()
.map((part) => [ part, part > 1 ? '*' : '=' ])
.reduce((a, b) => a.concat(b));
parts.push(result);
return parts;
}
With this being your HTML
<div id="factorialTest"></div>
You can create another function which accepts an element and the factor:
async function showFactorCalculation(element, factor) {
element.innerHTML = '';
for (const part of getFactorElements(factor)) {
element.innerHTML += ' ' + part;
await new Promise((resolve) => setTimeout(resolve, 500));
}
}
Which you should call like this:
showFactorCalculation(document.getElementById('factorialTest'), 9);
I'm using setTimeout because using setInterval is generally discouraged.
You could wrap your function with setInterval():
function startInterval(number, numberArray){
setInterval(() => factorizeFunction(number, numberArray), 500);
}
And call startInterval instead of factorizeFunction when you want the webpage to start showing the numbers.
It was a good exercise.
const factorizeFunction = async num => {
if (num < 1) return;
let resDiv = document.querySelector('#res_div');
let numArr = [];
while (num >= 1)
numArr.push(num--);
for (let i = 0; i < numArr.length; i++) {
resDiv.append(numArr[i]);
await _delay(500);
if (i < (numArr.length - 1))
resDiv.append(' * ');
else if (i == (numArr.length - 1))
resDiv.append(' = ');
await _delay(500);
}
resDiv.append(numArr.reduce((a, b) => a * b, 1));
};
let _delay = (ms = 200) => new Promise((accept) => setTimeout(accept, ms));
factorizeFunction(6);
<div id="res_div"></div>
Three different approaches.
inSequence constructs a promise chain:
const factorizeFunction = () => '6 * 5 * 4 * 3 * 2 * 1 = 720' // your existing function
const inSequence = async (arr) => arr.reduce((acc, f) => acc.then(f), Promise.resolve())
const delayed = (fn, ms = 500) => (...args) => () => new Promise((res) => setTimeout(() => { res(fn(...args)) }, ms))
const tokenize = (str) => str.match(/[\S]+[\s]?/g)
const print = (str) => document.getElementById("output").innerHTML += str
const printFactorialString = (str) => inSequence(tokenize(str).map((t) => delayed(print)(t)))
printFactorialString(factorizeFunction())
<div id="output"><div>
The following approach uses recursion:
const factorizeFunction = () => '6 * 5 * 4 * 3 * 2 * 1 = 720' // your existing function
const delay = (ms = 500) => new Promise((resolve) => setTimeout(resolve, ms))
const slowly = (action) => {
return async function go ([i, ...remaining]) {
if(!i) return
action(i)
await delay()
go(remaining)
}
}
const tokenize = (str) => str.match(/[\S]+[\s]?/g)
const print = (str) => document.getElementById("output").innerHTML += str
const printFactorialString = (str) => slowly(print)(tokenize(str))
printFactorialString(factorizeFunction())
<div id="output"></div>
This solution uses an async generator and async...await:
const factorizeFunction = () => '6 * 5 * 4 * 3 * 2 * 1 = 720' // your existing function
const identity = (x) => x
const delayed = (fn, ms = 500) => (...args) => () => new Promise((res) => setTimeout(() => { res(fn(...args)) }, ms))
const asyncIterable = (arr) => async function* () { for(let i of arr) yield i() }
const tokenize = (str) => str.match(/[\S]+[\s]?/g)
const print = (str) => document.getElementById("output").innerHTML += str
const printFactorialString = async (str) => {
const generator = asyncIterable(tokenize(str).map((t) => delayed(identity)(t)))()
for await (let x of generator) { print(x) }
}
printFactorialString(factorizeFunction())
<div id="output"></div>
There are 100 promises in an array, and we need to process 5 at a time in JS. How can this be achieved?
(Asked in Microsoft interview)
Use a pool. There are a number of implementations in JS, such as this one that has a nice looking API:
const PromisePool = require("async-promise-pool");
// concurrency is the only option for PromisePool and enables you to
// choose how many promises will run at once
const pool = new PromisePool({ concurrency: 3 });
// elsewhere add functions to the pool that produce promises. We use
// functions here to prevent the promises from immediately executing.
pool.add(() => thingThatReturnsAPromise());
// you can await pool.all to ensure that all promises in the pool are
// resolved before continuing.
await pool.all();
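If a library isn't allowed (it is an interview question, after all), the same idea can be hand-rolled with a small worker pool: start 5 async workers that each keep pulling the next task from a shared index until the list is exhausted. A sketch, assuming the 100 items are wrapped as functions that return promises (so they don't all start immediately):
async function runPool(tasks, concurrency = 5) {
const results = new Array(tasks.length);
let next = 0;
// Each worker repeatedly claims the next unprocessed index
async function worker() {
while (next < tasks.length) {
const i = next++;
results[i] = await tasks[i]();
}
}
// Start `concurrency` workers and wait for all of them to drain the list
await Promise.all(Array.from({ length: concurrency }, worker));
return results;
}

// Example usage with dummy task functions
const tasks = Array.from({ length: 100 }, (_, i) => () => Promise.resolve(i * 2));
runPool(tasks, 5).then(results => console.log(results.length)); // 100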
I would divide the promises into groups of 5, resolve each group in parallel with Promise.all, and then execute those groups in sequence:
const PROMISES_AMOUNT = 100
const GROUP_AMOUNT = 5
// Function to divide the array into chunks of similar size
function chunkArray(myArray, chunk_size){
let index = 0;
let arrayLength = myArray.length;
let tempArray = [];
for (index = 0; index < arrayLength; index += chunk_size) {
let myChunk = myArray.slice(index, index + chunk_size);
// Do something if you want with the group
tempArray.push(myChunk);
}
return tempArray;
}
// the promise we will use
function interval(index) {
return new Promise(function(resolve, reject) {
const time = index*100
setTimeout(function() {
console.log(`Waited ${time}!`)
resolve(index);
}, time)
})
};
// Our array of 100 promises
const promises = new Array(PROMISES_AMOUNT).fill(null).map((_, index) => interval(index ))
// The array of 100 promises divided by groups of 5 elements
const groupedPromises = chunkArray(promises, GROUP_AMOUNT).map((promisesGroup) => () => Promise.all(promisesGroup))
// A function to execute promises in sequence
const promisesInSequence = (arrayOfTasks) => {
let results = []
return new Promise((resolve, reject) => {
const resolveNext = (arrayOfTasks) => {
// If all tasks are already resolved, return the final array of results
if (arrayOfTasks.length === 0) return resolve(results)
// Extract first promise and solve it
const first = arrayOfTasks.shift()
first().then((res) => {
console.log('Solved a group in parallel: ', res)
results.push(res)
resolveNext(arrayOfTasks)
}).catch((err) => {
reject(err)
})
}
resolveNext(arrayOfTasks)
})
}
promisesInSequence(groupedPromises)
.then((result) => console.log(result))
Code sample should explain better than words can:
const spawn = require('child_process').spawn;
const start = new Date();
(async()=>{
const proc = spawn('( echo a; >&2 echo b; sleep 1; echo c; >&2 echo d )', { shell:true });
proc.stdout.setEncoding('utf8');
proc.stderr.setEncoding('utf8');
for await (const data of proc.stdout) {
console.log(new Date() - start, "proc stdout:", data);
}
for await (const data of proc.stderr) {
console.log(new Date() - start, "proc stderr:", data);
}
})();
The output here shows the stderr coming out at the end, which can be fine for many use cases, but I'm very curious about how to get undelayed output from both streams. For example, the behavior observed is:
5 'proc stdout:' 'a\n'
1006 'proc stdout:' 'c\n'
1009 'proc stderr:' 'b\nd\n'
This makes sense because the async flow execution does not reach the second for loop until stdout is fully consumed.
I'm imagining that Promise.all or race could be used to construct a way to achieve what I want, but it's not materializing in front of me. Also, are for await loops the only way to access async iterables cleanly?
Put each for await into an async IIFE, so you can get a Promise out of each. Then you can call Promise.all or Promise.race on them:
proc.stdout.setEncoding('utf8');
proc.stderr.setEncoding('utf8');
const stdoutProm = (async () => {
for await (const data of proc.stdout) {
console.log(new Date() - start, "proc stdout:", data);
}
})();
const stderrProm = (async () => {
for await (const data of proc.stderr) {
console.log(new Date() - start, "proc stderr:", data);
}
})();
await Promise.race([stdoutProm, stderrProm]);
// One of the iterators has been completely consumed
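If instead you want to wait until both streams have been fully consumed, await Promise.all on the same two promises:
await Promise.all([stdoutProm, stderrProm]);
// Both iterators have been completely consumed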
You could also combine the two iterators into one:
async function* combine(a, b) {
a = a[Symbol.asyncIterator](); b = b[Symbol.asyncIterator]();
let doneA = false, doneB = false;
let itA = a.next().then(a => ({ a })),
itB = b.next().then(b => ({ b }));
while(true) {
const result = await Promise.race([itA, itB]);
doneA = doneA || result.a && result.a.done;
doneB = doneB || result.b && result.b.done;
if(doneA && doneB) return;
yield [result.a && result.a.value, result.b && result.b.value];
// once an iterator is exhausted, park it on a never-resolving promise
// so the race only waits on the one that is still producing values
if(result.a) itA = doneA ? new Promise(() => {}) : a.next().then(a => ({ a }));
else itB = doneB ? new Promise(() => {}) : b.next().then(b => ({ b }));
}
}
Usable as:
for await(const [out, err] of combine(proc.stdout, proc.stderr)) {
if(out) console.log(out);
if(err) console.log(err);
}