JavaScript array .reduce with async/await - javascript

Seem to be having some issues incorporating async/await with .reduce(), like so:
const data = await bodies.reduce(async(accum, current, index) => {
const methodName = methods[index]
const method = this[methodName]
if (methodName == 'foo') {
current.cover = await this.store(current.cover, id)
console.log(current)
return {
...accum,
...current
}
}
return {
...accum,
...method(current.data)
}
}, {})
console.log(data)
The data object is logged before the this.store completes...
I know you can utilise Promise.all with async loops, but does that apply to .reduce()?

The problem is that your accumulator values are promises - they're return values of async functions. To get sequential evaluation (and have all but the last iteration be awaited at all), you need to use
const data = await array.reduce(async (accumP, current, index) => {
const accum = await accumP;
…
}, Promise.resolve(…));
That said, for async/await I would in general recommend to use plain loops instead of array iteration methods, they're more performant and often simpler.

I like Bergi's answer, I think it's the right way to go.
I'd also like to mention a library of mine, called Awaity.js
Which lets you effortlessly use functions like reduce, map & filter with async / await:
import reduce from 'awaity/reduce';
const posts = await reduce([1,2,3], async (posts, id) => {
const res = await fetch('/api/posts/' + id);
const post = await res.json();
return {
...posts,
[id]: post
};
}, {})
posts // { 1: { ... }, 2: { ... }, 3: { ... } }

[Not addressing OPs exact prob; focused on others who land here.]
Reduce is commonly used when you need the result of the previous steps before you can process the next. In that case, you can string promises together a la:
promise = elts.reduce(
async (promise, elt) => {
return promise.then(async last => {
return await f(last, elt)
})
}, Promise.resolve(0)) // or "" or [] or ...
Here's an example which uses fs.promises.mkdir() (sure, much simpler to use mkdirSync, but in my case, it's across a network):
const Path = require('path')
const Fs = require('fs')
// Create every segment of `path` in order (like `mkdir -p`), chaining one
// promise per directory so each mkdir waits for its parent to exist.
async function mkdirs (path) {
// Split on "/" and drop empty segments (leading slash, doubled slashes).
return path.split(/\//).filter(d => !!d).reduce(
async (promise, dir) => {
// `promise` resolves to the parent path built by the previous iteration.
return promise.then(async parent => {
const ret = Path.join(parent, dir);
try {
// If lstat succeeds the directory already exists; skip the mkdir.
await Fs.promises.lstat(ret)
} catch (e) {
console.log(`mkdir(${ret})`)
await Fs.promises.mkdir(ret)
}
return ret
})
}, Promise.resolve(""))
}
mkdirs('dir1/dir2/dir3')
Below is another example which adds 100 + 200 ... 500 and waits around a bit:
async function slowCounter () {
const ret = await ([100, 200, 300, 400, 500]).reduce(
async (promise, wait, idx) => {
return promise.then(async last => {
const ret = last + wait
console.log(`${idx}: waiting ${wait}ms to return ${ret}`)
await new Promise((res, rej) => setTimeout(res, wait))
return ret
})
}, Promise.resolve(0))
console.log(ret)
}
slowCounter ()

The current accepted answer advises to use Promise.all() instead of an async reduce. However this does not have the same behavior as an async reduce and is only relevant for the case where you want an exception to stop all iterations immediately, which is not always the case.
Additionally in the comments of that answer it's suggested that you should always await the accumulator as the first statement in the reducer, because otherwise you might risk unhandled promise rejections. The poster also says that this was what the OP is asking for, which is not the case. Instead he just wants to know when everything is done. In order to know that you indeed need to do await acc, but this could be at any point in the reducer.
const reducer = async(acc, key) => {
const response = await api(item);
return {
...await acc, // <-- this would work just as well for OP
[key]: response,
}
}
const result = await ['a', 'b', 'c', 'd'].reduce(reducer, {});
console.log(result); // <-- Will be the final result
How to safely use async reduce
That being said, using a reducer this way does mean that you need to guarantee it does not throw, else you will get "unhandled promise rejections". It's perfectly possible to ensure this by using a try-catch, with the catch block returning the accumulator (optionally with a record for the failed API call).
const reducer = async (acc, key) => {
try {
data = await doSlowTask(key);
return {...await acc, [key]: data};
} catch (error) {
return {...await acc, [key]: {error}};
};
}
const result = await ['a', 'b', 'c','d'].reduce(reducer, {});
Difference with Promise.allSettled
You can get close to the behavior of an async reduce (with error catching) by using Promise.allSettled. However this is clunky to use: you need to add another synchronous reduce after it if you want to reduce to an object.
The theoretical time complexity is also higher for Promise.allSettled + regular reduce, though there are probably very few use cases where this will make a difference. async reduce can start accumulating from the moment the first item is done, whereas a reduce after Promise.allSettled is blocked until all promises are fulfilled. This could make a difference when looping over a very large amount of elements.
const responseTime = 200; //ms
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const api = async (key) => {
console.log(`Calling API for ${ key }`);
// Boz is a slow endpoint.
await sleep(key === 'boz' ? 800 : responseTime);
console.log(`Got response for ${ key }`);
if (key === 'bar') throw new Error(`It doesn't work for ${ key }`);
return {
[key]: `API says ${ key }`,
};
};
const keys = ['foo', 'bar', 'baz', 'buz', 'boz'];
const reducer = async (acc, key) => {
let data;
try {
const response = await api(key);
data = {
apiData: response
};
} catch (e) {
data = {
error: e.message
};
}
// OP doesn't care how this works, he only wants to know when the whole thing is ready.
const previous = await acc;
console.log(`Got previous for ${ key }`);
return {
...previous,
[key]: {
...data
},
};
};
(async () => {
const start = performance.now();
const result = await keys.reduce(reducer, {});
console.log(`After ${ performance.now() - start }ms`, result); // <-- OP wants to execute things when it's ready.
})();
Check the order of execution with Promise.allSettled:
const responseTime = 200; //ms
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const api = async (key) => {
console.log(`Calling API for ${ key }`);
// Boz is a slow endpoint.
await sleep(key === 'boz' ? 800 : responseTime);
console.log(`Got response for ${ key }`);
if (key === 'bar') throw new Error(`It doesn't work for ${ key }`);
return {
key,
data: `API says ${ key }`,
};
};
const keys = ['foo', 'bar', 'baz', 'buz', 'boz'];
(async () => {
const start = performance.now();
const apiResponses = await Promise.allSettled(keys.map(api));
const result = apiResponses.reduce((acc, {status, reason, value}) => {
const {key, data} = value || {};
console.log(`Got previous for ${ key }`);
return {
...acc,
[key]: status === 'fulfilled' ? {apiData: data} : {error: reason.message},
};
}, {});
console.log(`After ${ performance.now() - start }ms`, result); // <-- OP wants to execute things when it's ready.
})();

Sometimes the best thing to do is simply put both code versions side by side, sync and async:
Sync version:
const arr = [1, 2, 3, 4, 5];
const syncRev = arr.reduce((acc, i) => [i, ...acc], []); // [5, 4, 3, 2, 1]
Async one:
(async () => {
const asyncRev = await arr.reduce(async (promisedAcc, i) => {
const id = await asyncIdentity(i); // could be id = i, just stubbing async op.
const acc = await promisedAcc;
return [id, ...acc];
}, Promise.resolve([])); // [5, 4, 3, 2, 1]
})();
//async stuff
async function asyncIdentity(id) {
return Promise.resolve(id);
}
const arr = [1, 2, 3, 4, 5];
(async () => {
const asyncRev = await arr.reduce(async (promisedAcc, i) => {
const id = await asyncIdentity(i);
const acc = await promisedAcc;
return [id, ...acc];
}, Promise.resolve([]));
console.log('asyncRev :>> ', asyncRev);
})();
const syncRev = arr.reduce((acc, i) => [i, ...acc], []);
console.log('syncRev :>> ', syncRev);
async function asyncIdentity(id) {
return Promise.resolve(id);
}

For TypeScript, the previous (accumulator) value and the initial value need to be of the same type.
const data = await array.reduce(async (accumP: Promise<Tout>, curr<Tin>) => {
const accum: Tout = await accumP;
doSomeStuff...
return accum;
}, Promise<Tout>.resolve({} as Tout);

You can wrap your entire map/reduce iterator blocks into their own Promise.resolve and await on that to complete. The issue, though, is that the accumulator doesn't contain the resulting data/object you'd expect on each iteration. Due to the internal async/await/Promise chain, the accumulator will be actual Promises themselves that likely have yet to resolve themselves despite using an await keyword before your call to the store (which might lead you to believe that the iteration won't actually return until that call completes and the accumulator is updated).
While this is not the most elegant solution, one option you have is to move your data object variable out of scope and assign it as a let so that proper binding and mutation can occur. Then update this data object from inside your iterator as the async/await/Promise calls resolve.
/* allow the result object to be initialized outside of scope
rather than trying to spread results into your accumulator on iterations,
else your results will not be maintained as expected within the
internal async/await/Promise chain.
*/
let data = {};
// Reduce sequentially; each iteration mutates the outer `data` object, so the
// reducer's accumulator is never relied upon across the async boundary.
await Promise.resolve(bodies.reduce(async (accum, current, index) => {
  const methodName = methods[index];
  const method = this[methodName];
  if (methodName === 'foo') {
    // note: this extra Promise.resolve may not be entirely necessary
    const cover = await Promise.resolve(this.store(current.cover, id));
    current.cover = cover;
    console.log(current);
    data = {
      ...data,
      ...current,
    };
    return data;
  }
  data = {
    ...data,
    ...method(current.data),
  };
  return data;
}, {})); // bug fix: the Promise.resolve(...) call was never closed
console.log(data);

// Validate every { currentObject, selectedValue } entry and build a map of
// currentObject.id -> { text, error, errorMessage }.
// Bug fix: the original computed `textData` but never returned it, so the
// function always resolved to undefined.
export const addMultiTextData = async (data) => {
  const textData = await data.reduce(async (a, {
    currentObject,
    selectedValue
  }) => {
    // `validate` is awaited per entry; `await a` keeps accumulation ordered.
    const {
      error,
      errorMessage
    } = await validate(selectedValue, currentObject);
    return {
      ...await a,
      [currentObject.id]: {
        text: selectedValue,
        error,
        errorMessage
      }
    };
  }, {});
  return textData;
};

Here's how to make async reduce:
// Sequential awaited fold over `arr`: `fn` receives the settled accumulator,
// the current element, and its index, and is awaited before the next element
// is processed.
async function asyncReduce(arr, fn, initialValue) {
  let accumulator = initialValue;
  for (const [index, element] of arr.entries()) {
    accumulator = await fn(accumulator, element, index);
  }
  return accumulator;
}

Another classic option with Bluebird
const promise = require('bluebird');
// Bluebird's Promise.reduce awaits each accumulator before the next step runs.
promise.reduce([1,2,3], (agg, x) => Promise.resolve(agg+x),0).then(console.log);
// Expected to produce sum 6

My solution for .reduce in typescript
Thanks to this person
https://dev.to/arnaudcourtecuisse/comment/1el22
// Build a map of user -> orders sequentially; each iteration waits for the
// previous accumulator before adding its entry.
const userOrders = await existUsersWithName.reduce(
  async (promise, existUserAndName) => {
    const acc = await promise;
    const {user, name} = existUserAndName;
    // My async function
    acc[user] = await this.users.getOrders(name);
    // Bug fix: return the resolved accumulator, not the previous promise —
    // the original only worked by accident through shared mutation.
    return acc;
  },
  // Bug fix: `<Promise<...>>{}` cast a plain object to a Promise, lying to
  // the type checker; use a real resolved promise as the initial value.
  Promise.resolve<Record<string, string[] | undefined>>({})
);

Related

Issue making async operations inside a reduce [duplicate]

Seem to be having some issues incorporating async/await with .reduce(), like so:
const data = await bodies.reduce(async(accum, current, index) => {
const methodName = methods[index]
const method = this[methodName]
if (methodName == 'foo') {
current.cover = await this.store(current.cover, id)
console.log(current)
return {
...accum,
...current
}
}
return {
...accum,
...method(current.data)
}
}, {})
console.log(data)
The data object is logged before the this.store completes...
I know you can utilise Promise.all with async loops, but does that apply to .reduce()?
The problem is that your accumulator values are promises - they're return values of async functions. To get sequential evaluation (and have all but the last iteration be awaited at all), you need to use
const data = await array.reduce(async (accumP, current, index) => {
const accum = await accumP;
…
}, Promise.resolve(…));
That said, for async/await I would in general recommend to use plain loops instead of array iteration methods, they're more performant and often simpler.
I like Bergi's answer, I think it's the right way to go.
I'd also like to mention a library of mine, called Awaity.js
Which lets you effortlessly use functions like reduce, map & filter with async / await:
import reduce from 'awaity/reduce';
const posts = await reduce([1,2,3], async (posts, id) => {
const res = await fetch('/api/posts/' + id);
const post = await res.json();
return {
...posts,
[id]: post
};
}, {})
posts // { 1: { ... }, 2: { ... }, 3: { ... } }
[Not addressing OPs exact prob; focused on others who land here.]
Reduce is commonly used when you need the result of the previous steps before you can process the next. In that case, you can string promises together a la:
promise = elts.reduce(
async (promise, elt) => {
return promise.then(async last => {
return await f(last, elt)
})
}, Promise.resolve(0)) // or "" or [] or ...
Here's an example which uses fs.promises.mkdir() (sure, much simpler to use mkdirSync, but in my case, it's across a network):
const Path = require('path')
const Fs = require('fs')
// Create every segment of `path` in order (like `mkdir -p`), chaining one
// promise per directory so each mkdir waits for its parent to exist.
async function mkdirs (path) {
// Split on "/" and drop empty segments (leading slash, doubled slashes).
return path.split(/\//).filter(d => !!d).reduce(
async (promise, dir) => {
// `promise` resolves to the parent path built by the previous iteration.
return promise.then(async parent => {
const ret = Path.join(parent, dir);
try {
// If lstat succeeds the directory already exists; skip the mkdir.
await Fs.promises.lstat(ret)
} catch (e) {
console.log(`mkdir(${ret})`)
await Fs.promises.mkdir(ret)
}
return ret
})
}, Promise.resolve(""))
}
mkdirs('dir1/dir2/dir3')
Below is another example which adds 100 + 200 ... 500 and waits around a bit:
async function slowCounter () {
const ret = await ([100, 200, 300, 400, 500]).reduce(
async (promise, wait, idx) => {
return promise.then(async last => {
const ret = last + wait
console.log(`${idx}: waiting ${wait}ms to return ${ret}`)
await new Promise((res, rej) => setTimeout(res, wait))
return ret
})
}, Promise.resolve(0))
console.log(ret)
}
slowCounter ()
The current accepted answer advises to use Promise.all() instead of an async reduce. However this does not have the same behavior as an async reduce and is only relevant for the case where you want an exception to stop all iterations immediately, which is not always the case.
Additionally in the comments of that answer it's suggested that you should always await the accumulator as the first statement in the reducer, because otherwise you might risk unhandled promise rejections. The poster also says that this was what the OP is asking for, which is not the case. Instead he just wants to know when everything is done. In order to know that you indeed need to do await acc, but this could be at any point in the reducer.
const reducer = async(acc, key) => {
const response = await api(item);
return {
...await acc, // <-- this would work just as well for OP
[key]: response,
}
}
const result = await ['a', 'b', 'c', 'd'].reduce(reducer, {});
console.log(result); // <-- Will be the final result
How to safely use async reduce
That being said, using a reducer this way does mean that you need to guarantee it does not throw, else you will get "unhandled promise rejections". It's perfectly possible to ensure this by using a try-catch, with the catch block returning the accumulator (optionally with a record for the failed API call).
const reducer = async (acc, key) => {
try {
data = await doSlowTask(key);
return {...await acc, [key]: data};
} catch (error) {
return {...await acc, [key]: {error}};
};
}
const result = await ['a', 'b', 'c','d'].reduce(reducer, {});
Difference with Promise.allSettled
You can get close to the behavior of an async reduce (with error catching) by using Promise.allSettled. However this is clunky to use: you need to add another synchronous reduce after it if you want to reduce to an object.
The theoretical time complexity is also higher for Promise.allSettled + regular reduce, though there are probably very few use cases where this will make a difference. async reduce can start accumulating from the moment the first item is done, whereas a reduce after Promise.allSettled is blocked until all promises are fulfilled. This could make a difference when looping over a very large amount of elements.
const responseTime = 200; //ms
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const api = async (key) => {
console.log(`Calling API for ${ key }`);
// Boz is a slow endpoint.
await sleep(key === 'boz' ? 800 : responseTime);
console.log(`Got response for ${ key }`);
if (key === 'bar') throw new Error(`It doesn't work for ${ key }`);
return {
[key]: `API says ${ key }`,
};
};
const keys = ['foo', 'bar', 'baz', 'buz', 'boz'];
const reducer = async (acc, key) => {
let data;
try {
const response = await api(key);
data = {
apiData: response
};
} catch (e) {
data = {
error: e.message
};
}
// OP doesn't care how this works, he only wants to know when the whole thing is ready.
const previous = await acc;
console.log(`Got previous for ${ key }`);
return {
...previous,
[key]: {
...data
},
};
};
(async () => {
const start = performance.now();
const result = await keys.reduce(reducer, {});
console.log(`After ${ performance.now() - start }ms`, result); // <-- OP wants to execute things when it's ready.
})();
Check the order of execution with Promise.allSettled:
const responseTime = 200; //ms
function sleep(ms) {
return new Promise(resolve => setTimeout(resolve, ms));
}
const api = async (key) => {
console.log(`Calling API for ${ key }`);
// Boz is a slow endpoint.
await sleep(key === 'boz' ? 800 : responseTime);
console.log(`Got response for ${ key }`);
if (key === 'bar') throw new Error(`It doesn't work for ${ key }`);
return {
key,
data: `API says ${ key }`,
};
};
const keys = ['foo', 'bar', 'baz', 'buz', 'boz'];
(async () => {
const start = performance.now();
const apiResponses = await Promise.allSettled(keys.map(api));
const result = apiResponses.reduce((acc, {status, reason, value}) => {
const {key, data} = value || {};
console.log(`Got previous for ${ key }`);
return {
...acc,
[key]: status === 'fulfilled' ? {apiData: data} : {error: reason.message},
};
}, {});
console.log(`After ${ performance.now() - start }ms`, result); // <-- OP wants to execute things when it's ready.
})();
Sometimes the best thing to do is simply put both code versions side by side, sync and async:
Sync version:
const arr = [1, 2, 3, 4, 5];
const syncRev = arr.reduce((acc, i) => [i, ...acc], []); // [5, 4, 3, 2, 1]
Async one:
(async () => {
const asyncRev = await arr.reduce(async (promisedAcc, i) => {
const id = await asyncIdentity(i); // could be id = i, just stubbing async op.
const acc = await promisedAcc;
return [id, ...acc];
}, Promise.resolve([])); // [5, 4, 3, 2, 1]
})();
//async stuff
async function asyncIdentity(id) {
return Promise.resolve(id);
}
const arr = [1, 2, 3, 4, 5];
(async () => {
const asyncRev = await arr.reduce(async (promisedAcc, i) => {
const id = await asyncIdentity(i);
const acc = await promisedAcc;
return [id, ...acc];
}, Promise.resolve([]));
console.log('asyncRev :>> ', asyncRev);
})();
const syncRev = arr.reduce((acc, i) => [i, ...acc], []);
console.log('syncRev :>> ', syncRev);
async function asyncIdentity(id) {
return Promise.resolve(id);
}
For TypeScript, the previous (accumulator) value and the initial value need to be of the same type.
const data = await array.reduce(async (accumP: Promise<Tout>, curr<Tin>) => {
const accum: Tout = await accumP;
doSomeStuff...
return accum;
}, Promise<Tout>.resolve({} as Tout);
You can wrap your entire map/reduce iterator blocks into their own Promise.resolve and await on that to complete. The issue, though, is that the accumulator doesn't contain the resulting data/object you'd expect on each iteration. Due to the internal async/await/Promise chain, the accumulator will be actual Promises themselves that likely have yet to resolve themselves despite using an await keyword before your call to the store (which might lead you to believe that the iteration won't actually return until that call completes and the accumulator is updated).
While this is not the most elegant solution, one option you have is to move your data object variable out of scope and assign it as a let so that proper binding and mutation can occur. Then update this data object from inside your iterator as the async/await/Promise calls resolve.
/* allow the result object to be initialized outside of scope
rather than trying to spread results into your accumulator on iterations,
else your results will not be maintained as expected within the
internal async/await/Promise chain.
*/
let data = {};
// Reduce sequentially; each iteration mutates the outer `data` object, so the
// reducer's accumulator is never relied upon across the async boundary.
await Promise.resolve(bodies.reduce(async (accum, current, index) => {
  const methodName = methods[index];
  const method = this[methodName];
  if (methodName === 'foo') {
    // note: this extra Promise.resolve may not be entirely necessary
    const cover = await Promise.resolve(this.store(current.cover, id));
    current.cover = cover;
    console.log(current);
    data = {
      ...data,
      ...current,
    };
    return data;
  }
  data = {
    ...data,
    ...method(current.data),
  };
  return data;
}, {})); // bug fix: the Promise.resolve(...) call was never closed
console.log(data);
// Validate every { currentObject, selectedValue } entry and build a map of
// currentObject.id -> { text, error, errorMessage }.
// Bug fix: the original computed `textData` but never returned it, so the
// function always resolved to undefined.
export const addMultiTextData = async (data) => {
  const textData = await data.reduce(async (a, {
    currentObject,
    selectedValue
  }) => {
    // `validate` is awaited per entry; `await a` keeps accumulation ordered.
    const {
      error,
      errorMessage
    } = await validate(selectedValue, currentObject);
    return {
      ...await a,
      [currentObject.id]: {
        text: selectedValue,
        error,
        errorMessage
      }
    };
  }, {});
  return textData;
};
Here's how to make async reduce:
// Sequential awaited fold over `arr`: `fn` receives the settled accumulator,
// the current element, and its index, and is awaited before the next element
// is processed.
async function asyncReduce(arr, fn, initialValue) {
  let accumulator = initialValue;
  for (const [index, element] of arr.entries()) {
    accumulator = await fn(accumulator, element, index);
  }
  return accumulator;
}
Another classic option with Bluebird
const promise = require('bluebird');
// Bluebird's Promise.reduce awaits each accumulator before the next step runs.
promise.reduce([1,2,3], (agg, x) => Promise.resolve(agg+x),0).then(console.log);
// Expected to produce sum 6
My solution for .reduce in typescript
Thanks to this person
https://dev.to/arnaudcourtecuisse/comment/1el22
// Build a map of user -> orders sequentially; each iteration waits for the
// previous accumulator before adding its entry.
const userOrders = await existUsersWithName.reduce(
  async (promise, existUserAndName) => {
    const acc = await promise;
    const {user, name} = existUserAndName;
    // My async function
    acc[user] = await this.users.getOrders(name);
    // Bug fix: return the resolved accumulator, not the previous promise —
    // the original only worked by accident through shared mutation.
    return acc;
  },
  // Bug fix: `<Promise<...>>{}` cast a plain object to a Promise, lying to
  // the type checker; use a real resolved promise as the initial value.
  Promise.resolve<Record<string, string[] | undefined>>({})
);

Array defined outside of function not being populated after awaiting an async call

I have the following code where in the getStuff function I'm making an external async call and storing the result in result. From there I'm looping over result.Content.Ids and pushing the Ids in myArray.
When getStuff() gets called in run(), further down the code I need to access the myArray array but if I console.log it out, it shows up as empty.
const myArray = [];
const getStuff = async (val) => {
const other = {}
if(val) other.newestVal = val;
const result = await someCall(other);
result.Content.Ids.forEach((id) => myArray.push(id));
if (result.AFieldExists) {
getStuff(result.newVal)
}
}
const run = async () => {
await getStuff();
// other code below that uses myArray
// but its empty when I log it
}
run();
I'd have expected myArray to be populated since I'm awaiting getStuff() in run(). What am I doing wrong here?
Your recursive call:
if (result.AFieldExists) {
getStuff(result.newVal)
}
is incorrect, since you're not waiting for the result - only the first getStuff call will be waited for. You need:
const getStuff = async (val) => {
const other = {}
if(val) other.newestVal = val;
const result = await someCall(other);
result.Content.Ids.forEach((id) => myArray.push(id));
if (result.AFieldExists) {
return getStuff(result.newVal)
}
}
You can also clean it up a bit to avoid the ugly outer variable:
const getStuff = async (val, output = []) => {
const other = {}
if (val) other.newestVal = val;
const result = await someCall(other);
output.push(...result.Content.Ids);
return result.AFieldExists
? getStuff(result.newVal, output)
: output;
}
const run = async () => {
const output = await getStuff(); // pass initial value here?
}
You need to put your await calls in try{} catch(){},
and also your recursive call to getStuff() was without the keyword await:
async function someCall() {
return {
Content: {Ids: [1, 2, 3]}
}
}
const myArray = [];
const getStuff = async (val) => {
const other = {}
let result;
if(val) other.newestVal = val;
try{
result = await someCall(other);
}catch(err){
console.log('error from some call' ,err);
}
console.log(myArray)
result.Content.Ids.forEach((id) => myArray.push(id));
console.log(myArray)
if (result.AFieldExists) {
try{
await getStuff(result.newVal)
}catch(err){
console.log('error from get stuff in get stuff' , err);
}
}
}
const run = async () => {
console.log("run")
try{
await getStuff();
}catch(err){
console.log('error' , err);
}
console.log("end run")
console.log(myArray)
// other code below that uses myArray
// but its empty when I log it
}
run();

How can I filter a collection in parallel? [duplicate]

Given
let arr = [1,2,3];
function filter(num) {
return new Promise((res, rej) => {
setTimeout(() => {
if( num === 3 ) {
res(num);
} else {
rej();
}
}, 1);
});
}
function filterNums() {
return Promise.all(arr.filter(filter));
}
filterNums().then(results => {
let l = results.length;
// length should be 1, but is 3
});
The length is 3 because Promises are returned, not values. Is there a way to filter the array with a function that returns a Promise?
Note: For this example, fs.stat has been replaced with setTimeout, see https://github.com/silenceisgolden/learn-esnext/blob/array-filter-async-function/tutorials/array-filter-with-async-function.js for the specific code.
Here is a 2017 elegant solution using async/await :
Very straightforward usage:
const results = await filter(myArray, async num => {
await doAsyncStuff()
return num > 2
})
The helper function (copy this into your web page):
// Keep each item the async predicate approves: rejected slots are marked with
// a unique sentinel (so legitimate falsy items survive) and stripped at the end.
async function filter(arr, callback) {
  const rejected = Symbol();
  const marked = await Promise.all(
    arr.map(async (item) => ((await callback(item)) ? item : rejected))
  );
  return marked.filter((candidate) => candidate !== rejected);
}
Demo:
// Async IIFE
(async function() {
const myArray = [1, 2, 3, 4, 5]
// This is exactly what you'd expect to write
const results = await filter(myArray, async num => {
await doAsyncStuff()
return num > 2
})
console.log(results)
})()
// Arbitrary asynchronous function
function doAsyncStuff() {
return Promise.resolve()
}
// The helper function
async function filter(arr, callback) {
const fail = Symbol()
return (await Promise.all(arr.map(async item => (await callback(item)) ? item : fail))).filter(i=>i!==fail)
}
I'll even throw in a CodePen.
As mentioned in the comments, Array.prototype.filter is synchronous and therefore does not support Promises.
Since you can now (theoretically) subclass built-in types with ES6, you should be able to add your own asynchronous method which wraps the existing filter function:
Note: I've commented out the subclassing, because it's not supported by Babel just yet for Arrays
class AsyncArray /*extends Array*/ {
constructor(arr) {
this.data = arr; // In place of Array subclassing
}
filterAsync(predicate) {
// Take a copy of the array, it might mutate by the time we've finished
const data = Array.from(this.data);
// Transform all the elements into an array of promises using the predicate
// as the promise
return Promise.all(data.map((element, index) => predicate(element, index, data)))
// Use the result of the promises to call the underlying sync filter function
.then(result => {
return data.filter((element, index) => {
return result[index];
});
});
}
}
// Create an instance of your subclass instead
let arr = new AsyncArray([1,2,3,4,5]);
// Pass in your own predicate
arr.filterAsync(async (element) => {
return new Promise(res => {
setTimeout(() => {
res(element > 3);
}, 1);
});
}).then(result => {
console.log(result)
});
Babel REPL Demo
For typescript folk (or es6 just remove type syntax)
function mapAsync<T, U>(array: T[], callbackfn: (value: T, index: number, array: T[]) => Promise<U>): Promise<U[]> {
return Promise.all(array.map(callbackfn));
}
async function filterAsync<T>(array: T[], callbackfn: (value: T, index: number, array: T[]) => Promise<boolean>): Promise<T[]> {
const filterMap = await mapAsync(array, callbackfn);
return array.filter((value, index) => filterMap[index]);
}
es6
function mapAsync(array, callbackfn) {
return Promise.all(array.map(callbackfn));
}
async function filterAsync(array, callbackfn) {
const filterMap = await mapAsync(array, callbackfn);
return array.filter((value, index) => filterMap[index]);
}
es5
function mapAsync(array, callbackfn) {
return Promise.all(array.map(callbackfn));
}
function filterAsync(array, callbackfn) {
return mapAsync(array, callbackfn).then(filterMap => {
return array.filter((value, index) => filterMap[index]);
});
}
edit: demo
function mapAsync(array, callbackfn) {
return Promise.all(array.map(callbackfn));
}
function filterAsync(array, callbackfn) {
return mapAsync(array, callbackfn).then(filterMap => {
return array.filter((value, index) => filterMap[index]);
});
}
var arr = [1, 2, 3, 4];
function isThreeAsync(number) {
return new Promise((res, rej) => {
setTimeout(() => {
res(number === 3);
}, 1);
});
}
mapAsync(arr, isThreeAsync).then(result => {
console.log(result); // [ false, false, true, false ]
});
filterAsync(arr, isThreeAsync).then(result => {
console.log(result); // [ 3 ]
});
Here's a way:
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
var filter = num => wait(1).then(() => num == 3);
var filterAsync = (array, filter) =>
Promise.all(array.map(entry => filter(entry)))
.then(bits => array.filter(entry => bits.shift()));
filterAsync([1,2,3], filter)
.then(results => console.log(results.length))
.catch(e => console.error(e));
The filterAsync function takes an array and a function that must either return true or false or return a promise that resolves to true or false, what you asked for (almost, I didn't overload promise rejection because I think that's a bad idea). Let me know if you have any questions about it.
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
var filter = num => wait(1).then(() => num == 3);
var filterAsync = (array, filter) =>
Promise.all(array.map(entry => filter(entry)))
.then(bits => array.filter(entry => bits.shift()));
filterAsync([1,2,3], filter)
.then(results => console.log(results.length))
.catch(e => console.error(e));
var console = { log: msg => div.innerHTML += msg + "<br>",
error: e => console.log(e +", "+ (e.lineNumber-25)) };
<div id="div"></div>
Promise Reducer to the rescue!
[1, 2, 3, 4].reduce((op, n) => {
return op.then(filteredNs => {
return new Promise(resolve => {
setTimeout(() => {
if (n >= 3) {
console.log("Keeping", n);
resolve(filteredNs.concat(n))
} else {
console.log("Dropping", n);
resolve(filteredNs);
}
}, 1000);
});
});
}, Promise.resolve([]))
.then(filteredNs => console.log(filteredNs));
Reducers are awesome. "Reduce my problem to my goal" seems to be a pretty good strategy for anything more complex than what the simple tools will solve for you, i.e. filtering an array of things that aren't all available immediately.
asyncFilter method:
// NOTE: extending native prototypes is generally discouraged; kept here
// because that is exactly what this answer demonstrates.
// Run the async predicate over every element in parallel, then keep the
// elements whose predicate resolved truthy.
Array.prototype.asyncFilter = async function (predicate) {
  const source = this;
  const verdicts = await Promise.all(source.map(predicate));
  return source.filter((_, index) => verdicts[index]);
};
Late to the game but since no one else mentioned it, Bluebird supports Promise.map which is my go-to for filters requiring aysnc processing for the condition,
function filterAsync(arr) {
return Promise.map(arr, num => {
if (num === 3) return num;
})
.filter(num => num !== undefined)
}
Two lines, completely typesafe
export const asyncFilter = async <T>(list: T[], predicate: (t: T) => Promise<boolean>) => {
const resolvedPredicates = await Promise.all(list.map(predicate));
return list.filter((item, idx) => resolvedPredicates[idx]);
};
In case someone is interested in modern typescript solution (with fail symbol used for filtering):
// Unique sentinel marking items that failed the predicate; a Symbol can
// never collide with a genuine element value.
const failSymbol = Symbol();

/**
 * Filter `itemsToFilter` with an asynchronous predicate.
 * All predicate calls run in parallel; element order is preserved.
 */
export async function filterAsync<T>(
  itemsToFilter: T[],
  filterFunction: (item: T) => Promise<boolean>,
): Promise<T[]> {
  const marked = await Promise.all(
    itemsToFilter.map(async (item) => {
      const hasPassed = await filterFunction(item);
      return hasPassed ? item : failSymbol;
    }),
  );
  // Every sentinel entry is removed here, so the cast back to T[] is safe.
  return marked.filter((entry) => entry !== failSymbol) as T[];
}
There is a one-liner to do that.
// Async array filter: run `fn` over all values in parallel, then keep the
// values whose resolved result is truthy (order preserved).
const filterPromise = async (values, fn) => {
  const flags = await Promise.all(values.map(fn));
  return values.filter((_, index) => flags[index]);
};
Pass the array into values and the function into fn.
More description on how this one liner works is available here.
For production purposes you probably want to use a lib like lodasync:
// Usage example for the third-party 'lodasync' library.
import { filterAsync } from 'lodasync'

// NOTE(review): `array` and `doSomething` are assumed to be defined by the
// surrounding code. lodasync's filterAsync takes (callback, array) —
// argument order is reversed from Array.prototype.filter conventions.
const result = await filterAsync(async(element) => {
await doSomething()
return element > 3
}, array)
Under the hood, it maps your array by invoking the callback on each element and filters the array using the result. But you should not reinvent the wheel.
You can do something like this...
// NOTE(review): this pattern has several problems:
//  1. Array.prototype.filter is misused as a forEach — the async callback
//     returns a Promise (always truthy), so filter's own result is meaningless.
//  2. It pushes someAsyncValue (the resolved result), not `element`, so it
//     maps as much as it filters.
//  3. Resolving when the last INDEX finishes does not wait for the other
//     callbacks — tempArray can be incomplete and its order nondeterministic.
theArrayYouWantToFilter = await new Promise(async (resolve) => {
const tempArray = [];
theArrayYouWantToFilter.filter(async (element, index) => {
const someAsyncValue = await someAsyncFunction();
if (someAsyncValue) {
tempArray.push(someAsyncValue);
}
if (index === theArrayYouWantToFilter.length - 1) {
resolve(tempArray);
}
});
});
Wrapped within an async function...
/**
 * Async "filter": resolves someAsyncFunction() once per element and returns
 * the truthy resolved values (matching the original's intent of keeping the
 * resolved values, not the source elements).
 *
 * Fixes over the original:
 *  - Promise.all waits for EVERY async call. The original resolved as soon
 *    as the last-index callback finished, racing the other callbacks and
 *    producing an incomplete / nondeterministically ordered result.
 *  - No misuse of Array.prototype.filter as a forEach.
 *  - Result order is deterministic (matches input order).
 *
 * @param {Array} theArrayYouWantToFilter source array (one async call per element)
 * @returns {Promise<Array>} truthy resolved values, in input order
 */
async function filter(theArrayYouWantToFilter) {
  const resolvedValues = await Promise.all(
    theArrayYouWantToFilter.map(() => someAsyncFunction())
  );
  // Keep only the truthy resolved values, as the original did.
  return resolvedValues.filter((value) => Boolean(value));
}
A way to do this (but it seems too messy — and beware that splicing an array while iterating it with for...of makes the iterator skip elements):
// Demo: filter an array via a promise that rejects for excluded values.
let arr = [1,2,3];

// Resolves (keep) only when num === 3; rejects (drop) otherwise.
function filter(num) {
  return new Promise((res, rej) => {
    setTimeout(() => {
      if( num === 3 ) {
        res(num);
      } else {
        rej();
      }
    }, 1);
  });
}

// Convert the resolve/reject outcome into a plain boolean.
async function check(num) {
  try {
    await filter(num);
    return true;
  } catch(err) {
    return false;
  }
}

(async function() {
  // BUGFIX: iterate over a snapshot of arr. The original iterated arr
  // itself while splicing it, which makes the for...of iterator skip the
  // element after each removal ([1,2,3] ended as [2,3] instead of [3]).
  for( let num of [...arr] ) {
    let res = await check(num);
    if(!res) {
      let index = arr.indexOf(num);
      arr.splice(index, 1);
    }
  }
})();
Again, seems way too messy.
A variant of #DanRoss's:
// Sequentially filter `arr` with the async `filter` predicate by threading
// a promise-of-array accumulator through reduce: each iteration first
// awaits the previous iteration's promise, serializing the predicate calls.
async function filterNums(arr) {
  return arr.reduce(async (pendingResult, val) => {
    // Unwrap the previous iteration's promise before touching the array.
    const result = await pendingResult;
    if (await filter(val)) {
      result.push(val);
    }
    return result;
  }, Promise.resolve([]));
}
Note that if (as in current case) you don't have to worry about filter() having
side effects that need to be serialized, you can also do:
// Variant that awaits the predicate BEFORE the accumulator, so predicate
// calls run concurrently — fine when filter() has no side effects that
// must be serialized.
async function filterNums(arr) {
  return arr.reduce(async (res, val) => {
    const passed = await filter(val);
    if (passed) {
      const list = await res;
      list.push(val);
    }
    return res;
  }, Promise.resolve([]));
}
Late to the party, and I know that my answer is similar to other already posted answers, but the function I'm going to share is ready for be dropped into any code and be used.
As usual, when you have to do complex operations on arrays, reduce is king:
// Curried async filter: filterAsync(pred)(arr) -> Promise<array>.
// The accumulator starts as a plain [] — `await` on a non-promise passes it
// straight through, so the first iteration works; every later iteration
// awaits the previous reducer call's promise, keeping pushes in input order.
const filterAsync = (asyncPred) => arr =>
  arr.reduce(async (acc, item) => {
    const keep = await asyncPred(item);
    if (keep) {
      (await acc).push(item);
    }
    return acc;
  }, []);
It uses modern syntax so make sure your target supports it. To be 100% correct you should use Promise.resolve([]) as the initial value, but JS just doesn't care and this way it is way shorter.
Then you can use it like this:
// Usage demo for the curried filterAsync above.
var wait = ms => new Promise(resolve => setTimeout(resolve, ms));
// Async predicate: truthy (x % 2 === 1 yields 1) for odd numbers, after 1ms.
const isOdd = x => wait(1).then(()=>x%2);
// Only the first occurrence position matters for order; the duplicate 4s
// are both even and dropped: logs [1, 3].
(filterAsync(isOdd)([1,2,3,4,4])).then(console.log) // => [1,3]
Here's a shorter version of #pie6k's Typescript version:
// Compact async filter using a function-scoped Symbol as the rejection mark.
// All callback invocations run in parallel; order is preserved.
async function filter<T>(arr: T[], callback: (val: T) => Promise<Boolean>) {
  const fail = Symbol()
  const checked = await Promise.all(
    arr.map(async item => ((await callback(item)) ? item : fail)),
  )
  // Every `fail` sentinel is filtered out, so the cast below is safe.
  const result = checked.filter(entry => entry !== fail)
  return result as T[]
}
An efficient way of approaching this is by processing arrays as iterables, so you can apply any number of required operations in a single iteration.
The example below uses library iter-ops for that:
// Example using the third-party 'iter-ops' library.
import {pipe, filter, toAsync} from 'iter-ops';

const arr = [1, 2, 3]; // synchronous iterable

// Build an async-iterable pipeline; nothing executes until it is iterated.
const i = pipe(
toAsync(arr), // make our iterable asynchronous
filter(async (value, index) => {
// returns Promise<boolean>
// NOTE(review): placeholder body — a real predicate must return a boolean.
})
);

// Consume the pipeline; each element is awaited as it flows through.
(async function() {
for await (const a of i) {
console.log(a); // print values
}
})();
All operators within the library support asynchronous predicates when inside an asynchronous pipeline (why we use toAsync), and you can add other operators, in the same way.
Use of Promise.all for this is quite inefficient, because you block the entire array from any further processing that can be done concurrently, which the above approach allows.

How to use reduce in a Promise.all instead of map

How can I use reduce in the place of map when using Promise.all? My attempt results in an error UnhandledPromiseRejectionWarning: TypeError: #<Promise> is not iterable at Function.all (<anonymous>)
Eventually I would like to conditionally add innerResult to memo but I need to use reduce first.
// OP's question code; requires lodash.
const _ = require('lodash');
const eq = [{id:1}, {id:2}, {id:3}];
// block to replace
// _.map returns the async callback's return values — an array of
// promises — which is exactly the iterable Promise.all expects.
var biggerEq = _.map(eq, async (e) => {
const innerResult = await wait(e.id);
return innerResult;
})
// attempt at replacing above block
// NOTE(review): this attempt fails because the async reducer returns a
// Promise, so from the second iteration `memo` is a Promise (memo.push
// throws), and the final reduce value handed to Promise.all is a single
// Promise, not an array — hence "TypeError: #<Promise> is not iterable".
// var biggerEq = _.reduce(eq, async (memo, e) => {
// const innerResult = await wait(e.id);
// memo.push(innerResult)
// return memo;
// }, []);
Promise.all(biggerEq).then((result) => {
console.log(result) // outputs [ 2, 4, 6 ]
})
// Simulated async work: resolves id * 2 after 1 second.
function wait (id) {
return new Promise((resolve) => {
setTimeout(() => {
resolve(id * 2);
}, 1000);
})
}
If you want to replace it with reduce, it's possible, but the logic will be a bit convoluted. Make the accumulator a Promise that resolves to an array that you can push to, then return it so the next iteration can use it (as a Promise):
const eq = [{id:1}, {id:2}, {id:3}];
// Simulated async work: resolves id * 2 after 1 second.
function wait (id) {
return new Promise((resolve) => {
setTimeout(() => {
resolve(id * 2);
}, 1000);
})
}
// The accumulator is a Promise<array>. Each iteration awaits BOTH the
// previous accumulator and its own wait() via Promise.all; because
// _.reduce invokes the callback synchronously, every wait() timer starts
// immediately, so the work runs concurrently (~1s total) while results
// are still appended in input order.
const biggerEq = _.reduce(eq, async (arrProm, obj) => {
const [arr, innerResult] = await Promise.all([arrProm, wait(obj.id)]);
arr.push(innerResult);
return arr;
}, Promise.resolve([]));
// biggerEq is a single Promise for the completed array.
biggerEq.then((arr) => {
console.log(arr);
});
<script src="https://cdnjs.cloudflare.com/ajax/libs/lodash.js/4.17.15/lodash.min.js"></script>
(but .map is really more appropriate when you want to transform one array into another)
I think CertainPerformance over-complicated it. You can use reduce like this with Promise.all:
const eq = [{id: 1}, {id: 2}, {id: 3}];
// Simulated async work: resolves id * 2 after 1 second.
function wait(id) {
return new Promise((resolve) => {
setTimeout(() => {
resolve(id * 2);
}, 1000);
});
}
// Synchronous reduce that merely COLLECTS promises (no await inside), so
// all timers start at once and run concurrently. Functionally this is
// equivalent to _.map(eq, obj => wait(obj.id)).
const biggerEq = _.reduce(eq, (arr, obj) => {
const p = wait(obj.id);
arr.push(p);
return arr;
}, []);
// Promise.all awaits the collected promises, preserving input order.
Promise.all(biggerEq).then((arr) => {
console.log(arr);
});
Note that the problem was with using await inside the reduce. That meant that you were pushing the results of the promises into the array, not getting an array of promises. Using that method, you already have your results in the array, so there's no need for Promise.all, but there's the big disadvantage that the promises are resolved consecutively. If that's actually what you want, you can have:
// Sequential variant: each element's wait() starts only after the previous
// result has been appended, so the promises resolve consecutively.
// Fixes over the original:
//  - From the second iteration on the accumulator is a Promise (the async
//    reducer's return value), so it must be awaited before .push — the
//    original threw "arr.push is not a function".
//  - `results` is a Promise; the original console.log(results) printed a
//    pending Promise instead of the values.
const results = _.reduce(eq, async (arrPromise, obj) => {
  const arr = await arrPromise;   // unwrap the previous accumulator first
  const p = await wait(obj.id);   // only now start/await this element's work
  arr.push(p);
  return arr;
}, Promise.resolve([]));
results.then((arr) => console.log(arr));

How to wait for each value of an observable with a promise

Let's say I have this observable:
// rxjs-style Observable that synchronously emits three values to each new
// subscriber, then completes.
const obs = new Observable((observer) => {
observer.next(0.25);
observer.next(0.75);
// Values need not be primitives — the third emission is an ArrayBuffer.
observer.next(new ArrayBuffer(100));
observer.complete();
});
How can I wait for each value with a promise?
The following code will only return the last value (value before complete() is called):
const value = await obs.toPromise();
But I want to be able to get each value along the way. I can do something like this:
const value1 = await obs.pipe(take(1)).toPromise();
const value2 = await obs.pipe(take(2)).toPromise();
But that's not ideal, since I'd have to increment the number each time and also take(1000) would still return something in the example, even though there are only 3 values. I'm looking for something like:
const value1 = await obs.pipe(next()).toPromise(); // 0.25
const value2 = await obs.pipe(next()).toPromise(); // 0.75
const value3 = await obs.pipe(next()).toPromise(); // ArrayBuffer(100)
const value4 = await obs.pipe(next()).toPromise(); // null
That is more akin to a generator.
Is there a way to accomplish something like this?
It seems like what you are asking for is a way to convert an observable into an async iterable so that you can asynchronously iterate over its values, either "by hand" or using the new for-await-of language feature.
Here's an example of how to do that (I've not tested this code, so it might have some bugs):
// returns an asyncIterator that will iterate over the observable values
//
// Bridges an rxjs-style Observable to the async-iterator protocol: emitted
// values are buffered in `queue` until the consumer asks for them via
// next(), and pending next() calls are parked in `promiseCallbacks` until
// a value (or completion/error) arrives.
function asyncIterator(observable) {
const queue = []; // holds observed values not yet delivered
let complete = false;
let error = undefined;
// Each entry is a [resolve, reject] pair from a pending next() promise.
const promiseCallbacks = [];
// Called after every state change; pairs waiting consumers with buffered
// values / completion / error, in that priority order.
function sendNotification() {
// see if caller is waiting on a result
if (promiseCallbacks.length) {
// send them the next value if it exists
if (queue.length) {
const value = queue.shift();
promiseCallbacks.shift()[0]({ value, done: false });
}
// tell them the iteration is complete
else if (complete) {
while (promiseCallbacks.length) {
promiseCallbacks.shift()[0]({ done: true });
}
}
// send them an error
// NOTE(review): a falsy rejection value (undefined, 0, '') would be
// missed by this truthiness check — a separate boolean flag would be safer.
else if (error) {
while (promiseCallbacks.length) {
promiseCallbacks.shift()[1](error);
}
}
}
}
// NOTE(review): this is the multi-callback subscribe signature, deprecated
// in newer rxjs versions in favor of an observer object — confirm against
// the rxjs version in use.
observable.subscribe(
value => {
queue.push(value);
sendNotification();
},
err => {
error = err;
sendNotification();
},
() => {
complete = true;
sendNotification();
});
// return the iterator
return {
next() {
return new Promise((resolve, reject) => {
promiseCallbacks.push([resolve, reject]);
// Deliver immediately if a value/completion/error is already pending.
sendNotification();
});
}
}
}
Use with the for-await-of language feature:
// Usage with for-await-of. NOTE(review): method-shorthand syntax — this
// must live inside a class or object literal, and `...` is a placeholder.
async someFunction() {
const obs = ...;
// Wrap the iterator factory in an object implementing the async-iterable
// protocol (Symbol.asyncIterator) so for-await-of can consume it.
const asyncIterable = {
[Symbol.asyncIterator]: () => asyncIterator(obs)
};
for await (const value of asyncIterable) {
console.log(value);
}
}
Use without the for-await-of language feature:
// Manual consumption without for-await-of: await next() in a loop until the
// result reports done. NOTE(review): method-shorthand syntax — this must
// live inside a class or object literal, and `...` is a placeholder.
async someFunction() {
const obs = ...;
const it = asyncIterator(obs);
while (true) {
// Each next() yields a { value, done } pair, like a sync iterator.
const { value, done } = await it.next();
if (done) {
break;
}
console.log(value);
}
}
That might just work: take(1) completes the derived observable, which is then consumed by await, and the next take(1) in line delivers the second emission to value2.
// Requires rxjs. NOTE(review): despite its name, `observer` here is a
// Subject used as both producer and consumer.
const observer= new Subject()
async function getStream(){
// take(1) completes each derived observable after a single emission, so
// each await resolves with the next value pushed through the Subject.
const value1 = await observer.pipe(take(1)).toPromise() // 0.25
const value2 = await observer.pipe(take(1)).toPromise() // 0.75
return [value1,value2]
}
getStream().then(values=>{
console.log(values)
})
//const obs = new Observable((observer) => {
// Emissions are delayed so the awaits above are subscribed before the
// corresponding value is pushed (Subjects do not replay missed values).
setTimeout(()=>{observer.next(0.25)},1000);
setTimeout(()=>observer.next(0.75),2000);
UPDATE: Using subject to emit.

Categories

Resources