fetch retry request (on failure) - javascript

I'm using the browser's native fetch API for network requests, along with the whatwg-fetch polyfill for unsupported browsers.
However, I need to retry the request in case it fails. There is an npm package whatwg-fetch-retry I found, but its docs don't explain how to use it. Can somebody help me with this or suggest an alternative?

From the fetch docs:
fetch('/users')
  .then(checkStatus)
  .then(parseJSON)
  .then(function(data) {
    console.log('succeeded', data)
  }).catch(function(error) {
    console.log('request failed', error)
  })
See that catch? It will trigger when fetch fails, and you can fetch again there.
Have a look at the Promise API.
Implementation example:
function wait(delay) {
  return new Promise((resolve) => setTimeout(resolve, delay));
}

function fetchRetry(url, delay, tries, fetchOptions = {}) {
  function onError(err) {
    const triesLeft = tries - 1;
    if (!triesLeft) {
      throw err;
    }
    return wait(delay).then(() => fetchRetry(url, delay, triesLeft, fetchOptions));
  }
  return fetch(url, fetchOptions).catch(onError);
}
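For completeness, a usage sketch (the URL, delay and retry count are just example values):
fetchRetry('/users', 500, 3)
  .then(response => response.json())
  .then(data => console.log('succeeded', data))
  .catch(error => console.log('request failed', error))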
Edit 1: as suggested by golopot, p-retry is a nice option.
Edit 2: simplified example code.

I recommend using a library for promise retries, for example p-retry.
Example:
const pRetry = require('p-retry')
const fetch = require('node-fetch')

async function fetchPage () {
  const response = await fetch('https://stackoverflow.com')

  // Abort retrying if the resource doesn't exist
  if (response.status === 404) {
    throw new pRetry.AbortError(response.statusText)
  }

  return response.blob()
}

;(async () => {
  console.log(await pRetry(fetchPage, {retries: 5}))
})()
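p-retry forwards most of its options to the underlying retry module, so as far as I know you can also tune the backoff and observe failed attempts. A sketch (option names as documented by p-retry/retry):
;(async () => {
  console.log(await pRetry(fetchPage, {
    retries: 5,
    // the error passed to onFailedAttempt carries attemptNumber and retriesLeft
    onFailedAttempt: error => {
      console.log(`Attempt ${error.attemptNumber} failed. ${error.retriesLeft} retries left.`)
    },
    minTimeout: 1000, // backoff options like minTimeout/factor come from the retry module
    factor: 2
  }))
})()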

I don't like recursion unless it is really necessary, and managing an exploding number of dependencies is also an issue. Here is another alternative in TypeScript, which is easy to translate to JavaScript.
interface retryPromiseOptions<T> {
  retryCatchIf?: (response: T) => boolean,
  retryIf?: (response: T) => boolean,
  retries?: number
}

function retryPromise<T>(promise: () => Promise<T>, options: retryPromiseOptions<T>) {
  const { retryIf = (_: T) => false, retryCatchIf = (_: T) => true, retries = 1 } = options
  let _promise = promise();

  for (var i = 1; i < retries; i++)
    _promise = _promise.catch((value) => retryCatchIf(value) ? promise() : Promise.reject(value))
      .then((value) => retryIf(value) ? promise() : value);

  return _promise;
}
And use it this way...
retryPromise(() => fetch(url), {
  retryIf: (response: Response) => true, // you could check before trying again
  retries: 5
}).then( ... my favorite things ... )
I wrote this for the fetch API on the browser, which does not reject on a 500. I did not implement a wait either. But, more importantly, the code shows how to use composition with promises to avoid recursion.
JavaScript version:
function retryPromise(promise, options) {
  const { retryIf, retryCatchIf, retries } = { retryIf: () => false, retryCatchIf: () => true, retries: 1, ...options };
  let _promise = promise();

  for (var i = 1; i < retries; i++)
    _promise = _promise.catch((value) => retryCatchIf(value) ? promise() : Promise.reject(value))
      .then((value) => retryIf(value) ? promise() : value);

  return _promise;
}
JavaScript usage:
retryPromise(() => fetch(url), {
  retryIf: (response) => true, // you could check before trying again
  retries: 5
}).then( ... my favorite things ... )
EDITS: Added js version, added retryCatchIf, fixed the loop start.

One can easily wrap fetch(...) in a loop and catch potential errors (fetch only rejects the returned promise on network errors and the like):
const RETRY_COUNT = 5;

async function fetchRetry(...args) {
  let count = RETRY_COUNT;
  while (count > 0) {
    try {
      return await fetch(...args);
    } catch (error) {
      // logging?
    }
    // logging / waiting?
    count -= 1;
  }
  throw new Error(`Too many retries`);
}
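If you also want to pause between attempts (the "// logging / waiting?" spot above), a minimal sketch with a fixed delay; the 500 ms value is just an example:
const RETRY_COUNT = 5;
const RETRY_DELAY_MS = 500; // example value

function wait(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

async function fetchRetryWithDelay(...args) {
  let count = RETRY_COUNT;
  while (count > 0) {
    try {
      return await fetch(...args);
    } catch (error) {
      // network-level failure: wait before the next attempt
      await wait(RETRY_DELAY_MS);
    }
    count -= 1;
  }
  throw new Error(`Too many retries`);
}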

Related

Continue on error in RxJs pipeable with mergeMap

I am doing some parallel HTTP GET requests with the RxJS pipe and the mergeMap operator.
When the first request fails (let's imagine /urlnotexists throws a 404 error), it stops all other requests.
I want it to keep querying all remaining urls, without calling the remaining mergeMap operators for the failed request.
I tried to play with throwError and catchError from RxJS, but without success.
index.js
const { from } = require('rxjs');
const { mergeMap, scan } = require('rxjs/operators');

const request = {
  get: url => {
    return new Promise((resolve, reject) => {
      setTimeout(() => {
        if (url === '/urlnotexists') { return reject(new Error(url)); }
        return resolve(url);
      }, 1000);
    });
  }
};

(async function() {
  await from([
    '/urlexists',
    '/urlnotexists',
    '/urlexists2',
    '/urlexists3',
  ])
    .pipe(
      mergeMap(async url => {
        try {
          console.log('mergeMap 1:', url);
          const val = await request.get(url);
          return val;
        } catch(err) {
          console.log('err:', err.message);
          // a throw here prevents all remaining request.get() calls from being tried
        }
      }),
      mergeMap(async val => {
        // should not pass here if the previous request.get() failed
        console.log('mergeMap 2:', val);
        return val;
      }),
      scan((acc, val) => {
        // should not pass here if the previous request.get() failed
        acc.push(val);
        return acc;
      }, []),
    )
    .toPromise()
    .then(merged => {
      // should have merged /urlexists, /urlexists2 and /urlexists3
      // even if /urlnotexists failed
      console.log('merged:', merged);
    })
    .catch(err => {
      console.log('catched err:', err);
    });
})();
$ node index.js
mergeMap 1: /urlexists
mergeMap 1: /urlnotexists
mergeMap 1: /urlexists2
mergeMap 1: /urlexists3
err: /urlnotexists
mergeMap 2: /urlexists
mergeMap 2: undefined <- I didn't want this mergeMap to have been called
mergeMap 2: /urlexists2
mergeMap 2: /urlexists3
merged: [ '/urlexists', undefined, '/urlexists2', '/urlexists3' ]
I expect to make concurrent GET requests and reduce their respective values into one object at the end.
But if an error occurs, I want it not to interrupt my pipe, just to be logged.
Any advice?
If you want to use RxJS you should add error handling with catchError and any additional tasks to a single request before you execute all your requests concurrently with forkJoin.
const { of, from, forkJoin } = rxjs;
const { catchError, tap } = rxjs.operators;

// your promise factory, unchanged (just shorter)
const request = {
  get: url => {
    return new Promise((resolve, reject) => setTimeout(
      () => url === '/urlnotexists' ? reject(new Error(url)) : resolve(url), 1000
    ));
  }
};

// a single rxjs request with error handling
const fetch$ = url => {
  console.log('before:', url);
  return from(request.get(url)).pipe(
    // add any additional operator that should be executed for each request here
    tap(val => console.log('after:', val)),
    catchError(error => {
      console.log('err:', error.message);
      return of(undefined);
    })
  );
};

// concurrently executed rxjs requests
forkJoin(["/urlexists", "/urlnotexists", "/urlexists2", "/urlexists3"].map(fetch$))
  .subscribe(merged => console.log("merged:", merged));

<script src="https://unpkg.com/@reactivex/rxjs@6.5.3/dist/global/rxjs.umd.js"></script>
If you are willing to forego RxJS and just solve this with async/await, it is very straightforward:
const urls = ['/urlexists', '/urlnotexists', '/urlexists2', '/urlexists3'];
const promises = urls.map(url => request(url));
const resolved = await Promise.allSettled(promises);

// print out errors
resolved.forEach((r, i) => {
  if (r.status === 'rejected') {
    console.log(`${urls[i]} failed: ${r.reason}`)
  }
});

// get the success results
const merged = resolved.filter(r => r.status === 'fulfilled').map(r => r.value);
console.log('merged', merged);
This makes use of the proposed Promise.allSettled helper method. If your environment does not have this method, you can implement it as shown in this answer.
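A minimal stand-in (a sketch, not the full spec behaviour) can be built from Promise.all:
const allSettled = promises =>
  Promise.all(promises.map(p =>
    Promise.resolve(p).then(
      value => ({ status: 'fulfilled', value }),
      reason => ({ status: 'rejected', reason })
    )
  ));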

How to apply Promise.resolve for code that needs to be atomic

I am working on a partner manager, and some of the code needs to be atomic because there is currently a race condition and it cannot work when two clients call the same resource at the same time. The retrievePartners method returns partners and should be atomic. Basically, partners are the limited resource, and the providing mechanism should deal with only one client (asking for partners) at a time.
I have been told the code below works for atomic operations, since JavaScript is single-threaded by nature.
let processingQueue = Promise.resolve();

function doStuffExclusively() {
  processingQueue = processingQueue.then(() => {
    return fetch('http://localhost', {method: 'PUT', body: ...});
  }).catch(function(e) {
    throw e;
  });
  return processingQueue;
}
doStuffExclusively()
doStuffExclusively()
doStuffExclusively()
However this code is basic; my code has some awaits that call other awaits, and so on. I want to apply that mechanism to the code below, but I really don't know how. I tried a few things but they did not work. I cannot get await to work inside a then statement.
I am also confused about whether the code above returns true in the then part of processingQueue. In my case, I return an array or throw an error message. Should I return something to get it to work as above?
Here is the function I want to make atomic, just like the code above. I tried to put everything in this function in the then section, before the return statement, but it did not work.
export class Workout {
  constructor (config) {
    this.instructorPeer = new jet.Peer(config)
    this.instructorPeer.connect()
  }

  async createSession (partnerInfo) {
    const partners = { chrome: [], firefox: [], safari: [], ie: [] }
    const appropriatePartners = await this.retrievePartners(partnerInfo)
    Object.keys(appropriatePartners).forEach(key => {
      appropriatePartners[key].forEach(partner => {
        const newPartner = new Partner(this.instructorPeer, partner.id)
        partners[key].push(newPartner)
      })
    })
    return new Session(partners)
  }

  async retrievePartners (capabilities) {
    const appropriatePartners = { chrome: [], firefox: [], safari: [], ie: [] }
    const partners = await this.getAllPartners()
    // first check if there are appropriate Partners available
    Object.keys(capabilities.type).forEach(key => {
      let typeNumber = parseInt(capabilities.type[key])
      for (let i = 0; i < typeNumber; i++) {
        partners.forEach((partner, i) => {
          if (
            key === partner.value.type &&
            partner.value.isAvailable &&
            appropriatePartners[key].length < typeNumber
          ) {
            appropriatePartners[key].push(partner)
            console.log(appropriatePartners[key].length)
          }
        })
        if (appropriatePartners[key].length < typeNumber) {
          throw new Error(
            'Sorry there are no appropriate Partners for this session'
          )
        }
      }
    })
    Object.keys(appropriatePartners).forEach(key => {
      appropriatePartners[key].forEach(partner => {
        this.instructorPeer.set('/partners/' + partner.id + '/states/', {
          isAvailable: false
        })
      })
    })
    return appropriatePartners
  }

  async getAllPartners (capabilities) {
    const partners = []
    const paths = await this.instructorPeer.get({
      path: { startsWith: '/partners/' }
    })
    paths.forEach((path, i) => {
      if (path.fetchOnly) {
        let obj = {}
        obj.value = path.value
        obj.id = path.path.split('/partners/')[1]
        obj.value.isAvailable = paths[i + 1].value.isAvailable
        partners.push(obj)
      }
    })
    return partners
  }
}
Here is the code that calls it
async function startTest () {
  const capabilities = {
    type: {
      chrome: 1
    }
  }
  const workoutServerConfig = {
    url: 'ws://localhost:8090'
  }
  const workout = createWorkout(workoutServerConfig)
  const session = await workout.createSession(capabilities)
  const session1 = await workout.createSession(capabilities)
}
And here is what I tried so far, which did not work; session is not defined at all:
let processingQueue = Promise.resolve()

export class Workout {
  constructor (config) {
    this.instructorPeer = new jet.Peer(config)
    this.instructorPeer.connect()
    this.processingQueue = Promise.resolve()
  }

  async createSession (partnerInfo) {
    this.processingQueue = this.processingQueue.then(() => {
      const partners = { chrome: [], firefox: [], safari: [], ie: [] }
      const appropriatePartners = this.retrievePartners(partnerInfo)
      Object.keys(appropriatePartners).forEach(key => {
        appropriatePartners[key].forEach(partner => {
          const newPartner = new Partner(this.instructorPeer, partner.id)
          partners[key].push(newPartner)
        })
      })
      return new Session(partners)
    })
  }
}
This is promise-based locking, based on the facts that:
1) the .then() handler will only be called once the lock has resolved, and
2) once the .then() handler begins executing, no other JS code will run until it yields, due to JS's run-to-completion execution model.
The overall structure of the approach you cited is correct.
The main issue I see with your code is that const appropriatePartners = this.retrievePartners(partnerInfo) will evaluate to a promise, because retrievePartners is async. You want to:
const appropriatePartners = await this.retrievePartners(partnerInfo).
This will cause your lock's executor to block on the retrievePartners call, whereas currently you are simply grabbing a promise wrapping that call's eventual return value.
Edit: See jsfiddle for an example.
In sum:
1) make the arrow function handling lock resolution async
2) make sure it awaits the return value of this.retrievePartners, otherwise you will be operating on the Promise, not the resolved value.
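Putting both points together, a sketch of the queued createSession (assuming the Partner and Session classes from the question; returning the queue so callers receive the Session is my addition):
async createSession (partnerInfo) {
  this.processingQueue = this.processingQueue.then(async () => {
    const partners = { chrome: [], firefox: [], safari: [], ie: [] }
    // await here, so the lock is held until retrievePartners has finished
    const appropriatePartners = await this.retrievePartners(partnerInfo)
    Object.keys(appropriatePartners).forEach(key => {
      appropriatePartners[key].forEach(partner => {
        partners[key].push(new Partner(this.instructorPeer, partner.id))
      })
    })
    return new Session(partners)
  })
  return this.processingQueue
}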

How do I get a reference to the current Promise within then()

In the code below, I would like to check that the callback is executing from the latestRequest, so I am checking thisPromise to see if it is the same as latestRequest. Obviously thisPromise doesn't work. Is there a way to get the current Promise?
let latestRequest = MyLib
  .retrieve(getFilteredQuery(filters, queries, user))
  .then(res => {
    // Checking whether this is the latest request handler
    if (latestRequest === thisPromise) {
      updateData(res)
    }
  })
  .catch(err => {
    console.error(err)
  })
My use case is for handling requests from an API. I only want the data to be updated for the latest request. The requests can take very different times to return, and sometimes an earlier request is coming back later and overwriting the latest request. If you know a good way to handle this, please let me know.
Implementation within a closure:
const run = (() => {
  let currentPromise;
  return () => {
    const p = new Promise((resolve, reject) => {
      // run an asynchronous process and resolve like resolve(results)
    })
      .then(results => {
        if (p === currentPromise) {
          // process results here
        }
      })
    currentPromise = p;
  }
})()
A similar alternative using a class:
class Request {
  static #currentPromise;

  static run() {
    const p = new Promise((resolve, reject) => {
      // run an asynchronous process and resolve like resolve(results)
    })
      .then(results => {
        if (p === Request.#currentPromise) {
          // process results here
        }
      })
    Request.#currentPromise = p;
  }
}
You could test by implementing with simulated latency:
const run = (() => {
  let currentPromise;
  return (timeout) => {
    const p = new Promise((resolve, reject) => {
      setTimeout(_ => resolve(timeout), timeout);
    })
      .then(data => {
        if (p === currentPromise) {
          console.log('latest request', data);
        }
      })
    currentPromise = p;
  }
})()

run(1000); // 1s request
run( 500);
run(  10); // last request, 10 ms
There is no way of obtaining a reference to the promise object that .then was called on from within the handler supplied to .then.
One suggestion is to assign the handler a sequence number and check if it is the last one issued, from within a closure. Untested example:
let latestRequestId = 0;

let checkLatest = () => {
  let thisRequest = ++latestRequestId;
  return (res => {
    // Checking whether this is the latest request handler
    if (latestRequestId === thisRequest) {
      updateData(res)
    }
  })
}

let latestRequest = MyLib
  .retrieve(getFilteredQuery(filters, queries, user))
  .then(checkLatest())
  .catch(err => {
    console.error(err)
  })

testing promises causing undefined values

I am getting this error when I am testing my code:
1) Sourcerer Testing: getStatusCode :
Error: Expected undefined to equal 200
I'm not sure why I am getting undefined in my tests, but when I run the code I get 200. It might be from not handling promises properly.
Test code:
import expect from 'expect';
import rp from 'request-promise';
import Sourcerer from './sourcerer';

describe("Sourcerer Testing: ", () => {
  let sourcerer = new Sourcerer(null);
  const testCases = {
    "https://www.google.com": 200,
    // "www.google.com":
  };

  describe("getStatusCode", () => {
    it("", () => {
      for (let testCase in testCases) {
        sourcerer.setSourcererUrl(testCase);
        expect(sourcerer.url).toEqual(testCase);
        expect(sourcerer.getStatusCode()).toEqual(testCases[testCase]);
      }
    });
  });
});
code:
import rp from 'request-promise';

export default class Sourcerer {
  constructor(url) {
    this.options = {
      method: 'GET',
      url,
      resolveWithFullResponse: true
    };
    this.payload = {};
  }

  setSourcererUrl(url) {
    this.url = url;
  }

  getSourcererUrl() {
    return this.url;
  }

  analyzeSourcePage() {
    rp(this.options).then((res) => {
      console.log(res);
    }).catch((err) => {
      console.log("ERROR");
      throw(err);
    });
  }

  getStatusCode() {
    rp(this.options).then((res) => {
      console.log(res.statusCode);
      return res.statusCode;
    }).catch((err) => {
      console.log("STATUS CODE ERROR");
      return 0;
    });
  }
}
getStatusCode doesn't return anything. And it should return a promise:
getStatusCode() {
  return rp(this.options)...
}
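In other words, the complete method could look like this (keeping the original logging):
getStatusCode() {
  return rp(this.options).then((res) => {
    console.log(res.statusCode);
    return res.statusCode;
  }).catch((err) => {
    console.log("STATUS CODE ERROR");
    return 0;
  });
}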
The spec will fail in this case, because it expects a promise object to equal 200.
It is even more complicated because the spec is async and there are several promises that should be waited for before the spec completes. It should be something like:
it("", () => {
let promises = [];
for (let testCase in testCases) {
sourcerer.setSourcererUrl(testCase);
let statusCodePromise = sourcerer.getStatusCode()
.then((statusCode) => {
expect(sourcerer.url).toEqual(testCase);
expect(statusCode).toEqual(testCases[testCase]);
})
.catch((err) => {
throw err;
});
promises.push(statusCodePromise);
}
return promises;
});
co offers an awesome alternative to Promise.all for flow control:
it("", co.wrap(function* () {
for (let testCase in testCases) {
sourcerer.setSourcererUrl(testCase);
expect(sourcerer.url).toEqual(testCase);
let statusCode = yield sourcerer.getStatusCode();
expect(statusCode).toEqual(testCases[testCase]);
}
});
Disclaimer: I wouldn't run a for-loop in a single it(), since I want to know which iteration failed. Granted, there are ways to achieve that, but that is another story. Also, this very much depends on your test runner, but here are some rules of thumb I find useful.
For what you have asked: the test should not evaluate until the promise is resolved. Sometimes (e.g. in mocha) that means returning the promise from the it() callback. Sometimes it means getting a done function and calling it when you are ready for the test to evaluate. If you provide more info on your test framework, I may be able to help (others certainly would be able to).
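For example, with a done callback (mocha/jasmine style), a minimal sketch for a single URL:
it("returns 200 for google", (done) => {
  sourcerer.setSourcererUrl("https://www.google.com");
  sourcerer.getStatusCode()
    .then(statusCode => {
      expect(statusCode).toEqual(200);
      done();
    })
    .catch(done); // passing the error to done fails the test
});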

Limit concurrency of pending promises

I'm looking for a promise function wrapper that can limit / throttle a given promise-returning function so that only a set number of its promises are running at a given time.
In the case below, the delayPromise calls should never run concurrently; they should all run one at a time in first-come-first-served order.
import Promise from 'bluebird'

function _delayPromise (seconds, str) {
  console.log(str)
  return Promise.delay(seconds)
}

let delayPromise = limitConcurrency(_delayPromise, 1)

async function a() {
  await delayPromise(100, "a:a")
  await delayPromise(100, "a:b")
  await delayPromise(100, "a:c")
}

async function b() {
  await delayPromise(100, "b:a")
  await delayPromise(100, "b:b")
  await delayPromise(100, "b:c")
}

a().then(() => console.log('done'))
b().then(() => console.log('done'))
Any ideas on how to get a queue like this set up?
I have a "debounce" function from the wonderful Benjamin Gruenbaum. I need to modify this to throttle a promise based on it's own execution and not the delay.
export function promiseDebounce (fn, delay, count) {
  let working = 0
  let queue = []

  function work () {
    if ((queue.length === 0) || (working === count)) return
    working++
    Promise.delay(delay).tap(function () { working-- }).then(work)
    var next = queue.shift()
    next[2](fn.apply(next[0], next[1]))
  }

  return function debounced () {
    var args = arguments
    return new Promise(function (resolve) {
      queue.push([this, args, resolve])
      if (working < count) work()
    }.bind(this))
  }
}
I don't think there are any libraries to do this, but it's actually quite simple to implement yourself:
function sequential(fn) { // limitConcurrency(fn, 1)
  let q = Promise.resolve();
  return function(x) {
    const p = q.then(() => fn(x));
    q = p.reflect();
    return p;
  };
}
For multiple concurrent requests it gets a little trickier, but can be done as well.
function limitConcurrency(fn, n) {
  if (n == 1) return sequential(fn); // optimisation
  let q = Promise.resolve();
  const active = new Set();
  const fst = t => t[0];
  const snd = t => t[1];
  return function(x) {
    function put() {
      const p = fn(x);
      const a = p.reflect().then(() => {
        active.delete(a);
      });
      active.add(a);
      return [Promise.race(active), p];
    }
    if (active.size < n) {
      const t = put();
      q = fst(t);
      return snd(t);
    } else {
      const r = q.then(put);
      q = r.then(fst);
      return r.then(snd);
    }
  };
}
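A usage sketch, assuming bluebird promises since the wrapper relies on .reflect():
const task = ms => Promise.delay(ms).then(() => console.log(`finished after ${ms} ms`));
const limitedTask = limitConcurrency(task, 2); // at most 2 running at once

limitedTask(300);
limitedTask(200);
limitedTask(100); // starts only once one of the first two has settled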
Btw, you might want to have a look at the actor model and CSP. They can simplify dealing with such things; there are a few JS libraries for them out there as well.
Example
import Promise from 'bluebird'

function sequential(fn) {
  var q = Promise.resolve();
  return (...args) => {
    const p = q.then(() => fn(...args))
    q = p.reflect()
    return p
  }
}

async function _delayPromise (seconds, str) {
  console.log(`${str} started`)
  await Promise.delay(seconds)
  console.log(`${str} ended`)
  return str
}

let delayPromise = sequential(_delayPromise)

async function a() {
  await delayPromise(100, "a:a")
  await delayPromise(200, "a:b")
  await delayPromise(300, "a:c")
}

async function b() {
  await delayPromise(400, "b:a")
  await delayPromise(500, "b:b")
  await delayPromise(600, "b:c")
}

a().then(() => console.log('done'))
b().then(() => console.log('done'))
// --> with sequential()
// $ babel-node test/t.js
// a:a started
// a:a ended
// b:a started
// b:a ended
// a:b started
// a:b ended
// b:b started
// b:b ended
// a:c started
// a:c ended
// b:c started
// done
// b:c ended
// done
// --> without calling sequential()
// $ babel-node test/t.js
// a:a started
// b:a started
// a:a ended
// a:b started
// a:b ended
// a:c started
// b:a ended
// b:b started
// a:c ended
// done
// b:b ended
// b:c started
// b:c ended
// done
Use the throttled-promise module:
https://www.npmjs.com/package/throttled-promise
var ThrottledPromise = require('throttled-promise'),
    promises = [
      new ThrottledPromise(function(resolve, reject) { ... }),
      new ThrottledPromise(function(resolve, reject) { ... }),
      new ThrottledPromise(function(resolve, reject) { ... })
    ];

// Run promises, but only 2 parallel
ThrottledPromise.all(promises, 2)
  .then( ... )
  .catch( ... );
I had the same problem, so I wrote a library to implement it. Code is here. I created a queue to hold all the tasks. When you push some tasks onto the queue, the first few at the head of the queue are popped and run. Once one promise settles, the next task in the queue is popped and run, again and again, until the queue has no tasks left. You can check the code for details. I hope this library helps you.
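The library itself is not reproduced here, but the mechanism described above can be sketched roughly like this (a simplified illustration, not the library's actual code):
class PromiseQueue {
  constructor(concurrency) {
    this.concurrency = concurrency;
    this.running = 0;
    this.queue = []; // pending entries: { task, resolve, reject }
  }
  push(task) { // task is a function returning a promise
    return new Promise((resolve, reject) => {
      this.queue.push({ task, resolve, reject });
      this._next();
    });
  }
  _next() {
    while (this.running < this.concurrency && this.queue.length) {
      const { task, resolve, reject } = this.queue.shift();
      this.running++;
      task().then(resolve, reject).then(() => {
        this.running--;
        this._next(); // a slot freed up, start the next queued task
      });
    }
  }
}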
Advantages
You can define the number of concurrent promises (near-simultaneous requests).
Consistent flow: once one promise resolves, another request starts; no need to guess the server's capacity.
Robust against data choke: if the server stops for a moment, it will just wait, and the next tasks will not start just because the clock allowed it.
Does not rely on a 3rd-party module; it is vanilla Node.js.
1st: make https a promise, so we can use await to retrieve data (removed from the example).
2nd: create a promise scheduler that submits another request as any promise gets resolved.
3rd: make the calls.
Limit the request rate by limiting the number of concurrent promises:
const https = require('https')

function httpRequest(method, path, body = null) {
  const reqOpt = {
    method: method,
    path: path,
    hostname: 'dbase.ez-mn.net',
    headers: {
      "Content-Type": "application/json",
      "Cache-Control": "no-cache"
    }
  }
  if (method == 'GET') reqOpt.path = path + '&max=20000'
  if (body) reqOpt.headers['Content-Length'] = Buffer.byteLength(body);
  return new Promise((resolve, reject) => {
    const clientRequest = https.request(reqOpt, incomingMessage => {
      let response = {
        statusCode: incomingMessage.statusCode,
        headers: incomingMessage.headers,
        body: []
      };
      let chunks = ""
      incomingMessage.on('data', chunk => { chunks += chunk; });
      incomingMessage.on('end', () => {
        if (chunks) {
          try {
            response.body = JSON.parse(chunks);
          } catch (error) {
            reject(error)
          }
        }
        console.log(response)
        resolve(response);
      });
    });
    clientRequest.on('error', error => { reject(error); });
    if (body) { clientRequest.write(body) }
    clientRequest.end();
  });
}

const asyncLimit = (fn, n) => {
  const pendingPromises = new Set();
  return async function(...args) {
    while (pendingPromises.size >= n) {
      await Promise.race(pendingPromises);
    }
    const p = fn.apply(this, args);
    const r = p.catch(() => {});
    pendingPromises.add(r);
    await r;
    pendingPromises.delete(r);
    return p;
  };
};

// httpRequest is the function whose request rate we want to limit
// in this case, we keep 8 requests running while not blocking other tasks (concurrency)
let ratedhttpRequest = asyncLimit(httpRequest, 8);

// this is our dataset and caller
let process = async () => {
  const patchData = [
    {path: '/rest/slots/80973975078587', body: {score: 3}},
    {path: '/rest/slots/809739750DFA95', body: {score: 5}},
    {path: '/rest/slots/AE0973750DFA96', body: {score: 5}}]
  for (let i = 0; i < patchData.length; i++) {
    ratedhttpRequest('PATCH', patchData[i].path, patchData[i].body)
  }
  console.log('completed')
}
process()
The classic way of running async processes in series is to use async.js and its async.series(). If you prefer promise-based code then there is a promise version of async.js: async-q.
With async-q you can once again use series:
async.series([
  function() { return delayPromise(100, "a:a") },
  function() { return delayPromise(100, "a:b") },
  function() { return delayPromise(100, "a:c") }
])
.then(function() {
  console.log('done');
});
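Here a_array and b_array (used below) are assumed to be arrays of promise-returning functions like the ones above, for example:
const a_array = [
  () => delayPromise(100, "a:a"),
  () => delayPromise(100, "a:b"),
  () => delayPromise(100, "a:c")
];
const b_array = [
  () => delayPromise(100, "b:a"),
  () => delayPromise(100, "b:b"),
  () => delayPromise(100, "b:c")
];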
Running two of them at the same time will run a and b concurrently but within each they will be sequential:
// these two will run concurrently but each will run
// their array of functions sequentially:
async.series(a_array).then(()=>console.log('a done'));
async.series(b_array).then(()=>console.log('b done'));
If you want to run b after a then put it in the .then():
async.series(a_array)
  .then(() => {
    console.log('a done');
    return async.series(b_array);
  })
  .then(() => {
    console.log('b done');
  });
If instead of running each sequentially you want to limit each to run a set number of processes concurrently then you can use parallelLimit():
// Run two promises at a time:
async.parallelLimit(a_array,2)
.then(()=>console.log('done'));
Read up on the async-q docs: https://github.com/dbushong/async-q/blob/master/README.md
