How to pause between two asynchronous actions? - javascript

I need to make a pause between "/system/backup/save" and "/file/print", because otherwise the backup is not completed before the contents of the "/file" directory are listed.
Right now the code performs the backup, but it gives me a list of files that does not yet include the new backup.
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');

async function backup() {
    return new Promise(function (resolve, reject) {
        for (let elem of hosts) {
            const conn = new RouterOSAPI({
                host: elem.host,
                user: elem.user,
                password: elem.password
            })
            conn.connect()
                .then((client) => {
                    return conn.write(["/system/backup/save",]).then((data) => {
                        resolve('COMPLETE - OK');
                    }).catch((err) => {
                        reject('ERROR!');
                    });
                    sleep(5000);
                }).then(() => {
                    return conn.write("/file/print").then((data2) => {
                        console.log(data2)
                        resolve('CHECK - OK');
                        conn.close();
                    }).catch((err) => {
                        reject('ERROR!');
                    });
                }).catch((err) => {
                    reject('ERROR CONNECT TO ' + elem.name);
                });
        }
    });
}
backup();

Generally, using a fixed delay to wait for completion of an asynchronous process is an anti-pattern: you'll always end up either not waiting long enough or waiting unnecessarily long. The former is of course a bigger problem than the latter, but both are problems. If you have any means of having the other end report completion of the backup, that would be your best bet. Looking at the documentation, it seems like conn.write's promise shouldn't be fulfilled until the operation is complete, but I only skimmed the docs, so maybe that's not the case.
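If the router doesn't offer a completion signal, a somewhat better fallback than a fixed delay is to poll "/file/print" until the backup shows up. This is only a sketch under assumptions: that the entries returned by "/file/print" are objects with a name property, and that the backup file's name is predictable; check the node-routeros docs before relying on it.

// Hypothetical polling helper (not from the original code): retries
// "/file/print" until a file whose name contains `name` appears,
// or fails after `maxTries` attempts. Uses the promisified `sleep` from above.
async function waitForBackup(conn, name, maxTries = 10, intervalMs = 1000) {
    for (let i = 0; i < maxTries; i++) {
        const files = await conn.write("/file/print");
        // Assumption: each entry has a `name` property
        if (files.some(f => f.name && f.name.includes(name))) {
            return files;
        }
        await sleep(intervalMs);
    }
    throw new Error(`Backup "${name}" did not appear after ${maxTries} tries`);
}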
Other than that:
Don't create the promise explicitly; your async function automatically creates a promise (but you may not want an async function here anyway).
Don't mix .then/.catch handlers with async functions; use await.
For instance, here's a version that runs the backups and such in parallel and returns an array giving success/failure via allSettled:
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');

async function backup() {
    // Run the hosts in parallel
    return await Promise.allSettled(hosts.map(async (elem) => {
        let conn;
        try {
            const c = new RouterOSAPI({
                host: elem.host,
                user: elem.user,
                password: elem.password
            });
            const client = await c.connect();
            conn = c;
            await conn.write(["/system/backup/save"]);
            await sleep(5000); // Only if really unavoidable because the
                               // backup continues even after the promise
                               // from `write` is fulfilled
            await conn.write("/file/print");
            conn = null;
            c.close();
        } catch (e) {
            if (conn) {
                try {
                    conn.close();
                } catch {} // Don't let errors in close shadow previous errors
            }
            throw e;
        }
    }));
}
backup()
    .then(results => {
        // Check for status = "rejected" entries in results and report the errors
    });
But note that since that function just returns the promise from allSettled, you might not want an async function at all:
const RouterOSAPI = require("node-routeros").RouterOSAPI;
const sleep = require('util').promisify(setTimeout);
var hosts = require('./config.json');

function backup() {
    // Run the hosts in parallel
    return Promise.allSettled(hosts.map(async (elem) => {
        let conn;
        try {
            const c = new RouterOSAPI({
                host: elem.host,
                user: elem.user,
                password: elem.password
            });
            const client = await c.connect();
            conn = c;
            await conn.write(["/system/backup/save"]);
            await sleep(5000); // Only if really unavoidable because the
                               // backup continues even after the promise
                               // from `write` is fulfilled
            await conn.write("/file/print");
            conn = null;
            c.close();
        } catch (e) {
            if (conn) {
                try {
                    conn.close();
                } catch {} // Don't let errors in close shadow previous errors
            }
            throw e;
        }
    }));
}
backup()
    .then(results => {
        // Check for status = "rejected" entries in results and report the errors
    });
(There's a subtle difference between those two around what happens if hosts.map throws an error — perhaps because hosts isn't an array — but it's probably not important. The former returns a rejected promise, the latter throws a synchronous error.)
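To make that difference concrete, here is a contrived sketch (viaAsync/viaPlain are illustrative names, not from the answer):

// The async version turns the synchronous throw from `hosts.map` into a rejection:
async function viaAsync(hosts) {
    return Promise.allSettled(hosts.map(h => h));
}
viaAsync(null).catch(e => console.log('rejected:', e.message));

// The plain version lets the same throw escape synchronously:
function viaPlain(hosts) {
    return Promise.allSettled(hosts.map(h => h));
}
try {
    viaPlain(null);
} catch (e) {
    console.log('thrown:', e.message);
}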

Related

Running two browser instances in parallel for same list of websites in Puppeteer

I wrote JavaScript code for a web crawler that scrapes data from a list of websites (in a CSV file) in a single browser instance (code below). Now I want to modify the code so that every website in the list runs in parallel in two browser instances at the same time. For example, a website www.a.com in the list should run in two browser instances in parallel, and the same goes for the rest of the websites. If anyone can help me, please do; I would be very thankful.
(async () => {
    require("dotenv").config();
    if (!process.env.PROXY_SPKI_FINGERPRINT) {
        throw new Error("PROXY_SPKI_FINGERPRINT is not defined in environment.");
    }
    const fs = require("fs");
    const fsPromises = fs.promises;
    const pptr = require("puppeteer");
    const browser = await pptr.launch({
        args: [
            "--proxy-server=https://127.0.0.1:8000",
            "--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
            "--disable-web-security",
        ],
        // headless: false,
    });
    const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
        .toString()
        .split("\n")
        .map(line => line.split(",")[1])
        .filter(s => s);
    for (let i in sites) {
        const site = sites[i];
        console.log(`[${i}] ${site}`);
        try {
            await fsPromises.appendFile("data.txt", JSON.stringify(await crawl(browser, site)) + "\n");
        } catch (e) {
            console.error(e);
        }
    }
    await browser.close();

    async function crawl(browser, site) {
        const page = await browser.newPage();
        try {
            const grepResult = [];
            page.on("request", async request => {
                request.continue();
            })
            page.on("response", async response => {
                try {
                    if (response.request().resourceType() === "script" &&
                        response.headers()["content-type"] &&
                        response.headers()["content-type"].includes("javascript")) {
                        const js = await response.text();
                        const grepPartResult = grepMagicWords(js);
                        grepResult.push([response.request().url(), grepPartResult]);
                    }
                } catch (e) {}
            });
            await page.setRequestInterception(true);
            try {
                await page.goto("http://" + site, {waitUntil: "load", timeout: 60000});
                await new Promise(resolve => { setTimeout(resolve, 10000); });
            } catch (e) { console.error(e); }
            const [flows, url] = await Promise.race([
                page.evaluate(() => [J$.FLOWS, document.URL]),
                new Promise((_, reject) => { setTimeout(() => { reject(); }, 5000); })
            ]);
            return {url: url, grepResult: grepResult, flows: flows};
        } finally {
            await page.close();
        }

        function grepMagicWords(js) {
            var re = /(?:\'|\")(?:g|s)etItem(?:\'|\")/g, match, result = [];
            while (match = re.exec(js)) {
                result.push(js.substring(match.index - 100, match.index + 100));
            }
            return result;
        }
    }
})();
You can launch multiple browsers and run them in parallel. You would have to restructure your app slightly for that. Create a wrapper for crawl which launches it with a new browser instance. I created crawlNewInstance which does that for you. You would also need to run the crawlNewInstance() calls in parallel.
Check out this code:
const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
    .toString()
    .split("\n")
    .map(line => line.split(",")[1])
    .filter(s => s);

const crawlerProms = sites.map(async (site, index) => {
    try {
        console.log(`[${index}] ${site}`);
        await fsPromises.appendFile("data.txt", JSON.stringify(await crawlNewInstance(site)) + "\n");
    } catch (e) {
        console.error(e);
    }
});

// await all the crawlers!
await Promise.all(crawlerProms)

async function crawlNewInstance(site) {
    const browser = await pptr.launch({
        args: [
            "--proxy-server=https://127.0.0.1:8000",
            "--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
            "--disable-web-security",
        ],
        // headless: false,
    });
    const result = await crawl(browser, site)
    await browser.close()
    return result
}
Optional
The above basically answers the question, but if you want to go further (I was on a roll and had nothing to do :)):
If you have plenty of pages which you want to crawl in parallel, and you want to limit the amount of parallel requests, for example, you could use a queue:
var { EventEmitter } = require('events')

class AsyncQueue extends EventEmitter {
    limit = 2
    enqueued = []
    running = 0

    constructor(limit) {
        super()
        this.limit = limit
    }

    isEmpty() {
        return this.enqueued.length === 0
    }

    // make sure to only pass `async` functions to this queue!
    enqueue(fn) {
        // add to queue
        this.enqueued.push(fn)
        // start a job. If max instances are already running it does nothing,
        // otherwise it runs a new job!
        this.next()
    }

    // if a job is done try starting a new one!
    done() {
        this.running--
        console.log('job done! remaining:', this.limit - this.running)
        this.next()
    }

    async next() {
        // emit once if the queue is empty.
        if (this.isEmpty()) {
            this.emit('empty')
            return
        }
        // if the limit is reached do nothing
        if (this.running >= this.limit) {
            console.log('queue full.. waiting!')
            return
        }
        this.running++
        console.log('running job! remaining slots:', this.limit - this.running)
        // first in, first out! so take the first element in the array.
        const job = this.enqueued.shift()
        try {
            await job()
        } catch (err) {
            console.log('Job failed!', err)
            this.emit('error', err)
        }
        // job is done!
        // done() will start the next job if any are available!
        this.done()
    }
}
The queue could be utilised with this code:
// create queue
const limit = 3
const queue = new AsyncQueue(limit)

// listen for any errors..
queue.on('error', err => {
    console.error('error occurred in queue.', err)
})

for (let site of sites) {
    // enqueue all crawler jobs.
    // pass an async function which does whatever you want. In this case it crawls
    // a web page!
    queue.enqueue(async () => {
        await fsPromises.appendFile("data.txt", JSON.stringify(await crawlNewInstance(site)) + "\n");
    })
}

// helper for waiting for the queue!
const waitForQueue = async () => {
    if (queue.isEmpty()) return Promise.resolve()
    return new Promise((res, rej) => {
        queue.once('empty', res)
    })
}

await waitForQueue()
console.log('crawlers done!')
Even further with BrowserPool
It would also be possible to reuse your browser instances, so it would not be necessary to start a new browser instance for every crawling process. This can be done using this BrowserPool helper class:
var pptr = require('puppeteer')

async function launchPuppeteer() {
    return await pptr.launch({
        args: [
            "--proxy-server=https://127.0.0.1:8000",
            "--ignore-certificate-errors-spki-list=" + process.env.PROXY_SPKI_FINGERPRINT,
            "--disable-web-security",
        ],
        // headless: false,
    });
}

// manages browser connections.
// creates a pool on startup and allows getting references to
// the browsers!
class BrowserPool {
    browsers = []

    async get() {
        // return a browser if there is one!
        if (this.browsers.length > 0) {
            return this.browsers.splice(0, 1)[0]
        }
        // no browser available anymore..
        // launch a new one!
        return await launchPuppeteer()
    }

    // used for putting a browser back in the pool!
    handback(browser) {
        this.browsers.push(browser)
    }

    // shuts down all browsers!
    async shutDown() {
        for (let browser of this.browsers) {
            await browser.close()
        }
    }
}
You can then remove crawlNewInstance() and adjust the code so it finally looks like this:
const sites = (await fsPromises.readFile(process.argv[2])) // sites list in csv file
    .toString()
    .split("\n")
    .map(line => line.split(",")[1])
    .filter(s => s);

// create browser pool
const pool = new BrowserPool()

// create queue
const limit = 3
const queue = new AsyncQueue(limit)

// listen to errors:
queue.on('error', err => {
    console.error('error in the queue detected!', err)
})

// enqueue your jobs
for (let site of sites) {
    // enqueue an async function which takes a browser from the pool
    queue.enqueue(async () => {
        try {
            // get a browser and crawl a page!
            const browser = await pool.get()
            const result = await crawl(browser, site)
            await fsPromises.appendFile("data.txt", JSON.stringify(result) + "\n");
            // return the browser back to the pool so other crawlers can use it!
            pool.handback(browser)
        } catch (err) {
            console.error(err)
        }
    })
}

// helper for waiting for the queue!
const waitForQueue = async () => {
    // maybe jobs fail within a few milliseconds, so check first if it's already empty..
    if (queue.isEmpty()) return Promise.resolve()
    return new Promise((res, rej) => {
        queue.once('empty', res)
    })
}

// wait for the queue to finish :)
await waitForQueue()

// in the very end, shut down all browsers:
await pool.shutDown()
console.log('done!')
Have fun and feel free to leave a comment.

How to test status of a promise inside Promise.finally() without awaiting it in production code

I am using Promise.prototype.finally() (or try-catch-finally in an async function) in my production code to execute some follow-up code without changing the resolution/rejection status of the current promise.
However, in my Jest tests, I would like to detect that the Promise inside the finally block wasn't rejected.
edit: But I don't want to actually await the Promise in my "production" code (there I care only about errors re-thrown from catch, but not about errors from finally).
How can I test for that? Or at least, how can I mock Promise.prototype to reject the current promise on exceptions from finally?
E.g. when testing redux action creators, the tests pass even though there is a message about an unhandled Promise rejection:
https://codesandbox.io/s/reverent-dijkstra-nbcno?file=/src/index.test.js
test("finally", async () => {
const actions = await dispatchMock(add("forgottenParent", { a: 1 }));
const newState = actions.reduce(reducer, undefined);
expect(newState).toEqual({});
});
const dispatchMock = async thunk => {...};
// ----- simplified "production" code -----
const reducer = (state = {}, action) => state;
const add = parentId => async dispatch => {
dispatch("add start");
try {
await someFetch("someData");
dispatch("add success");
} catch (e) {
dispatch("add failed");
throw e;
} finally {
dispatch(get(parentId)); // tests pass if the promise here is rejected
}
};
const get = id => async dispatch => {
dispatch("get start");
try {
await someFetch(id);
dispatch("get success");
} catch (e) {
dispatch("get failed");
throw e;
}
};
const someFetch = async id => {
if (id === "forgottenParent") {
throw new Error("imagine I forgot to mock this request");
}
Promise.resolve(id);
};
dispatch(get(parentId)); // tests pass if an exception is thrown here
There is no exception thrown on that line. get(parentId) might return a rejected promise (or a pending promise that will get rejected later), but that's not an exception and won't affect control flow.
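A contrived sketch of that distinction (failingThunk is an illustrative name, not from the question's code): calling an async function that fails gives you a rejected promise at the call site; nothing is thrown until the promise is awaited.

const failingThunk = async () => { throw new Error("rejected later"); };

try {
    failingThunk(); // returns a rejected promise; nothing is thrown here,
                    // you only get an "unhandled rejection" warning later
} catch (e) {
    // never reached
}

(async () => {
    try {
        await failingThunk(); // awaiting surfaces the rejection as an exception
    } catch (e) {
        console.log("caught:", e.message); // "caught: rejected later"
    }
})();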
You might be looking for
const add = parentId => async dispatch => {
    dispatch("add start");
    try {
        await someFetch("someData");
        dispatch("add success");
    } catch (e) {
        dispatch("add failed");
        throw e;
    } finally {
        await dispatch(get(parentId));
    //  ^^^^^
    }
};
Notice that throwing exceptions from a finally block is not exactly a best practice though.
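A quick illustration of why (again just a sketch): an exception coming out of a finally block replaces the original result.

async function f() {
    try {
        throw new Error("original error");
    } finally {
        throw new Error("error from finally"); // this one wins; "original error" is silently lost
    }
}
f().catch(e => console.log(e.message)); // logs "error from finally"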
edit: more general solutions are available at https://stackoverflow.com/a/58634792/1176601
It is possible to store the Promise in a variable accessible in some helper function that is used only for the tests, e.g.:
export const _getPromiseFromFinallyInTests = () => _promiseFromFinally

let _promiseFromFinally

const add = parentId => async dispatch => {
    ...
    } finally {
        // not awaited here because I don't want to change the current Promise
        _promiseFromFinally = dispatch(get(parentId));
    }
};
and update the test to await the test-only Promise:
test("finally", async () => {
...
// but I want to fail the test if the Promise from finally is rejected
await _getPromiseFromFinallyInTests()
});

Async function does not wait for await function to end

I have an async function that does not work as expected. Here is the code:
const onCreateCoachSession = async (event, context) => {
    const { coachSessionID } = context.params;
    let coachSession = event.val();
    let opentokSessionId = 'prout';
    await opentok.createSession({ mediaMode: 'relayed' }, function(
        error,
        session
    ) {
        if (error) {
            console.log('Error creating session:', error);
        } else {
            opentokSessionId = session.sessionId;
            console.log('opentokSessionIdBefore: ', opentokSessionId);
            const sessionId = session.sessionId;
            console.log('Session ID: ' + sessionId);
            coachSession.tokbox = {
                archiving: true,
                sessionID: sessionId,
                sessionIsCreated: true,
            };
            db.ref(`coachSessions/${coachSessionID}`).update(coachSession);
        }
    });
    console.log('opentokSessionIdEnd: ', opentokSessionId);
};
My function onCreateCoachSession is triggered by a Firebase event (it's a cloud function), but it does not wait for opentok.createSession to finish, and I don't understand why, since I put an await before it.
Does anyone have an idea why my code goes straight to the last console.log (opentokSessionIdEnd)?
Here is a screenshot of the order of the console.logs:
It's probably a simple async/await problem that I missed, but I cannot see what.
Thanks in advance to the community for the help.
You're using createSession in callback mode (you're giving it a callback function), so it doesn't return a Promise, and therefore it can't be awaited.
Two solutions:
1/ Use createSession in Promise mode (if it supports that; see the docs):
let session = null;
try {
    session = await opentok.createSession({ mediaMode: 'relayed' })
} catch (err) {
    console.log('Error creating session:', err);
}
or 2/ await a Promise that you create yourself:
let session;
try {
    session = await new Promise((resolve, reject) => {
        opentok.createSession({ mediaMode: 'relayed' }, (error, session) => {
            if (error) {
                return reject(error)
            }
            resolve(session);
        })
    })
} catch (err) {
    console.log('Error creating session:', err);
    throw new Error(err);
}
opentokSessionId = session.sessionId;
console.log('opentokSessionIdBefore: ', opentokSessionId);
// ...
await means it will wait until the promise is resolved. I guess no promise is returned in this case. You can create your own promise to handle the case.
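For completeness, Node's util.promisify can build that wrapper for you. This is only a sketch assuming opentok.createSession follows the standard (error, result) callback convention; check the OpenTok docs:

const { promisify } = require('util');

// Bind so `this` inside createSession still points at the opentok client.
const createSessionAsync = promisify(opentok.createSession.bind(opentok));

// (inside an async function)
const session = await createSessionAsync({ mediaMode: 'relayed' });
console.log('Session ID:', session.sessionId);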

Getting 'Promise { <pending> }' message when using async-await to perform tcp client

I am implementing a TCP client using the telnet-client node module.
const Telnet = require('telnet-client')

async function wazuhRun(host) {
    let connection = new Telnet()
    let ErrCode = -1;
    let params = {
        host: host,
        port: 2345,
        negotiationMandatory: false,
        timeout: 1500
    }
    try {
        await connection.connect(params)
        ErrCode = 0;
    } catch (error) {
        ErrCode = -1;
    }
    return ErrCode;
}

const code = wazuhRun('linux345');
console.log(code);
On running the above code, I am getting Promise { <pending> }.
Please suggest what might be missing in my code.
Since you're using it outside of an async function, you need to treat it as a Promise:
wazuhRun('linux345').then((result) => console.log(result));
async functions are syntactic sugar around Promises; they get translated into Promises, which is why you get Promise { <pending> }.
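A minimal illustration of that equivalence (getCode/getCodeExplicit are illustrative names, not from the question):

// These two behave the same from the caller's point of view:
async function getCode() {
    await new Promise(r => setTimeout(r, 10)); // simulate async work
    return 0; // becomes the promise's resolution value
}

function getCodeExplicit() {
    return new Promise(r => setTimeout(() => r(0), 10));
}

console.log(getCode());                            // Promise { <pending> }
getCode().then(code => console.log(code));         // 0, once resolved
getCodeExplicit().then(code => console.log(code)); // 0, once resolved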
If you were calling it from inside another async function you could use:
const code = await wazuhRun('linux345');
EDIT:
About the null return: it could be that your function throws an error before your try/catch.
wazuhRun('linux345')
.then((result) => console.log(result))
.catch((error) => console.log(error));
By adding a catch handler to your Promise you'll be able to see all errors thrown from within your async function.
Use wazuhRun('linux345').then() and it will work fine:
const Telnet = require('telnet-client')

async function wazuhRun(host) {
    let connection = new Telnet()
    let ErrCode = -1;
    let params = {
        host: host,
        port: 2345,
        negotiationMandatory: false,
        timeout: 1500
    }
    try {
        await connection.connect(params)
        ErrCode = 0;
    } catch (error) {
        ErrCode = -1;
    }
    return ErrCode;
}

wazuhRun('linux345').then(data => {
    console.log(data);
}).catch(err => {
    console.log(err);
})

Limit concurrency of pending promises

I'm looking for a promise function wrapper that can limit/throttle when a given promise runs, so that only a set number of instances of that promise run at a given time.
In the case below, delayPromise should never run concurrently; the calls should all run one at a time in first-come-first-served order.
import Promise from 'bluebird'

function _delayPromise (seconds, str) {
    console.log(str)
    return Promise.delay(seconds)
}

let delayPromise = limitConcurrency(_delayPromise, 1)

async function a() {
    await delayPromise(100, "a:a")
    await delayPromise(100, "a:b")
    await delayPromise(100, "a:c")
}

async function b() {
    await delayPromise(100, "b:a")
    await delayPromise(100, "b:b")
    await delayPromise(100, "b:c")
}

a().then(() => console.log('done'))
b().then(() => console.log('done'))
Any ideas on how to get a queue like this set up?
I have a "debounce" function from the wonderful Benjamin Gruenbaum. I need to modify this to throttle a promise based on it's own execution and not the delay.
export function promiseDebounce (fn, delay, count) {
    let working = 0
    let queue = []
    function work () {
        if ((queue.length === 0) || (working === count)) return
        working++
        Promise.delay(delay).tap(function () { working-- }).then(work)
        var next = queue.shift()
        next[2](fn.apply(next[0], next[1]))
    }
    return function debounced () {
        var args = arguments
        return new Promise(function (resolve) {
            queue.push([this, args, resolve])
            if (working < count) work()
        }.bind(this))
    }
}
I don't think there are any libraries to do this, but it's actually quite simple to implement yourself:
function sequential(fn) { // limitConcurrency(fn, 1)
    let q = Promise.resolve();
    return function(x) {
        const p = q.then(() => fn(x));
        q = p.reflect();
        return p;
    };
}
For multiple concurrent requests it gets a little trickier, but it can be done as well.
function limitConcurrency(fn, n) {
    if (n == 1) return sequential(fn); // optimisation
    let q = Promise.resolve();
    const active = new Set();
    const fst = t => t[0];
    const snd = t => t[1];
    return function(x) {
        function put() {
            const p = fn(x);
            const a = p.reflect().then(() => {
                active.delete(a);
            });
            active.add(a);
            return [Promise.race(active), p];
        }
        if (active.size < n) {
            const t = put();
            q = fst(t);
            return snd(t);
        } else {
            const r = q.then(put);
            q = r.then(fst);
            return r.then(snd);
        }
    };
}
Btw, you might want to have a look at the actor model and CSP. They can simplify dealing with such things, and there are a few JS libraries for them out there as well.
Example
import Promise from 'bluebird'

function sequential(fn) {
    var q = Promise.resolve();
    return (...args) => {
        const p = q.then(() => fn(...args))
        q = p.reflect()
        return p
    }
}

async function _delayPromise (seconds, str) {
    console.log(`${str} started`)
    await Promise.delay(seconds)
    console.log(`${str} ended`)
    return str
}

let delayPromise = sequential(_delayPromise)

async function a() {
    await delayPromise(100, "a:a")
    await delayPromise(200, "a:b")
    await delayPromise(300, "a:c")
}

async function b() {
    await delayPromise(400, "b:a")
    await delayPromise(500, "b:b")
    await delayPromise(600, "b:c")
}

a().then(() => console.log('done'))
b().then(() => console.log('done'))
// --> with sequential()
// $ babel-node test/t.js
// a:a started
// a:a ended
// b:a started
// b:a ended
// a:b started
// a:b ended
// b:b started
// b:b ended
// a:c started
// a:c ended
// b:c started
// done
// b:c ended
// done
// --> without calling sequential()
// $ babel-node test/t.js
// a:a started
// b:a started
// a:a ended
// a:b started
// a:b ended
// a:c started
// b:a ended
// b:b started
// a:c ended
// done
// b:b ended
// b:c started
// b:c ended
// done
Use the throttled-promise module:
https://www.npmjs.com/package/throttled-promise
var ThrottledPromise = require('throttled-promise'),
    promises = [
        new ThrottledPromise(function(resolve, reject) { ... }),
        new ThrottledPromise(function(resolve, reject) { ... }),
        new ThrottledPromise(function(resolve, reject) { ... })
    ];

// Run promises, but only 2 in parallel
ThrottledPromise.all(promises, 2)
    .then( ... )
    .catch( ... );
I have the same problem, and I wrote a library to implement it (code is here). I created a queue to save all the promises. When you push promises onto the queue, the first several promises at the head of the queue are popped and run. Once one promise settles, the next promise in the queue is popped and run, again and again until the queue has no tasks left. You can check the code for details. I hope this library helps you.
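The core idea is roughly this (a simplified sketch, not the library's actual code; runLimited is an illustrative name):

// Run `tasks` (functions returning promises) with at most `limit` in flight.
function runLimited(tasks, limit) {
    let index = 0;
    const results = [];
    async function worker() {
        while (index < tasks.length) {
            const i = index++;             // claim the next task in FIFO order
            results[i] = await tasks[i](); // a worker picks up the next task as soon as its current one settles
        }
    }
    // Start up to `limit` workers, each draining the queue.
    // Note: a rejected task rejects the whole run in this simplified version.
    const workers = Array.from({ length: Math.min(limit, tasks.length) }, worker);
    return Promise.all(workers).then(() => results);
}

Usage would look like runLimited(items.map(x => () => doWork(x)), 3), where doWork is whatever function returns your promise.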
Advantages
you can define the amount of concurrent promises (near-simultaneous requests)
consistent flow: once one promise resolves, another request starts; there is no need to guess the server's capacity
robust against data choke: if the server stops for a moment, it will just wait, and the next tasks will not start just because the clock allowed it
does not rely on a 3rd-party module; it is vanilla Node.js
1st: make https a promise, so we can use await to retrieve data (removed from the example)
2nd: create a promise scheduler that submits another request as soon as any promise resolves
3rd: make the calls
Limiting the request rate by limiting the amount of concurrent promises
const https = require('https')

function httpRequest(method, path, body = null) {
    const reqOpt = {
        method: method,
        path: path,
        hostname: 'dbase.ez-mn.net',
        headers: {
            "Content-Type": "application/json",
            "Cache-Control": "no-cache"
        }
    }
    if (method == 'GET') reqOpt.path = path + '&max=20000'
    if (body) reqOpt.headers['Content-Length'] = Buffer.byteLength(body);
    return new Promise((resolve, reject) => {
        const clientRequest = https.request(reqOpt, incomingMessage => {
            let response = {
                statusCode: incomingMessage.statusCode,
                headers: incomingMessage.headers,
                body: []
            };
            let chunks = ""
            incomingMessage.on('data', chunk => { chunks += chunk; });
            incomingMessage.on('end', () => {
                if (chunks) {
                    try {
                        response.body = JSON.parse(chunks);
                    } catch (error) {
                        reject(error)
                    }
                }
                console.log(response)
                resolve(response);
            });
        });
        clientRequest.on('error', error => { reject(error); });
        if (body) { clientRequest.write(body) }
        clientRequest.end();
    });
}
const asyncLimit = (fn, n) => {
    const pendingPromises = new Set();
    return async function(...args) {
        while (pendingPromises.size >= n) {
            await Promise.race(pendingPromises);
        }
        const p = fn.apply(this, args);
        const r = p.catch(() => {});
        pendingPromises.add(r);
        await r;
        pendingPromises.delete(r);
        return p;
    };
};
// httpRequest is the function whose request rate we want to limit.
// In this case, we keep 8 requests running while not blocking other tasks (concurrency)
let ratedhttpRequest = asyncLimit(httpRequest, 8);

// this is our data set and caller
let process = async () => {
    const patchData = [
        {path: '/rest/slots/80973975078587', body: {score: 3}},
        {path: '/rest/slots/809739750DFA95', body: {score: 5}},
        {path: '/rest/slots/AE0973750DFA96', body: {score: 5}}]
    const jobs = []
    for (let i = 0; i < patchData.length; i++) {
        jobs.push(ratedhttpRequest('PATCH', patchData[i].path, patchData[i].body))
    }
    await Promise.all(jobs) // wait for all rated requests to finish
    console.log('completed')
}
process()
The classic way of running async processes in series is to use async.js and its async.series(). If you prefer promise-based code, there is a promise version of async.js: async-q.
With async-q you can once again use series:
async.series([
    function() { return delayPromise(100, "a:a") },
    function() { return delayPromise(100, "a:b") },
    function() { return delayPromise(100, "a:c") }
])
.then(function() {
    console.log('done');
});
Running two of them at the same time will run a and b concurrently, but within each, the array of functions will run sequentially:
// these two will run concurrently but each will run
// their array of functions sequentially:
async.series(a_array).then(()=>console.log('a done'));
async.series(b_array).then(()=>console.log('b done'));
If you want to run b after a then put it in the .then():
async.series(a_array)
    .then(() => {
        console.log('a done');
        return async.series(b_array);
    })
    .then(() => {
        console.log('b done');
    });
If, instead of running each array sequentially, you want to limit each to a set number of concurrent processes, you can use parallelLimit():
// Run two promises at a time:
async.parallelLimit(a_array, 2)
    .then(() => console.log('done'));
Read up on the async-q docs: https://github.com/dbushong/async-q/blob/master/README.md
