How to do repeated requests until one succeeds without blocking in node? - javascript

I have a function that takes a parameter and a callback. It's supposed to do a request to a remote API and get some info based on the parameter. When it gets the info, it needs to send it to the callback. Now, the remote API sometimes fails to provide. I need my function to keep trying until it manages to do it and then call the callback with the correct data.
Currently, I have the below code inside the function but I think that stuff like while (!done); isn't proper node code.
var history = {};
while (true) {
  var done = false;
  var retry = true;
  var req = https.request(options, function(res) {
    var acc = "";
    res.on("data", function(msg) {
      acc += msg.toString("utf-8");
    });
    res.on("end", function() {
      done = true;
      history = JSON.parse(acc);
      if (history.success) {
        retry = false;
      }
    });
  });
  req.end();
  while (!done);
  if (!retry) break;
}
callback(history);
How do I do it the right way?

There is no need to re-invent the wheel... you can use the popular async utility library, specifically its retry method in this case.
// try calling apiMethod 3 times
async.retry(3, apiMethod, function(err, result) {
  // do something with the result
});

// try calling apiMethod 3 times, waiting 200 ms between each retry
async.retry({times: 3, interval: 200}, apiMethod, function(err, result) {
  // do something with the result
});
async GitHub page
async.retry docs

Definitely not the way to go - while(!done); will go into a hard loop and take up all of your CPU.
Instead you could do something like this (untested, and you may want to implement a back-off of some sort; a back-off sketch follows the code below):
function tryUntilSuccess(options, callback) {
  var req = https.request(options, function(res) {
    var acc = "";
    res.on("data", function(msg) {
      acc += msg.toString("utf-8");
    });
    res.on("end", function() {
      var history = JSON.parse(acc); //<== Protect this if you may not get JSON back
      if (history.success) {
        callback(null, history);
      } else {
        tryUntilSuccess(options, callback);
      }
    });
  });
  req.end();
  req.on('error', function(e) {
    // Decide what to do here
    // if error is recoverable
    //   tryUntilSuccess(options, callback);
    // else
    //   callback(e);
  });
}

// Use the standard callback pattern of err in first param, success in second
tryUntilSuccess(options, function(err, resp) {
  // Your code here...
});
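As for the back-off: one option (a minimal, untested sketch along the same lines, not part of the original answer) is to thread an attempt counter through the recursion and delay each retry with setTimeout. The names tryUntilSuccessWithBackoff and retryOrFail are made up for the sketch, and the doubling delay and 10-attempt cap are arbitrary choices:
function tryUntilSuccessWithBackoff(options, callback, attempt) {
  attempt = attempt || 0;
  var req = https.request(options, function(res) {
    var acc = "";
    res.on("data", function(msg) {
      acc += msg.toString("utf-8");
    });
    res.on("end", function() {
      var history;
      try {
        history = JSON.parse(acc);
      } catch (e) {
        return retryOrFail(e);
      }
      if (history.success) return callback(null, history);
      retryOrFail(new Error("API reported failure"));
    });
  });
  req.on("error", retryOrFail);
  req.end();

  function retryOrFail(err) {
    if (attempt >= 10) return callback(err); // give up after 10 attempts
    setTimeout(function() {
      tryUntilSuccessWithBackoff(options, callback, attempt + 1);
    }, 100 * Math.pow(2, attempt)); // exponential back-off: 100 ms, 200 ms, 400 ms, ...
  }
}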

I found Dmitry's answer using the async utility library very useful, and it is the best answer here.
This answer expands his example to a working version that defines the apiMethod function and passes it a parameter. I was going to add the code as a comment but a separate answer is clearer.
const async = require('async');

const apiMethod = function(uri, callback) {
  try {
    // Call your api here (or whatever thing you want to do) and assign to result.
    const result = ...
    callback(null, result);
  } catch (err) {
    callback(err);
  }
};

const uri = 'http://www.test.com/api';

async.retry(
  { times: 5, interval: 200 },
  function (callback) { return apiMethod(uri, callback) },
  function(err, result) {
    if (err) {
      throw err; // Error still thrown after retrying N times, so rethrow.
    }
  });
Retry documentation: https://caolan.github.io/async/docs.html#retry
Note, an alternative to calling apiMethod(uri, callback) in the task is to use async.apply:
async.retry(
  {times: 5, interval: 200},
  async.apply(apiMethod, uri),
  function(err, result) {
    if (err) {
      throw err; // Error still thrown after retrying N times, so rethrow.
    }
  });
I hope this provides a good copy/paste boilerplate solution for someone.

Is this what you are trying to do?
var history = {};

function sendRequest(options, callback) {
  var req = https.request(options, function (res) {
    var acc = "";
    res.on("data", function (msg) {
      acc += msg.toString("utf-8");
    });
    res.on("end", function () {
      history = JSON.parse(acc);
      if (history.success) {
        callback(history);
      } else {
        sendRequest(options, callback);
      }
    });
  });
  req.end();
}

sendRequest(options, callback);

Without using any library: retry until it succeeds, up to a maximum of 10 attempts.
// assumes this runs inside an async function, with `notion` and APIErrorCode
// coming from the @notionhq/client SDK
let retryCount = 0;
let isDone = false;

while (!isDone && retryCount < 10) {
  try {
    retryCount++;
    const response = await notion.pages.update(newPage);
    isDone = true;
  } catch (e) {
    console.log("Error: ", e.message);
    // condition for retrying
    if (e.code === APIErrorCode.RateLimited) {
      console.log(`retrying due to rate limit, retry count: ${retryCount}`);
    } else {
      // we don't want to retry
      isDone = true;
    }
  }
}
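If you also want to pause between attempts (which a rate-limit error practically demands), a small addition that is not in the original snippet is to await a timeout inside the rate-limited branch; the one-second delay here is an arbitrary choice. The catch block above would become:
if (e.code === APIErrorCode.RateLimited) {
  console.log(`retrying due to rate limit, retry count: ${retryCount}`);
  // wait a second before the next attempt so we don't hammer the API
  await new Promise((resolve) => setTimeout(resolve, 1000));
} else {
  // we don't want to retry
  isDone = true;
}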

I've solved this problem using the retry module.
Example:
var retry = require('retry');

// configuration
var operation = retry.operation({
  retries: 2,           // try 1 time and retry 2 times if needed, total = 3
  minTimeout: 1 * 1000, // the number of milliseconds before starting the first retry
  maxTimeout: 3 * 1000  // the maximum number of milliseconds between two retries
});

// your unreliable task
var task = function(input, callback) {
  Math.random() > 0.5
    ? callback(null, 'ok')   // success
    : callback(new Error()); // error
}

// define a function that wraps our unreliable task into a fault tolerant task
function faultTolerantTask(input, callback) {
  operation.attempt(function(currentAttempt) {
    task(input, function(err, result) {
      console.log('Current attempt: ' + currentAttempt);
      if (operation.retry(err)) { // retry if needed
        return;
      }
      callback(err ? operation.mainError() : null, result);
    });
  });
}

// test
faultTolerantTask('some input', function(err, result) {
  console.log(err, result);
});
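One detail worth keeping in mind with the retry module: the retry state (attempt count, collected errors) lives in the operation object, so if faultTolerantTask can be called more than once it is safer to create the operation inside the wrapper rather than sharing a single one. A minimal reshuffle of the example above:
function faultTolerantTask(input, callback) {
  // a fresh operation per call, so attempts and errors are not shared between calls
  var operation = retry.operation({
    retries: 2,
    minTimeout: 1 * 1000,
    maxTimeout: 3 * 1000
  });
  operation.attempt(function(currentAttempt) {
    task(input, function(err, result) {
      if (operation.retry(err)) { // schedules another attempt if any retries remain
        return;
      }
      callback(err ? operation.mainError() : null, result);
    });
  });
}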

You could try something along the following lines. I'm writing out the general idea; you should replace trySomething with your HTTP request.
function keepTrying(onSuccess) {
  function trySomething(onSuccess, onError) {
    if (Date.now() % 7 === 0) {
      process.nextTick(onSuccess);
    } else {
      process.nextTick(onError);
    }
  }

  trySomething(onSuccess, function () {
    console.log('Failed, retrying...');
    keepTrying(onSuccess);
  });
}

keepTrying(function () {
  console.log('Succeeded!');
});
I hope this helps.

A library called Flashheart is also a suitable alternative. It's a REST client designed to be easy to use, and it supports retries.
For example, configure Flashheart to retry 10 times, with a delay of 500ms between requests:
const client = require('flashheart').createClient({
  retries: 10,
  retryTimeout: 500
});

const url = "https://www.example.com/";

client.get(url, (err, body) => {
  if (err) {
    console.error('handle error: ', err);
    return;
  }
  console.log(body);
});
For further information, check out the docs:
https://github.com/bbc/flashheart
Disclaimer: I have contributed to this library.

const INITIAL_DELAY = 2000
const MAX_ATTEMPTS = 10

function repeatUntilSucceeds(request) {
  return new Promise((resolve, reject) => {
    let attempt = 0
    let delay = INITIAL_DELAY

    function handleErrorRec(error) {
      if (attempt < MAX_ATTEMPTS) {
        setTimeout(execRequestRec, delay)
        attempt += 1
        delay *= 2
      } else {
        reject(error)
      }
    }

    function execRequestRec() {
      request().then(({ data, status, statusText }) => {
        if (status === 200) {
          resolve(data)
        } else {
          handleErrorRec(new Error(statusText))
        }
      }).catch(handleErrorRec)
    }

    execRequestRec()
  })
}
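The answer above doesn't show a call site. A minimal usage sketch, assuming request is an axios-style function that resolves with { data, status, statusText } (axios itself and the URL are assumptions for illustration, not something the answer specifies):
const axios = require('axios')

repeatUntilSucceeds(() => axios.get('https://www.example.com/api'))
  .then((data) => console.log('got data:', data))
  .catch((error) => console.error('gave up after retries:', error))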

Related

How can I add a setTimeOut() to run a function and insert data multiple times?

A few days ago I did a project and had some problems, which were solved in this question; let me try to summarize it.
I need to insert multiple objects into a SQL Server database. For that, I wrote a function that loops over another function, which opens a connection, inserts, and closes the connection, then repeats it over and over again.
It worked fine until today, when it was tested on a colleague's PC against the work server, and I got these errors:
Error: Requests can only be made in the LoggedIn state, not the LoggedInSendingInitialSql state
Error: Requests can only be made in the LoggedIn state, not the SentLogin7WithStandardLogin state
Here's the code we tested (the same as in my last question); it works on my PC, but not on the other one:
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();

  let url = `https://api.openweathermap.org/data/2.5/weather?lat=${offices[index].latjson}&lon=${offices[index].lonjson}&appid=${api_key}&units=metric&lang=sp`;

  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      var myObject = {
        Id_Oficina: offices[index].IdOficina,
        ...
      };

      const request = new Request(
        "EXEC USP_BI_CSL_insert_reg_RegistroTemperaturaXidOdicina @IdOficina, ...",
        function (err) {
          if (err) {
            console.log("Couldn't insert data (" + index + "), " + err);
          } else {
            console.log("Data with ID: " + myObject.Id_Oficina + " inserted successfully (" + index + ").")
          }
        }
      );
      request.addParameter("IdOficina", TYPES.SmallInt, myObject.Id_Oficina);
      ...

      request.on("row", function (columns) {
        columns.forEach(function (column) {
          if (column.value === null) {
            console.log("NULL");
          } else {
            console.log("Product id of inserted item is " + column.value);
          }
        });
      });

      request.on("requestCompleted", function () {
        connection.close();
      });

      connection.execSql(request);
    });
}

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

functionLooper();
So, I thought it would be a good idea to use setTimeout, to:
Run functionLooper().
Open connection, insert and close.
Wait a few seconds.
Repeat.
So, I changed to this:
setTimeout(functionLooper, 2000);

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}
It works but, as you can see, it only waits once, when I first run it, so I tried to make a function that runs setTimeout(functionLooper, 2000); in a loop, the way functionLooper() does, but it didn't work either.
function TimerLooper() {
  for (let i = 0; i < offices.length; i++) {
    setTimeout(functionLooper, 500);
  }
}

function functionLooper() {
  for (let i = 0; i < offices.length; i++) {
    let response = insertOffice(i);
  }
}

TimerLooper();
This shows me this error:
Error: Validation failed for parameter 'Descripcion'. No collation was set by the server for the current connection.
file:///...:/.../.../node_modules/node-fetch/src/index.js:95
reject(new FetchError(request to ${request.url} failed, reason: ${error.message}, 'system', error));
^ FetchError: request to https://api.openweathermap.org/data/2.5/weather?lat=XX&lon=XX&appid=XX&units=metric&lang=sp failed, reason: connect ETIMEDOUT X.X.X.X:X
So, I have some questions:
How can I use setTimeout properly? I wrote this function based on what I saw here on SO, but I just can't get it to work and I don't know what I'm doing wrong.
Why does it work on my PC but not on the other one? Do we have to change some kind of config or something?
Is using setTimeout the correct way to solve this problem? If not, what would you suggest?
Could you do something like:
//edit: not disconnect but end
connection.on("end", function () {
  functionLooper(index + 1);
});

function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}
Edit: according to the tedious docs
There is an end event emitted on connection.close()
Event: 'end'
function () { }
The connection has ended. This may be as a result of the client calling close(), the server closing the connection, or a network error.
My suggestion from above
var config = {
  ...
};

function insertOffice(index) {
  var connection = new Connection(config);
  connection.on("connect", function (err) {
    console.log("Successful connection");
  });
  connection.connect();

  let url = `...`;
  fetch(url)
    .then((response) => { return response.json(); })
    .then(function (data) {
      ...
    });

  connection.on("end", function () {
    functionLooper(index + 1);
  });
}

function functionLooper(i) {
  if (i < offices.length) insertOffice(i);
}
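The snippet above never starts the chain; to kick it off you would call the looper once with the first index (a small sketch, assuming offices is already populated at that point):
// start the sequential inserts; each subsequent office is inserted
// only after the previous connection emits "end"
functionLooper(0);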

Using Request.js and Cheerio.js in Node/Express returns empty array

I'm building a simple scraper using Request.js and Cheerio.js within Express. Right now I'm only looking for the title of each site. Instead of scraping websites one by one, I put the list in an array, iterate through them, and use Cheerio.js to find each website's title. When I console.log the titles, they come out fine, but I ultimately want to show them on an HTML page. Please note, I'm very new to programming, so detailed feedback would be incredibly helpful (below is the code I've been working on). Thanks in advance!
function parseSites(urls) {
  var parsedSites = [];
  urls.forEach(function(site) {
    request(site, function(err, res, body) {
      if(err) {
        console.log(err);
      } else {
        var $ = cheerio.load(body);
        parsedSites.push($('title').text());
      }
    }
    });
  });
  return parsedSites;
}
Please refer to the below code for a working implementation
var request = require('request-promise')
var cheerio = require("cheerio")

function parseSites(urls, callback) {
  var parsedSites = [];
  var promiseList = urls.map(getPage)
  Promise.all(promiseList).then(function (data) {
    callback(data.map(parse))
  })
  return parsedSites;
}

function getPage(url) {
  return request.get(url)
}

function parse(body) {
  console.log("parsing body")
  var $ = cheerio.load(body);
  return $('title').text()
}

parseSites(['https://www.google.com','https://www.facebook.com'], function(data) {
  console.log(data)
})
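One caveat with this approach: Promise.all rejects as soon as any single request fails, so one bad URL takes down the whole batch. A small, hedged tweak (not part of the answer above) is to catch per request inside getPage and substitute an empty body:
// sketch: tolerate individual failures so one bad URL doesn't reject the whole batch
function getPage(url) {
  return request.get(url).catch(function (err) {
    console.log('failed to fetch ' + url + ': ' + err.message);
    return ''; // parse() will then just return an empty title for this entry
  });
}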
First you need to understand the difference between asynchronous and synchronous code. Let's see an example:
function testFor() {
  for (let i = 0; i < 5; ++i) {
    console.log(i);
  }
}
-
console.log('start:');
testFor();
console.log('end:');
// Here you get the expected output because this code is synchronous.
//output:
start:
0
1
2
3
4
end:
-
console.log('start:');
setTimeout(testFor,1000);
console.log('end:');
// Here you don't get your expected output because setTimeout is asynchronous .
//output:
start:
end:
0
1
2
3
4
First the console.log('start:'); is called.
Then setTimeout(testFor,1000); (but it is async and the call will execute in 1 second).
Immediately after, the console.log('end:'); is called.
Finally, 1 second later, testFor() is executed and it prints 0 1 2 3 4.
The next point is that there is an error in your code!
function parseSites(urls) {
  var parsedSites = [];
  urls.forEach(function(site) {
    request(site, function(err, res, body) {
      if(err) {
        console.log(err);
      } else {
        var $ = cheerio.load(body);
        parsedSites.push($('title').text());
      }
      //} ! THIS bracket should be removed
    });
  });
  return parsedSites;
}
So your problem is that the 'request' in the forEach loop is an async function that will call the callback 'function(err, res, body)' once there is a response from the web page.
My solutions for this:
'use strict'
const cheerio = require('cheerio');
const request = require('request');
const async = require('async');

const urls = ['http://stackoverflow.com/','http://hackaday.com/','https://www.raspberrypi.org/','https://cheerio.js.org/'];

//SOLUTION 1: do what you need to do when all calls are done using recursion
let i = 0;
let parsedSites = [];
parseSites(urls[i], parsedSites);

function finalCall(sites) {
  console.log(sites);
}

function parseSites(site, parsedSites) {
  ++i;
  request(site, function(err, res, body) {
    if(err) {
      console.log(err);
    } else {
      let $ = cheerio.load(body);
      let title = $('title').text();
      console.log(title);
      parsedSites.push(title);
    }
    if(i < urls.length) {
      parseSites(urls[i], parsedSites); // recursive call;
    } else {
      finalCall(parsedSites); // when all sites are done.
    }
  });
  //return parsedSites; // cant return! we are in async calls!
}

//SOLUTION 2: do what you need to do when all calls are done using 'async'
parseSites(urls);

function finalCall(sites) {
  console.log(sites);
}

function parseSites(urls) {
  let parsedSites = [];
  async.each(urls, function parseSite(site, callback) {
    request(site, function (err, res, body) {
      if (err) {
        callback(err);
      } else {
        let $ = cheerio.load(body);
        parsedSites.push($('title').text());
        callback();
      }
    })
  }, function (err) {
    if(err) console.log(err);
    else finalCall(parsedSites);
  });
}
Async github page
Async example

Wrapping a promise into a sync function

I'm writing a node CLI where synchronous behaviour is typically more appropriate than async and I'd like to be able to leverage the following convention:
# Write functional code as an async function which returns a Promise
function foobar() { ... }
# Uses async function but blocks on promise fulfillments
function foobarSync() { ... }
So for instance -- using the RSVP promise implementation -- I have written the following async function for calling shell scripts:
var shell = function (params, options) {
  options = extend({timeout: 4000}, options);
  var commandResponse = '';
  var errorMessage = '';
  // resolve with a promise
  return new RSVP.Promise(function(resolve, reject) {
    var spawn = require('child_process').spawn;
    var timeout = setTimeout(function() {
      reject(new Error('Timed out')); // fulfil promise
    }, options.timeout);
    try {
      var shellCommand = spawn(params.shift(), params);
    } catch (err) {
      clearTimeout(timeout);
      reject(err); // fulfil promise
    }
    shellCommand.stdout.setEncoding('utf8');
    shellCommand.stderr.setEncoding('utf8');
    shellCommand.stdout.on('data', function (data) {
      commandResponse = commandResponse + data;
    });
    shellCommand.stderr.on('data', function (data) {
      errorMessage = errorMessage + data;
    });
    shellCommand.on('close', function (code) {
      if(code !== 0) {
        clearTimeout(timeout);
        reject({code: code, message: errorMessage}); // fulfil promise
      } else {
        clearTimeout(timeout);
        resolve(commandResponse); // fulfil promise
      }
    });
  });
};
This works; now I want to make it synchronous:
# Works
shell(['ls','-l']).then( function (results) {
  console.log('Result was: %s', results);
});

# Would like to see work
var results = shellSync(['ls','-l']);
What I thought would work for shellSync is:
var shellSync = function (params, options) {
  options = extend({pollingInterval: 100}, options);
  var shellResults = null;
  shell(params, options).then(
    function(results) {
      console.log('Results: %s', results);
      shellResults = results;
      // return results;
    },
    function(err) {
      console.log('Error: %s', err);
      shellResults = err;
      // return err;
    }
  );
  while(!shellResults) {
    // wait until a Promise is returned or broken (and sets the shellResults variable)
  }
  return shellResults;
};
Unfortunately this just runs, never returning. I thought that maybe instead of the while loop I'd implement a polling interval to execute the conditional check on:
var polling = setInterval(function() {
  // return once shellResults is set;
  // this setting takes place when either a resolve() or reject()
  // is called in Promise
  if(shellResults) {
    console.log('results are available');
    clearInterval(polling);
    return shellResults;
  }
}, options.pollingInterval);

while(1) {
  // wait
}
Of course, removing the while loop results in the function returning immediately (with an as-yet unfulfilled promise). So then I tried to combine the "waiting" functionality of the while loop with a polling frequency implemented
The easiest way is to consume a sync API in your internal code if you want it to be sync, but you want to wrap the async code as sync, right?
I think this can be achieved using fibers (https://www.npmjs.org/package/fibers), or more specifically futures. A little example:
var Future = require("fibers/future");

// async API, will be wrapped by syncFoo
var asyncFoo = function(cb) {
  setTimeout(function() {
    cb("foo");
  }, 1500);
};

var syncFoo = function() {
  var future = new Future();
  console.log("asyncFoo will be called");
  asyncFoo(function(x) {
    future.return(x);
  });
  console.log("asyncFoo ended");
  return future.wait();
};

(function() {
  console.log("* Syncfoo will be called");
  syncFoo();
  console.log("* Syncfoo ended");

  console.log("* Syncfoo will be called again");
  syncFoo();
  console.log("* Syncfoo ended again");
}).future()();
More specific for your code:
var shellSync = function(params, options) {
  var future = new Future();
  options = extend({pollingInterval: 100}, options);
  var shellResults = null;
  shell(params, options).then(
    function(results) {
      console.log('Results: %s', results);
      future.return({results: results});
    },
    function(err) {
      console.log('Error: %s', err);
      future.return({err: err});
    }
  );
  var ret = future.wait();
  if (ret.err) {
    throw ret.err;
  } else {
    return ret.results;
  }
};
EDIT NOTE: You should wrap all in (function() {...}).future()(); to run this on a fiber
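A minimal usage sketch of that note: the wrapping IIFE and the .future()() call are the only additions, mirroring the syncFoo example further up.
(function() {
  // runs inside a fiber, so future.wait() can yield instead of blocking the event loop
  var results = shellSync(['ls', '-l']);
  console.log('Result was: %s', results);
}).future()();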

Node.js unexpected stack overflow with multiple get requests

I have a function that GETs a JSON object from a remote server, or from a local cache on-disk.
In one use case, I have to call this function several thousand times with varying arguments, but when I do so, I get maximum call stack size errors. I must be making a recursive call somewhere, but I can't see where it could be, as my process.nextTick calls seem to be in the right place.
I get none of my log.error readouts in the console, which would be evident if any of the recursive calls to retry the request were made.
The console output shows a repeated occurrence of
(node) warning: Recursive process.nextTick detected. This will break in the next version of node. Please use setImmediate for recursive deferral.
then...
RangeError: Maximum call stack size exceeded
Then the program exits.
Can anyone offer any help regarding what i may be doing wrong? I'm completely stumped.
Below is the function that invokes the problematic function "tf2inv.loadInventory()"
function refreshInventories(accounts, force, callback) {
  //job executes download function, then pushes to inventories object
  var inventories = {};
  var Qinv = async.queue(function (task, invCallback) {
    tf2inv.loadInventory(
      task.force,
      task.steamid,
      function(inv, alias) {
        inventories[alias] = inv;
        process.nextTick(invCallback);
      }
    );
  }, 100)

  //when all queue jobs have finished, callback with populated inventories object
  Qinv.drain = function (err) {
    log.info('All inventories downloaded');
    callback(inventories);
  }

  //adding jobs to the queue
  for (var i = accounts.length - 1; i >= 0; i--) {
    Qinv.push({
      force: force,
      steamid: accounts[i]
    });
  };
}
Shown here is the function that either parses from the cache, or requests from the remote server.
//tf2inv
var loadInventory = function(force, sid, callback) {
  var invLoc = invFolder + sid

  if(force) {
    if(fs.existsSync(invLoc)) {
      fs.unlinkSync(invLoc);
    }
  }

  if(fs.existsSync(invLoc)) {
    var body = fs.readFileSync(invLoc);
    try {
      var inventory = JSON.parse(body);
    } catch (e) {
      fs.unlinkSync(invLoc);
      log.error("parsing " + sid + "'s inventory");
      loadInventory(true, sid, invFolder, callback);
      return;
    }
    process.nextTick(function() { callback(inventory, sid) })
    return;
  } else {
    var urlPre = "http://api.steampowered.com/IEconItems_440/GetPlayerItems/v0001/?key=";
    var urlSidPre = "&steamid=";
    var urlInvSuf = "&inventory=yes";
    var URL = urlPre + steam_API + urlSidPre + sid + urlInvSuf;
    http.get(URL, function (res) {
      var body = '';
      res.on('data', function (data) {
        body += data;
        fs.appendFile(invLoc, data);
      });
      res.on('end', function() {
        try {
          inventory = JSON.parse(body);
        } catch (e) {
          if(fs.existsSync(invLoc)) {
            fs.unlinkSync(invLoc);
          }
          log.error("parsing " + sid + "'s downloaded inventory");
          loadInventory(force, sid, invFolder, callback)
          return;
        }
        process.nextTick(function() { callback(inventory, sid) })
        return;
      });
      res.on('error', function (e, socket) {
        log.error(sid + " inventory error")
        if(fs.existsSync(invLoc)) {
          fs.unlinkSync(invLoc);
        }
        log.debug('Retrying inventory')
        loadInventory(force, sid, invFolder, callback);
        return;
      })
      res.on('close', function () { res.emit('end'); log.error('connection closed') })
    })
    .on('error', function(e) {
      log.error(JSON.stringify(e));
      if(fs.existsSync(invLoc)) {
        fs.unlinkSync(invLoc);
      }
      log.debug('Retrying inventory')
      loadInventory(force, sid, invFolder, callback)
      return;
    })
  }
};
It is likely to be failing to parse the body coming back from the server. It then immediately calls itself again, failing again, infinitely looping and causing a stack overflow.
I suggest you do not retry automatically on a failed parse - if it fails once, it is likely to fail again. It would be best to call back with the error and let the part of your program that calls this handle it, or pass it back to a point where it can let the user know that something is wrong.
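A hedged sketch of that suggestion, applied to the cached-file branch of loadInventory above (the same change would apply to the download branch). Note this switches to an error-first callback signature, so the caller in refreshInventories would need to be adjusted accordingly:
// sketch: surface the parse error instead of recursing into another attempt
if (fs.existsSync(invLoc)) {
  var body = fs.readFileSync(invLoc);
  var inventory;
  try {
    inventory = JSON.parse(body);
  } catch (e) {
    fs.unlinkSync(invLoc);
    log.error("parsing " + sid + "'s inventory");
    return process.nextTick(function() { callback(e); });
  }
  return process.nextTick(function() { callback(null, inventory, sid); });
}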

How to call a function after an asynchronous for loop over Object values finishes executing

I want to call a function after an asynchronous for loop iterating through the values of a JavaScript object finishes executing. I have the following code:
for (course in courses) {
  var url = '...' + courses[course];
  request(url, (function (course) {
    return function (err, resp, body) {
      $ = cheerio.load(body);
      //Some code for which I use object values
    };
  })(course));
}
This can be done in vanilla JS, but I recommend the async module, which is the most popular library for handling async code in Node.js. For example, with async.each:
var async = require('async');
var courseIds = Object.keys(courses);

// Function for handling each course.
function perCourse(courseId, callback) {
  var course = courses[courseId];
  // do something with each course.
  callback();
}

async.each(courseIds, perCourse, function (err) {
  // Executed after each course has been processed.
});
If you want to use a result from each iteration, then async.map is similar, but passes an array of results to the second argument of the callback.
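For completeness, a small sketch of that async.map variant; the shape of the per-course result here is just an illustration, not something the answer specifies:
// sketch: collect one result per course with async.map
async.map(courseIds, function (courseId, callback) {
  var course = courses[courseId];
  // do the async work for this course, then hand the result back
  callback(null, { id: courseId, name: course });
}, function (err, results) {
  // results is an array of the per-course values, in the same order as courseIds
});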
If you prefer vanilla JS, then this will work in place of async.each:
function each(list, func, callback) {
  // Avoid emptying the original list.
  var listCopy = list.slice(0);

  // Consumes the list an element at a time from the left.
  // If you are concerned with overhead in using the shift
  // you can accomplish the same with an iterator.
  function doOne(err) {
    if (err) {
      return callback(err);
    }
    if (listCopy.length === 0) {
      return callback();
    }
    var thisElem = listCopy.shift();
    func(thisElem, doOne);
  }

  doOne();
}
(taken from a gist I wrote a while back)
I strongly suggest that you use the async library however. Async is fiddly to write, and functions like async.auto are brilliant.
A possible simple JS solution would be to do something like this.
var courses = {
  lorum: 'fee',
  ipsum: 'fy',
  selum: 'foe'
};

var keys = Object.keys(courses);
var waiting = keys.length;

function completedAll() {
  console.log('completed all');
}

function callOnCourseComplete(course, func) {
  console.log('completed', course);
  waiting -= 1;
  if (!waiting) {
    func();
  }
}

var delay = 10000;
keys.forEach(function(course) {
  var url = '...' + courses[course];
  console.log('request', url);
  setTimeout((function(closureCourse) {
    return function( /* err, resp, body */ ) {
      // Some code for which I use object values
      callOnCourseComplete(closureCourse, completedAll);
    };
  }(course)), (delay /= 2));
});
Update: Probably a better Javascript solution would be to use Promises
const courses = {
  lorum: 'fee',
  ipsum: 'fy',
  selum: 'foe',
};

function completedAll() {
  console.log('completed all');
}

function callOnCourseComplete(courseName) {
  console.log('completed', courseName);
}

let delay = 10000;
const arrayOfPromises = Object.keys(courses).map(courseName => (
  new Promise((resolve, reject) => {
    const url = `...${courses[courseName]}`;
    console.log('request', url);
    setTimeout((err, resp, body) => {
      if (err) {
        reject(err);
      }
      // Some code for which I use object values
      resolve(courseName);
    }, (delay /= 2));
  }))
  .then(callOnCourseComplete));

Promise.all(arrayOfPromises)
  .then(completedAll)
  .catch(console.error);
