I want to call a web API at some interval through JavaScript/AngularJS 1.x. I have to do this because the web API that I am calling restricts the number of calls per 300 seconds.
Below is the code that I am trying to make work.
RecordList is the list of objects that I need to pass with each web API call.
setDelay is a function that adds a delay to each call with setTimeout.
Inside the setTimeout callback, I am calling the web API and it creates the record successfully. I want an object, event, or piece of code that tells me whether all the records have been created successfully. In other words, how do I know that all the promises inside the setTimeout callbacks have resolved? I know about Promise.all and $q.all, but they do not seem to work with setTimeout. Could you suggest something for this?
var waitInterval = 300;
var promiseArray = [];
function setDelay(obj, s) {
setTimeout(function() {
var SomePromise = $http.post('odataURI', obj).then(function success(result) {
//resolve(result);
});
promiseArray.push(SomePromise);
}, waitInterval * s);
}
for (var s = 1; s <= RecordList.length; s++) {
setDelay(RecordList[s - 1], s);
}
You can use a recursive function to run the next promise after the previous one has finished.
function post(n) {
// Stand-in for the real request; in the question this would be the $http.post call
return Promise.resolve(n);
}
function setDelay(n, milliseconds) {
return new Promise(function(resolve, reject) {
setTimeout(function() {
post(n).then(function(res) {
resolve(res);
});
}, milliseconds)
})
}
function call(list, delay = 1000, index = 0, result = []) {
// Return the chain so the caller can tell when everything has finished
return setDelay(list[index], delay).then(
function(res) {
console.log(res);
result.push(res);
if (index == list.length - 1) {
console.log('Result: ', result); // You can do something here
return result;
} else {
return call(list, delay, index + 1, result);
}
}
);
}
var RecordList = [1, 2, 3, 4, 5];
call(RecordList);
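With the return added to call (see above), the caller can attach a handler that fires once every item has been processed. For the original question, post would be replaced by the real $http.post call and the 300 ms interval passed in; a rough sketch, not tested against the actual API:
call(RecordList, 300).then(function(allResults) {
    // every delayed request has resolved at this point
    console.log('All records created:', allResults);
});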
You can use async/await for the same purpose:
var waitInterval = 300;
var promiseArray = [];
async function FunctionOfLoop() {
var dataMain;
for (var s = 1; s <= RecordList.length; s++) {
dataMain = await setDelay(RecordList[s - 1], s); // here you get the data after the API call has completed successfully
}
}
function setDelay(obj, s) {
return new Promise ((resolve, reject) => {
setTimeout(function() {
var SomePromise = $http.post('odataURI', obj).then(function success(result) {
resolve(result);
});
}, waitInterval * s);
});
}
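Because FunctionOfLoop is declared async, it returns a promise of its own, so the caller can find out when every record has been processed. A minimal consumption sketch (note that the .catch branch only fires if setDelay is extended to call reject on failure, which the code above does not do yet):
FunctionOfLoop().then(function () {
    console.log('All records have been created');
}).catch(function (err) {
    console.log('At least one call failed', err);
});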
I hope it helps you out.
It is also possible without recursion:
var waitInterval = 300;
var promiseArray = [];
var tick =0;
function setDelay(obj, s) {
setTimeout(function() {
var SomePromise = Promise.resolve(obj); // stand-in for the $http.post call from the question
SomePromise.then(function success(result) {
tick ++;
console.log(result);
if ( tick === RecordList.length) {
Promise.all(promiseArray).then((res) => {
console.log(res);
});
}
});
promiseArray.push(SomePromise);
}, waitInterval * s);
}
var RecordList = [1,2,3,4,5];
for (var s = 1; s <= RecordList.length; s++) {
setDelay(RecordList[s - 1], s);
}
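If you would rather avoid the tick counter entirely, the key is to create the delayed promises up front so that Promise.all (or $q.all in AngularJS) can see all of them immediately. A sketch based on the question's $http/odataURI code, not part of the answer above:
function setDelay(obj, s) {
    return new Promise(function (resolve, reject) {
        setTimeout(function () {
            $http.post('odataURI', obj).then(resolve, reject);
        }, waitInterval * s);
    });
}

var promiseArray = RecordList.map(function (record, index) {
    return setDelay(record, index + 1);
});

Promise.all(promiseArray).then(function (results) {
    console.log('All records created', results);
});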
I suspect I've fundamentally misunderstood JavaScript promises, any ideas?
I have a function that queries a database containing music; it looks like this:
function searchDatabaseForTrack(query,loadedResults){
loadedResults = loadedResults || [];
desiredResults = 100;
if (loadedResults.length < desiredResults) {
try {
databaseApi.searchTracks(query, {"offset":loadedResults.length, "limit":"50", }).then(function(data){
i=0
if (data.tracks.items.length == 0) {
console.log(`Already loaded all ${loadedResults.length} tracks!`)
console.log(loadedResults)
return loadedResults;
}
else {
for (thing in data.tracks.items){
loadedResults.push(data.tracks.items[i]);
i=i+1;
}
console.log(loadedResults.length, " tracks collected");
searchDatabaseForTrack(query,loadedResults)
}
});
} catch(err) {
console.log("ERROR!", err)
console.log(loadedResults)
return loadedResults;
}
} else {
console.log(loadedResults)
return loadedResults;
}
}
And then a bit later, I try to call and use the data retrieved.
function getArtistTracks(artistName){
searchDatabaseForTrack(artistName).then(function(data){
console.log(songs);
songs.sort(function(a,b){
var c = new Date(a.track.album.release_date);
var d = new Date(b.track.album.release_date);
return d-c;
});
console.log("songs", songs);
var newsongs=[];
i=0
for (song in songs) {
newsongs.push(songs[i].track.uri);
i++
};
return newsongs;
});
}
What I'm trying to do is get the second function "getArtistTracks" to wait for the completion of the query in the first function. Now I could just call the databaseApi.searchTracks directly, but there's a limit of 50 tracks returned per result — which kind of screws me over.
searchDatabaseForTrack().then(...) won't work since searchDatabaseForTrack() doesn't return a promise, so you can either return a promise or use an async function.
Instead of a recursive function, you could simply call databaseApi in a for loop.
Also, desiredResults should be an argument and not hardcoded in the function:
async function searchDatabaseForTrack(query, desiredResults){
let loadedResults = [], data, currOffset = 0;
const iterations = Math.ceil(desiredResults / 50);
for(let n = 0 ; n < iterations; n++){
currOffset = n * 50;
data = await databaseApi.searchTracks(query, {"offset":currOffset, "limit":"50", });
if (data.tracks.items.length == 0) {
console.log(`Already loaded all ${loadedResults.length} tracks!`)
console.log(loadedResults)
return loadedResults;
}
else {
loadedResults = loadedResults.concat(data.tracks.items);
console.log(loadedResults.length, " tracks collected");
}
}
return loadedResults;
}
The rest should be fine as long as you add .catch() to handle errors (as mentioned in the previous answer), which are thrown automatically without the need for the try/catch block:
function getArtistTracks(artistName, desiredResults = 100) {
    searchDatabaseForTrack(artistName, desiredResults).then((songs) => {
        // your previous code
    })
    .catch((err) => {
        // handle error
    });
}
Have searchDatabaseForTrack use Promise.all to return the loadedResults after all results have been gotten. Also, make sure not to implicitly create global variables as you're doing with thing. For example, try something like this:
async function searchDatabaseForTrack(query) {
const desiredResults = 100;
const trackPromises = Array.from(
({ length: Math.ceil(desiredResults / 50) }),
(_, i) => {
const offset = i * 50;
return databaseApi.searchTracks(query, { offset, limit: 50 });
}
);
const itemChunks = await Promise.all(trackPromises);
const loadedResults = itemChunks.reduce((a, { tracks: { items }}) => (
[...a, ...items]
), []);
return loadedResults;
};
and
searchDatabaseForTrack(artistName).then((loadedResults) => {
// do stuff with loadedResults
})
.catch((err) => {
console.log("ERROR!", err)
// handle error
});
I am trying to get a bunch of IDs from an API and then form a sequence of requests which make further calls to the API to fetch some parameters. These are totaled, and I expect the output to be pushed as a JSON array.
The problem is that the REST call is async. I've added a promise, but I am not sure when to resolve it back to the calling function; the REST call sometimes takes a second or two to respond.
I would like to know at what point I can resolve the promise, or how to know when the totals have been computed.
The Route
app.get("/sonar/:x_id",function(req,resp) {
getRestSonar(req.params.x_id).then(function (fromResolve) {
resp.send(fromResolve);
});
});
The function that returns a promise and makes the REST calls in a loop:
var getRestSonar = function(requestX) {
return new Promise(function(resolve,reject) {
var unirest = require("unirest");
var reqx = unirest("GET", "http://sonarqubexxServer/api/projects");
var outputJson = {
table: []
};
reqx.end(function (res) {
if (res.error) throw new Error(res.error);
// console.log(res.body);
var result = res.body;
//var needle = req.params.csi_id;
var needle = requestX;
var TotalDuplicateLines = 0;
var TotalBugs = 0;
var TotalNcloc = 0;
var TotalCodeSmells = 0;
var TotalVulnerabilities = 0;
for (var i=0;i<result.length;i++) {
if (result[i].nm.indexOf(needle) !== -1) {
console.log(result[i].k);
var queryUrl = "http://sonarqubexxServer/api/resources?resource="+result[i].k+"&metrics=code_smells,bugs,vulnerabilities,ncloc,coverage,duplicated_lines&format=json"
console.log(queryUrl);
var subrequest = unirest("GET",queryUrl);
subrequest.end(function (resXX) {
if (resXX.error);
var resXXResult = resXX.body;
for (var i=0;i<resXXResult.length;i++) {
// var duplicateData = resXXResult[0].msr.filter(item => item.key == 'duplicated_lines');
resXXResult[i].msr.forEach(m => {
if (m.key === 'duplicated_lines') {
console.log('Duplicated Lines ' + m.val);
TotalDuplicateLines += m.val;
}
else if(m.key === 'bugs' ) {
console.log('Bugs ' + m.val);
TotalBugs += m.val;
}
else if(m.key === 'ncloc' ) {
console.log('Lines of Code ' + m.val);
TotalNcloc += m.val;
}
else if(m.key === 'code_smells' ) {
console.log('Code Smells ' + m.val);
TotalCodeSmells += m.val;
}
else if(m.key === 'vulnerabilities' ) {
console.log('Vulnerabilities ' + m.val);
TotalVulnerabilities += m.val;
outputJson.table.push({totduplines:TotalDuplicateLines},{totVul:TotalVulnerabilities});
}
});
console.log("Iam here with I :: " + i);
if (i === (resXXResult.length - 1)) {
//Should i resolve here makes no sense
console.log("Resolved the promise now..");
}
//The for ends here
}
// I see this is a bad place to resolve..
resolve(outputJson);
});
}
}
});
});
}
EDIT: As suggested in the comments, I split the calls into smaller sections.
Now I fetch the API calls separately, create an array out of them, and then use promises to call back to the API. How do I resolve each call by looping over it?
When I try to loop, it always resolves request[0] and then comes out of the promise. How can I create a promise array and wait for all of them to complete?
app.get("/sonar/:csi_id",function(req,resp) {
var collectiveResult = [];
getRestSonar(req.params.csi_id).then(function (fromResolve) {
return splitReqUrl(fromResolve);
}).then(function(fromSplitUrl) {
console.log("I am from split url ::::" + fromSplitUrl);
return getSubSonarProperties(fromSplitUrl);
}).then(function(fromsubSonar) {
collectiveResult.push(fromsubSonar);
console.log("+++++++++++++++++++++++++++");
console.log(fromsubSonar);
resp.send(collectiveResult);
});
});
var getSubSonarProperties = function(getUrl) {
return new Promise(function(resolve,reject) {
var getSubRest = require("unirest");
console.log("Attempting to GET " + getUrl);
var req = getSubRest("GET",getUrl);
var outputJson = {
table: []
}
var TotalDuplicateLines = 0;
var TotalBugs = 0;
var TotalNcloc = 0;
var TotalCodeSmells = 0;
var TotalVulnerabilities = 0;
req.end(function (res) {
if (res.error);
var resXXResult = res.body;
resolve(resXXResult);
});
});
}
var splitReqUrl = function(request) {
return new Promise(function(resolve,reject) {
resolve(request[1]);
//for(var i=0; i< request.length; i++) {
// resolve(request[i]);
//}
});
}
var getRestSonar = function(requestX) {
return new Promise(function(resolve,reject) {
var unirest = require("unirest");
var reqx = unirest("GET", "http://sonarqubexxx/api/projects");
var outputJson = {
table: []
};
reqx.end(function (res) {
if (res.error) throw new Error(res.error);
// console.log(res.body);
var result = res.body;
//var needle = req.params.csi_id;
var needle = requestX;
var queryArray = [];
for (var i=0;i<result.length;i++) {
if (result[i].nm.indexOf(needle) !== -1) {
console.log(result[i].k);
var queryUrl = "http://sonarxxx/api/resources?resource="+result[i].k+"&metrics=code_smells,bugs,vulnerabilities,ncloc,coverage,duplicated_lines&format=json"
//console.log(queryUrl);
queryArray.push(queryUrl);
}
if (i === (result.length - 1)) {
resolve(queryArray);
}
}
});
});
}
Problem
First of all, the problem with your solution is that you're trying to do everything inside a single big new Promise(...) constructor.
Even if you managed to make that work, it would still be a common anti-pattern, as Promises are meant to be chained using the .then(...) method.
As pointed out by Roamer-1888, there ought to be a fork of unirest that handles Promises directly instead of requiring callbacks as in your example, but let's stick with your version of unirest here.
Solution
So what you need to do is create a Promise chain to handle the different steps of your code and pass the results down the chain.
Your steps seem to be:
1. Make the first call to retrieve initial results.
2. Filter the results based on the requestX input.
3. For each item left, make several calls to obtain more data.
4. Put everything back into an outputJson object.
Basically the only async steps are 1 and 3, but it makes sense to add a third link to the chain that builds your outputJson and passes it downstream.
So let's start with the first step.
1. Make the first call
In the first link of the Promise chain we need to retrieve the initial results with your first unirest call:
new Promise((resolve, reject) => {
unirest("GET", "http://sonarqubexxServer/api/projects")
.end((res) => {
if (res.error) {
reject(res.error);
} else {
resolve(res.body);
}
});
})
Notice that in this example I already check whether the response contains an error and reject in that case; otherwise I resolve the promise with the body (the data we need).
The Promise created above will reject if the request fails, and will pass the body of the response downstream if everything goes fine.
2. Filtering and Sub-calls
Now then we can go ahead and use the full potential of Promises with the .then(...) method:
new Promise((resolve, reject) => {
unirest("GET", "http://sonarqubexxServer/api/projects")
.end((res) => {
if (res.error) {
reject(res.error);
} else {
resolve(res.body);
}
});
}).then((results) => {
results = results.filter((result) => {
return result.nm.indexOf(request) != -1;
});
return Promise.all(results.map((result) => {
return new Promise((resolve, reject) => {
var queryUrl = "http://sonarqubexxServer/api/resources?resource=" + result.k + "&metrics=code_smells,bugs,vulnerabilities,ncloc,coverage,duplicated_lines&format=json"
unirest("GET", queryUrl)
.end((res) => {
if (res.error) {
reject(res.error);
} else {
resolve(res.body);
}
});
})
}))
})
In this step I used some Array methods to make the code cleaner and Promise.all to handle several promises together.
Array.filter is a method which iterates over an array and checks, for each item, whether it should be kept in the filtered output. So, in your case, we want to keep only those items where result.nm.indexOf(request) != -1.
Array.map is a method which iterates over an array and converts each item into something else: the function you provide takes each item as input and returns a new value, which replaces the old one in the output array.
Finally, Promise.all accepts an array of Promises and returns a Promise itself. This returned Promise resolves when all of the given Promises resolve, and passes downstream an array whose items are the results of each single Promise.
So by writing Promise.all(results.map((result) => { return new Promise(...) })) we convert each result in the results array into a Promise that executes the result-specific call, and the resulting array of Promises is fed to Promise.all so they are all executed together.
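As a minimal, self-contained illustration of that filter/map/Promise.all pattern (fetchOne is a hypothetical stand-in for the per-item request, not part of the question's API):
const items = [1, 2, 3];
const fetchOne = (item) => Promise.resolve(item * 10); // hypothetical per-item request

Promise.all(items.filter((item) => item > 1).map((item) => fetchOne(item)))
    .then((results) => console.log(results)); // logs [20, 30]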
3. Build the outputJSON
Now the Promise chain outputs the result of Promise.all, which is an array containing the result of each Promise, i.e. the result of each sub-call.
We can then simply take the downstream data and use your nested iterations to build the outputJSON to be passed downstream:
new Promise((resolve, reject) => {
unirest("GET", "http://sonarqubexxServer/api/projects")
.end((res) => {
if (res.error) {
reject(res.error);
} else {
resolve(res.body);
}
});
}).then((results) => {
results = results.filter((result) => {
return result.nm.indexOf(request) != -1;
});
return Promise.all(results.map((result) => {
return new Promise((resolve, reject) => {
var queryUrl = "http://sonarqubexxServer/api/resources?resource=" + result.k + "&metrics=code_smells,bugs,vulnerabilities,ncloc,coverage,duplicated_lines&format=json"
unirest("GET", queryUrl)
.end((res) => {
if (res.error) {
reject(res.error);
} else {
resolve(res.body);
}
});
})
}))
}).then((allResults) => {
var TotalDuplicateLines = 0;
var TotalBugs = 0;
var TotalNcloc = 0;
var TotalCodeSmells = 0;
var TotalVulnerabilities = 0;
var outputJson = {
table: []
};
for (var i = 0; i < allResults.length; i++) {
for (var j = 0; j < allResults[i].length; j++) {
allResults[i][j].msr.forEach(m => {
if (m.key === 'duplicated_lines') {
TotalDuplicateLines += m.val;
}
else if (m.key === 'bugs') {
TotalBugs += m.val;
}
else if (m.key === 'ncloc') {
TotalNcloc += m.val;
}
else if (m.key === 'code_smells') {
TotalCodeSmells += m.val;
}
else if (m.key === 'vulnerabilities') {
TotalVulnerabilities += m.val;
outputJson.table.push({ totduplines: TotalDuplicateLines }, { totVul: TotalVulnerabilities });
}
});
}
}
return outputJson;
})
If you return this long Promise chain from your getRestSonar(request) function, then you can write getRestSonar(request).then((outputJson) => { ... do something with your outputJson ... })
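For completeness, here is a sketch of how the route from the question could consume getRestSonar once it returns that chain; the .catch / HTTP 500 handling is an addition, not something from the original code:
app.get("/sonar/:x_id", function (req, resp) {
    getRestSonar(req.params.x_id)
        .then(function (outputJson) {
            resp.send(outputJson);
        })
        .catch(function (err) {
            // any rejection anywhere in the chain ends up here
            resp.status(500).send({ error: String(err) });
        });
});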
I am trying to solve the following challenge where I have to write a function triggerActions that passes a callback into processAction and produces the output:
"Process Action 1"
"Process Action 2"
...
"Process Action n"
Here is the provided function:
function processAction(i, callback) {
setTimeout(function() {
callback("Processed Action " + i);
}, Math.random()*1000);
}
Function to code:
function triggerActions(count) {
}
Note that the code for processAction cannot be altered. I was thinking of using a Promise but I'm not sure how. I believe the setTimeout is actually synchronous so I don't know if async/await would work.
My attempt:
triggerActions = count => {
let promises = [];
for(let i=1; i<=count; i++) {
promises.push(new Promise( (resolve, reject) => processAction(i, str => resolve(str))));
}
let results = []
promises.forEach( promise => Promise.resolve(promise).then( async res => results.push(await res)));
return results;
}
I kind of like short and sweet:
var n = 5
var stop = 1
triggerActions = function(text) {
if (text) console.log(text)
if (stop <= n){
processAction(stop++, triggerActions)
}
}
triggerActions()
P.S.
It occurred to me that perhaps you are only allowed to provide a single function, which means the stop variable declaration outside the function is a problem. It makes it a little more verbose, but you can wrap it all inside the function like this:
function triggerActions(count) {
    var stop = 1
    var rFn = (text) => {
        if (text) console.log(text)
        if (stop <= count) {
            processAction(stop++, rFn)
        }
    }
    rFn()
}
triggerActions(5)
There you go:
// Your unaltered function
function processAction(i, callback) {
setTimeout(function() {
callback("Processed Action " + i);
}, Math.random()*1000);
}
// The function you want to implement
function triggerActions(count) {
var triggerAction = function (i) { // Local function to process the given action number:
if (i <= count) { // More actions to execute?
processAction(i, function (text) {// Process current action number and pass a callback in parameter
console.log(text); // Write the result of processAction
triggerAction(i + 1); // Trigger the next action
}); //
} //
}
triggerAction(1); // First things first: start at action one
}
// Call the function
triggerActions(10);
The original poster's instinct to use promises was correct.
The two solutions above may work, but because each action has to wait for the previous one's random delay to elapse before the next one starts, they are considerably slower.
Maybe this is what you want, but here's an optimized solution using promises and Promise.all():
const processAction = (i, callback) => {
setTimeout(function() {
callback("Processed Action " + i);
}, Math.random()*1000);
}
const triggerActions = (n) => {
const promises = [];
const generatePromise = (i) => {
return new Promise((resolve, reject) => {
processAction(i, resolve);
});
}
for (let i = 1; i <= n; i += 1) {
promises.push(generatePromise(i));
}
Promise.all(promises)
.then((strings) => strings.forEach((string) => console.log(string)));
}
triggerActions(10);
To compare the performance differences, try running the two approaches side by side.
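As a rough way to see the difference: with Math.random()*1000 delays, a sequential approach takes roughly the sum of all the delays, while the Promise.all version takes roughly the longest single delay. A small timing sketch (the label and helper name here are arbitrary):
const timedTrigger = (n) => {
    console.time('all actions');
    const promises = [];
    for (let i = 1; i <= n; i += 1) {
        promises.push(new Promise((resolve) => processAction(i, resolve)));
    }
    return Promise.all(promises).then((strings) => {
        strings.forEach((s) => console.log(s));
        console.timeEnd('all actions'); // roughly the longest single delay, not the sum
    });
};

timedTrigger(10);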
Here's my solution:
function processAction(i, callback) {
setTimeout(function() {
callback("Processed Action " + i);
}, Math.random()*1000);
}
// Function to code:
function triggerActions(count) {
const asyncArr = [];
for (let i = 1; i <= count; i++) {
asyncArr.push(new Promise(resolve => processAction(i, resolve)));
}
Promise.all(asyncArr).then((vals) => {
vals.forEach((val) => console.log(val))
});
}
triggerActions(5);
Here is my solution using Promise.all:
function triggerActions(count) {
const promises = range(count).map(
i => new Promise(resolve => processAction(i, resolve))
);
Promise.all(promises).then(results => {
results.forEach(result => console.log(result));
});
}
// Generates an array from 1...n
function range(n) {
return Array.from({ length: n }, (_, i) => i + 1);
}
The requirements are that the function processAction should remain unchanged and be invoked in a batch.
For this I have used the util.promisify function, which takes a callback-style function and converts it into a function that returns a promise. Promises can be invoked in a batch with Promise.all.
Another requirement is that the callback should output "Processed Action i" where i is a number. The anonymous function func has been defined to do this.
The triggerActions function takes a number x, creates an array containing the numbers 1 to x, and then invokes x asynchronous functions simultaneously.
const {promisify} = require('util');
function processAction(i, callback) {
setTimeout(function() {
callback("Processed Action " + i);
}, Math.random()*1000);
}
const func = (param1) => console.log(param1);
const promisifyedProcessAction = promisify(processAction);
async function triggerActions(count) {
const arr = [];
for(let i = 0; i < count;)
arr.push(++i);
await Promise.all(
arr.map((value) => promisifyedProcessAction(value,func)));
}
triggerActions(5);
Here's an overview of all the possible approaches:
Callback-based:
Sequential:
function triggerActions(count) {
;(function recur(i = 0) {
processAction(i, (data) => {
console.log(data)
if (i + 1 < count) {
recur(i + 1)
}
})
})()
}
Concurrent:
function triggerActions(count) {
const data = Array.from({ length: count })
for (let i = 0; i < count; i++) {
processAction(i, (result) => {
data[i] = result
count--
if (count == 0) {
for (const x of data) {
console.log(x)
}
}
})
}
}
Promise-based:
We can use this function to make processAction async:
function processActionP(i) {
return new Promise((res) => processAction(i, res))
}
Sequential:
async function triggerActions(count) {
for (let i = 0; i < count; i++) {
const data = await processActionP(i)
console.log(data)
}
}
Concurrent:
async function triggerActions(count) {
const data = await Promise.all(
Array.from({ length: count }, (_, i) => processActionP(i)),
)
for (const x of data) {
console.log(x)
}
}
Concurrent, using lodash/fp
const _ = require('lodash/fp')
const triggerActions = _.pipe(
_.range(0),
_.map(processActionP),
Promise.all.bind(Promise),
data => data.then(
_.each(console.log)
),
)
So, I have a function that looks like this:
function getMainData() {
var dfd = $.Deferred();
$.getJSON('My string that i pass',
function(result) {
if (result !== undefined) {
dfd.resolve(result);
}
})
return dfd.promise()
}
function getSpecificData() {
var dfd = $.Deferred();
var myArray = [];
for (var i = 0; i < 5; i++) {
getMainData().done(function(result) {
myArray.push(result)
dfd.resolve(myArray) //This is where I am lost.
})
}
return dfd.promise()
}
getSpecificData().done(function(result) {
console.log(result);
})
I think I know how promises work if you chain them together, but I cannot make the for loop wait for the async call to finish before the next iteration.
Can someone please help me?
A for loop has no means of delaying the next iteration to wait for asynchronous code.
You can solve it by using a function that is called recursively instead
function getMainData() {
return $.getJSON('My string that i pass');
}
function getSpecificData() {
var myArray = [], def = new $.Deferred();
(function rec(i) {
getMainData().done(function(result) {
myArray.push(result);
if (i < 5 && result !== undefined) {
console.log(i)
rec(++i);
} else {
def.resolve(myArray);
}
});
})(0);
return def.promise();
}
getSpecificData().done(function(result) {
console.log(result);
});
You should instead push all promises into an array and wait for all to finish.
function getMainData() {
return $.getJSON('My string that i pass');
}
function getSpecificData() {
var promiseArray = [];
for (var i = 0; i < 5; i++) {
promiseArray.push(getMainData());
}
return $.when.apply($, promiseArray);
}
getSpecificData().done(function(result) {
console.log(result);
})
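One caveat with this approach: when $.when is given multiple Deferreds, it passes one argument per Deferred to the .done() callback rather than a single array, and because $.getJSON resolves with (data, textStatus, jqXHR), each of those arguments is itself an array. A sketch of collecting just the JSON data (this consumption code is an addition, not part of the answer above):
getSpecificData().done(function () {
    // one argument per request; arg[0] is the JSON data from $.getJSON
    var results = Array.prototype.slice.call(arguments).map(function (arg) {
        return arg[0];
    });
    console.log(results);
});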
Have you tried it without promises, like this:
var myArray = [];
var cpt = 0;
var total = 5;
getMainData();

function getMainData() {
    $.getJSON('My string that i pass', function(result) {
        if (cpt < total) {
            myArray.push(result);
            cpt++;
            getMainData();
        } else {
            console.log(myArray); // all results collected, safe to use myArray here
        }
    })
}
Hope this helps.
Here's the code I'm currently using:
function loopArrayWithAsync(array, doSthWithElement, finalCallback) {
    var count = 0;
    var _data = [];
    var _errs = [];
    for (var i = 0; i < array.length; i++) {
        doSthWithElement(array[i], function (err, data) {
            count++;
            if (err) {
                _errs.push(err);
            }
            if (data) {
                _data.push(data);
            }
            if (count === array.length) {
                finalCallback(_errs, _data);
            }
        });
    }
}
then, I will use the function in this way:
loopArrayWithAsync(array, function (element, finish) {
// element - element in the array
asyncFunc(element, function (err, result) {
if (err) {
finish(err);
} else {
finish(null, result);
}
});
}, function (errs, finalData) {
// when the for loop is finished,
// i.e. (count === array.length)
// this callback will be executed
// errs - array of err from the above callback function
// finalData - array of result from the above callback function
outerCallback(errs, finalData);
});
With this implementation, I can loop through an array with an async function and execute the callback when all the elements in the array have been processed.
But now I want to add a delay/interval feature to loopArrayWithAsync(),
something like loopArrayWithAsync(array, {interval : 1000}, function (element, finish) {...: after it has processed the first element, it should wait for 1000 ms, then start to process the second element, and so on.
I've found another question talking about adding a delay to a for loop,
but it seems to be more complicated when dealing with async functions.
Any answers will be appreciated.
============================== update ==============================
This is the function after refactoring:
function loopArrayWithAsync(array, options, doSthWithElement, finalCallback) {
if (isFunction(options)) {
finalCallback = doSthWithElement;
doSthWithElement = options;
options = {};
}
options.interval = options.interval || 0;
options.oneByOne = options.oneByOne || false;
var _data = [];
var _errs = [];
var count = 0;
var length = array.length;
var i = 0;
(function handleIteration() {
if (i < length) {
var element = array[i];
doSthWithElement(element, function (err, data) {
if (err) {
_errs.push(err);
}
if (data) {
_data.push(data);
}
count++;
if (count === length) {
finalCallback(_errs, _data);
} else if (options.oneByOne) {
if (options.interval) {
setTimeout(handleIteration, options.interval);
} else {
process.nextTick(handleIteration);
}
}
});
i++;
if (!options.oneByOne) {
if (options.interval) {
setTimeout(handleIteration, options.interval);
} else {
process.nextTick(handleIteration);
}
}
}
}());
};
So now I can use the function in this way:
loopArrayWithAsync(array, {interval : 1000}, function (element, finish) {
asyncFunc(element, function (err, result) {
if (err) {
finish(err);
} else {
anotherAsyncFunc(result, function (err, doc) {
if (err) {
finish(err);
} else {
finish(null, doc);
}
});
}
});
}, function (errs, finalData) {
outerCallback(errs, finalData);
});
or
loopArrayWithAsync(array, {oneByOne : true}, function (element, finish) {...
loops through the elements one by one
loopArrayWithAsync(array, {interval : 5000, oneByOne : true}, function (element, finish) {...
loops through the elements one by one, with a 5-second delay between them
Available options:
interval is the number of milliseconds between each iteration (default: 0).
If oneByOne is true, the method only proceeds to the next element once finish has been invoked for the current element (default: false).
The code suits my case now, but I will still try the suggested libraries to make life easier, thank you
Please leave a comment if you found that the code can be further improved, looking forward to any suggestions!
As suggested by #thefourtheye you can use the concept of Promises, and Bluebird is a good, fast library for this. Promise.settle waits for all of the promises to either resolve or reject, and then lets you inspect each result.
function loopArray(array) {
var arrayOfPromises = [];
for (var i = 0; i < array.length; i++) {
arrayOfPromises.push(doSomethingAsync(array[i]));
}
Promise.settle(arrayOfPromises).then(function (results) {
console.log("All async calls done! You can inspect the result!");
console.log(results);
});
}
function doSomethingAsync(item) {
return new Promise(function(resolve, reject){
//Do you async work here!
console.log("Entering async function call " + item);
if(item === "three"){
reject("bad value!");
}
resolve(item + " promise done!");
});
}
loopArray(["one","two","three"]);
I made a JSFiddle of the example above. Working with asynchronous functions, promises can help you out a lot, so I'd really suggest you look into them.
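Note that Promise.settle resolves with an array of Bluebird PromiseInspection objects rather than the raw values, so to read them you inspect each entry. A sketch based on Bluebird 2.x's settle API:
Promise.settle(arrayOfPromises).then(function (results) {
    results.forEach(function (inspection) {
        if (inspection.isFulfilled()) {
            console.log("resolved with:", inspection.value());
        } else {
            console.log("rejected with:", inspection.reason());
        }
    });
});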
You can use a local function to make the asynchronous loop. For the next iteration the function calls itself with a delay:
function loopArrayWithAsync(array, doSthWithElement, finalCallback, delay) {
var _data = [], _errs = [], i = 0;
loop();
function loop() {
doSthWithElement(array[i], function (err, data) {
if (err) {
_errs.push(err);
}
if (data) {
_data.push(data);
}
i++;
if (i === array.length) {
finalCallback(_errs, _data);
} else {
window.setTimeout(loop, delay);
}
});
}
}
To start the calls at a certain interval instead of having a delay between the calls, just use setTimeout with increasing timeouts:
function loopArrayWithAsync(array, doSthWithElement, finalCallback, delay) {
var _data = [], _errs = [], count = 0;
for (let i = 0; i < array.length; i++) { // let ensures each timeout callback sees its own i
window.setTimeout(function() {
doSthWithElement(array[i], function (err, data) {
if (err) {
_errs.push(err);
}
if (data) {
_data.push(data);
}
count++;
if (count === array.length) {
finalCallback(_errs, _data);
}
});
}, i * delay);
}
}
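For completeness, a usage sketch for either version (asyncFunc and the 500 ms delay are illustrative stand-ins, not part of the answer above):
loopArrayWithAsync(['a', 'b', 'c'], function (element, finish) {
    // asyncFunc stands in for whatever asynchronous work you do per element
    asyncFunc(element, function (err, result) {
        finish(err, result);
    });
}, function (errs, finalData) {
    console.log(errs, finalData); // runs once every element has been handled
}, 500);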