Ajax call inside of loop needs to be synchronized - javascript

I have a coding issue where I want to loop through and make an Ajax call, but I don't want another request to be sent until the first one is complete. I have tried setting async = false and adding an onSuccess callback, but it seems like the loop continues to run, which gives me responses out of order and parallel requests.
// Asks the server for a bulk-copy amount and forwards it to createWorkflow
// via the success callback.
// NOTE(review): Prototype.js's option for a synchronous request is
// `asynchronous: false`, not `async: false` -- so this call likely still
// runs asynchronously; verify against the Prototype.js docs.
function duplicateOmsid(totalAmount, omsid) {
var url = '/portal/GetBulkCopyAmountServlet';
var errorString;
new Ajax.Request(
url, {
method: 'post',
parameters: {
totalAmount: totalAmount,
omsid: omsid
},
async: false,
onSuccess: function(transport) {
// NOTE(review): dataResponse has no var/let declaration, so it becomes an
// implicit global shared across calls.
dataResponse = transport.responseText.evalJSON();
createWorkflow(totalAmount, omsid, dataResponse);
},
..... // (snippet truncated in the original post)
// Loops over the total amount in chunks of `bulkAmount`, issuing one
// duplicate-workflow request per chunk.
//
// totalAmount - total number of items to duplicate
// omsid       - id forwarded unchanged to each request
// bulkAmount  - chunk size returned by GetBulkCopyAmountServlet
//
// Fixes vs. original: `i` was an implicit global (missing `var`), and the
// chunk-size math read the global `dataResponse` instead of the
// `bulkAmount` parameter that carries the same value.
// NOTE(review): a bulkAmount of 0 (with totalAmount > 0) would loop
// forever -- verify the servlet can never return 0.
function createWorkflow(totalAmount, omsid, bulkAmount) {
    var amountProccessed = 0;
    for (var i = 0; amountProccessed < totalAmount; i++) { // Loop through source
        var duplicateAmt;
        if (totalAmount < 11) {
            // Small batches go through in a single request.
            duplicateAmt = totalAmount;
        } else {
            // The last chunk may be smaller than the bulk size.
            duplicateAmt = amountProccessed + bulkAmount < totalAmount ? bulkAmount : totalAmount - amountProccessed;
        }
        duplicateWorkflow(totalAmount, omsid, duplicateAmt, amountProccessed);
        amountProccessed += bulkAmount;
    }
}
// Issues the duplicate-workflow request for one chunk; the success handler
// updates the #progress element (and alerts when everything is processed).
// Fixes vs. original: the onFailure/onException handlers sat *outside* the
// options object (mismatched braces -- a syntax error), and the unused
// `errorString` local was removed.
function duplicateWorkflow(totalAmount, omsid, bulkAmount, amountProccessed) {
    // Running total *after* this chunk completes; used for the progress text.
    amountProccessed += bulkAmount;
    var url = '/portal/CreateWorkFlowServlet';
    new Ajax.Request(
        url, {
            method: 'post',
            parameters: {
                totalAmount: totalAmount,
                omsid: omsid,
                bulkAmount: bulkAmount
            },
            // NOTE(review): Prototype.js expects `asynchronous: false` for a
            // synchronous request; `async` is likely ignored -- verify.
            async: false,
            onSuccess: function(transport) {
                var div = document.getElementById('progress');
                if (amountProccessed > totalAmount) {
                    div.innerHTML = totalAmount + ' out of ' + totalAmount + ' Processed ' ;
                    alert (totalAmount + 'Items successfully duplicated ')
                } else {
                    div.innerHTML = amountProccessed + ' out of ' + totalAmount + ' Processed ' ;
                }
            },
            onFailure: function(e) {
                // NOTE(review): failures are silently swallowed here.
            },
            onException: function(e) {
                // NOTE(review): exceptions are silently swallowed here.
            }
        });
}

As a rule of thumb, the way to sequentialize async code using raw Javascript is to use recursion instead of a for loop.
var urls = [ /*...*/ ];
// Recursively issues one Ajax request per url, starting request i+1 only
// from the success handler of request i; calls onDone after the last one.
// Fixes vs. original: Prototype.js requires `new Ajax.Request(...)`, and
// the success option is spelled `onSuccess`, not `onsuccess` (so the
// original chain never advanced past the first request).
function loop(i, onDone){
    if (i >= urls.length) {
        // Base case: every request has completed.
        onDone( theResultOfProcessingTheAjaxRequests );
    } else {
        new Ajax.Request(urls[i], {
            onSuccess: function(){
                loop(i + 1, onDone);
            }
        });
    }
}
// Kick off the chain; the callback runs only after every request finished.
loop(0, function(result){
console.log("all done");
});
Note that I converted i to a function parameter, to keep it scoped to the looping function. If you wanted, you could declare it outside, just like you did in the for loop:
var urls = [ /*...*/ ];
// Alternative sketch: keep the index in an outer variable instead of a
// function parameter (body elided in the original post).
var i = 0;
function loop(onDone){
//...
i = i+1;
loop(onDone);
}
Additionally, I added an "onDone" callback to the looping function to help the async code look a bit more like the sync version. The idea is that by using a return callback, the loop function doesn't need to know what function called it and where it should jump to after its done its job - in the end, calling onDone(x) is a bit similar to doing return x. Of course, you could have hardcoded the return function if you wanted.
// Hard-coded continuation: loop() jumps straight to afterAjax() when done
// instead of taking an onDone callback (body elided in the original post).
function afterAjax(){
console.log("all done");
}
function loop(){
if(i >= urls.length){
afterAjax();
}
//...
}
loop();
Finally, coding recursive loops like this is a bit annoying, and there are many libraries out there that provide functions to encapsulate these high-level sequentialization and parallelization patterns. In particular, error handling (try-catch) is especially hard to do by hand with callbacks. If you are doing any more non-trivial async work, I would highly recommend looking into some of these libraries.

Related

Wait until all Ajax requests in a for loop are done before moving on?

I have to make a call to the Agile Central API to get a list of defect suites and then iterate through the list and make a nested call to get the list of defects in each suite, the nested call depends on the outer call. I then have to append the rows of data to a table and then call doneCallback() to signal the end of data collection. The problem I'm having is that doneCallback() is being called before the requests have completed so none of the data is actually passed on
I've tried the approaches in this post: Wait until all jQuery Ajax requests are done? and this post: how to wait until Array is filled (asynchronous). In the console I can see that all the data I want is there but nothing gets appended. My question is: how can I make sure I don't call doneCallback() until all the requests that are made in the loop have finished and pushed the data? Here's my code right now:
// Requests the defect-suite list for the hard-coded workspace id and
// returns the jqXHR promise so callers can chain .done()/.fail().
function getSuites() {
    var settings = {
        url: suitesURL("71101309592") + "&fetch=Name,FormattedID,Defects",
        type: "GET",
        xhrFields: { withCredentials: true },
        headers: { "zsessionid": apiKey }
    };
    return $.ajax(settings);
}
// Requests the defects collection referenced by a suite (`_ref` is the
// fully-qualified collection URL) and returns the jqXHR promise.
function getDefects(_ref) {
    var settings = {
        url: _ref,
        type: "GET",
        xhrFields: { withCredentials: true },
        headers: { "zsessionid": apiKey }
    };
    return $.ajax(settings);
}
// Download the data
// Collects every suite, then (in parallel) the defects of each suite, and
// signals doneCallback only once every defect request has resolved.
// Fix vs. original: $.when.apply(...) was invoked immediately after
// getSuites() returned, while ajaxCalls was still empty, so doneCallback
// fired before any defect data arrived. The wait now happens inside
// suitesJSON.done, after ajaxCalls has been filled.
myConnector.getData = function (table, doneCallback) {
    console.log("Getting Data...");
    var ajaxCalls = [], tableData = [];
    var suitesJSON = getSuites();
    suitesJSON.done(function(data) {
        var suites = data.QueryResult.Results;
        for (var i = 0; i < suites.length; i++) {
            // IIFE pins `i` for the async callback below.
            (function(i) {
                var defectsJSON = getDefects(suites[i].Defects._ref + "?fetch=Name,FormattedID,State,Priority,CreationDate,c_RootCause,c_RootCauseCRM");
                ajaxCalls.push(defectsJSON);
                defectsJSON.done(function(data) {
                    var defects = data.QueryResult.Results;
                    for (var j = 0; j < defects.length; j++) {
                        tableData.push({
                            "suiteName": suites[i].Name, // This is the name of the suite collected in the outer call
                            "defectName": defects[j].Name,
                            "FormattedID": defects[j].FormattedID,
                            "State": defects[j].State,
                            "Priority": defects[j].Priority,
                            "CreationDate": defects[j].CreationDate,
                            "RootCause": defects[j].c_RootCause,
                            "RootCauseCRM": defects[j].c_RootCauseCRM
                        });
                    }
                });
            })(i);
        }
        // Wait here, now that every defect request has been registered.
        $.when.apply($, ajaxCalls).then(function() {
            console.log(tableData);
            table.appendRows(tableData);
            doneCallback();
        });
    });
};
You should use a better model to get multiple items. Using a for loop to query for multiple gets is the problem, and the solution should be to refactor so that you make one request that returns everything you need.
If this doesn't seem possible to you, I've researched a way to do what you want in jQuery.
// Fires all three GETs in parallel; the .then callback runs once every
// request has completed.
// Fixes vs. original: the $.when(...) call was never closed before .then,
// and .then was handed an object literal instead of a function.
$.when(
    $.get(path, callback), $.get(path, callback), $.get(path, callback)
).then(function () {
    // This is called after all requests are done
});
You could create an array of all your requests, like [$.get(path, callback), request2, request3, ...], and then use spread syntax to pass them as arguments, like
var args = [$.get(path, callback), request2, request 3, etc...];
$.when(...args).then(() => {/*call here*/});
This link has the rest of the information
https://css-tricks.com/multiple-simultaneous-ajax-requests-one-callback-jquery/
I think the problem is that you are calling $.when right after getSuites() is executed.
$.when 'sees' the ajaxCalls array empty (because getSuites() hasn't finished yet) and executes doneCallback().
Try to call $.when INSIDE the suitesJSON.done function; that way it will be called after the ajaxCalls array is filled with the first response:
// Fetches all suites, fires one defect request per suite in parallel, and
// calls doneCallback only after $.when sees every defect request resolve.
myConnector.getData = function (table, doneCallback) {
    console.log("Getting Data...");
    var pendingRequests = [];
    var tableData = [];
    getSuites().done(function (suiteResponse) {
        var suites = suiteResponse.QueryResult.Results;
        suites.forEach(function (suite) {
            // Each iteration gets its own `suite` binding, so no IIFE is
            // needed to pin the loop variable for the async callback.
            var defectsJSON = getDefects(suite.Defects._ref + "?fetch=Name,FormattedID,State,Priority,CreationDate,c_RootCause,c_RootCauseCRM");
            pendingRequests.push(defectsJSON);
            defectsJSON.done(function (defectResponse) {
                defectResponse.QueryResult.Results.forEach(function (defect) {
                    tableData.push({
                        "suiteName": suite.Name, // This is the name of the suite collected in the outer call
                        "defectName": defect.Name,
                        "FormattedID": defect.FormattedID,
                        "State": defect.State,
                        "Priority": defect.Priority,
                        "CreationDate": defect.CreationDate,
                        "RootCause": defect.c_RootCause,
                        "RootCauseCRM": defect.c_RootCauseCRM
                    });
                });
            });
        });
        // Every defect request has been registered by now; wait on them all.
        $.when.apply($, pendingRequests).then(function () {
            console.log(tableData);
            table.appendRows(tableData);
            doneCallback();
        });
    });
};

How to call a function associated with ajax within the loop in javascript?

I am running a loop, and within the loop I am calling a function. The function makes an Ajax call and returns the response, but the problem is that the loop does not wait for the response; therefore the function returns undefined.
// QUESTION CODE: the `return` statements below exit the *success callback*,
// not checkBidStatus itself, so checkBidStatus always returns undefined.
function checkBidStatus(index) {
$.ajax({
type:'get',
url:'/orderbook/checkorderdetails',
success: function(data){
// NOTE(review): $parsedData is an implicit global (no var/let).
$parsedData = JSON.parse(data);
if($parsedData[index]) {
return "Hello"; // lost -- jQuery ignores this return value
} else {
return "Bye"; // lost as well
}
//$("#bid-details .modal-dialog").html(data);
},
error: function (response) {
console.log(response);
}
});
}
let content = '';
// The loop runs synchronously, so each checkBidStatus(i) yields undefined.
for(let i = 1; i <= 10; i++ ) {
content += '<div>' +
(checkBidStatus(i)) +
'</div>';
}
The problem is that the for loop is synchronous, but the ajax call is asynchronous. I.e., that function has to wait for the server to respond. Therefore, callbacks or promises need to be used by the caller before iterating to the next request.
There are elegant ways to accomplish this task using certain libraries, such as async, but the "quick vanillaJS" way to solve this task using promises would be something like this:
// Template and counters shared by the sequential request chain below.
const contentTemplate = '<div>[BID_RESULT]</div>';
const nRequests = 10;
var requestCnt = 0;
var content = '';
// Wraps the ajax call in a Promise that resolves with { index, msg } once
// the response arrives (or rejects on transport error).
// Fixes vs. original: the `new Promise(...)` call was never closed (the
// `);` was missing -- a syntax error), and $parsedData leaked as an
// implicit global; it is now a proper local.
function checkBidStatus(index) {
    return new Promise((resolve, reject) => {
        $.ajax({
            type: 'get',
            url: '/orderbook/checkorderdetails',
            success: function (data) {
                var parsedData = JSON.parse(data);
                resolve({ index: index + 1, msg: parsedData ? "Hello" : "Bye" });
            },
            error: reject
        });
        // NOTE: there is no return statement here -- callers receive the
        // Promise, never the raw ajax result.
    });
}
Then, you can keep calling that function recursively:
// Issues the requests strictly one at a time: the next request starts only
// from the .then() of the previous one, until nRequests have completed.
function multipleRequests(){
checkBidStatus(requestCnt)
.then((resp) => { // This runs if the promise RESOLVES (i.e., success)
content = contentTemplate.replace('[BID_RESULT]', resp.msg);
// --- [then do what you want with content] --- //
// Request again
requestCnt++;
if( requestCnt < nRequests ){
multipleRequests();
}
})
.catch(console.error) // This runs if promise rejects (i.e., has an error)
}
// Run:
multipleRequests();
Why not use a Promise inside and wait on that promise to get the required result, after which you update the DOM? In your checkBidStatus method, you can create a promise and return that. When the ajax result is available, resolve the promise with ajax result.
The caller of the method would need to wait for the promise to resolve and then update the DOM.
let content = '';
for(let i = 1; i <= 10; i++ ) {
// All ten requests run in parallel; the .then callbacks may append their
// divs in completion order, so the ordering is not guaranteed.
checkBidStatus(i).then(result => {
content += '<div>' +result + '</div>';
})
}
Note: I have not tested the code, but something similar should work. Also, the order of the divs created based on the index is not guaranteed, so you may need to take care of that if order is important.
You should wait till response from server for every iteration in loop.
// NOTE(review): this variant does not work as written -- checkBidStatus
// still returns undefined (the `return`s below exit the success callback,
// not checkBidStatus), so the destructuring in the while loop throws.
function checkBidStatus(index) {
$.ajax({
type: 'get',
url: '/orderbook/checkorderdetails',
success: function(data) {
// NOTE(review): $parsedData is an implicit global (no var/let).
$parsedData = JSON.parse(data);
if ($parsedData[index]) {
return {
msg: 'Hello',
index: index + 1,
};
} else {
return {
msg: 'Bye',
index: index + 1,
};
}
//$("#bid-details .modal-dialog").html(data);
},
error: function(response) {
console.log(response);
},
});
}
let content = '';
let i = 1;
while (i <= 10) {
// NOTE(review): checkBidStatus(i) is undefined here, so this destructuring
// throws a TypeError on the first iteration.
let { msg, index } = checkBidStatus(i);
content += '<div>' + msg + '</div>';
i = index;
}

How to stop $.ajax within for loop?

I'm running into an issue where my api call reaches it's timeout limit, but continues to loop for the remainder of the requests provided resulting in n number of timeout logs in the console (In this case 5). I want it so that I can do something along the lines of a break; and just exit entirely so the remaining calls don't get logged. E.g. If the call immediately times out, only one timeout log will be logged instead of the current 5 and none of the five api requests will be made.
// Request configuration: five requests with a one-second timeout each.
let qs = {
requests: 5,
timeout: 1000
};
let prices = [];
let highest = 0;
const url = 'http://somedata.com';
// Fires one ajax request; on success, records the price and refreshes the
// running maximum.
// NOTE(review): JSON.stringify turns each price into a *string*, so the
// `>` comparisons in getHighestPrice mix strings and numbers -- verify.
function xhr(qs){
return $.ajax({
url: url,
dataType: 'json',
timeout: qs.timeout,
success: function (data) {
let json = JSON.stringify(data['price']);
prices.push(json);
getHighestPrice(prices);
console.log(highest);
},
error: function(e, textstatus, message) {
// NOTE(review): both branches are identical -- the if/else is redundant.
if(textstatus==="timeout") {
console.error(textstatus);
} else {
console.error(textstatus);
}
}
});
}
// Fires qs.requests ajax calls back to back -- all effectively in
// parallel, since nothing waits for the previous response.
function makeRequest(qs) {
    var remaining = qs.requests;
    while (remaining > 0) {
        xhr(qs);
        remaining -= 1;
    }
}
// Updates the module-level `highest` with the largest element of arr and
// returns it.
// Fix vs. original: the loop bound was `i <= arr.length`, reading one slot
// past the end of the array (harmless only because `undefined > x` is
// always false, but still wrong).
// NOTE(review): in the surrounding snippet the array holds JSON *strings*,
// so `>` performs mixed string/number comparison -- verify.
function getHighestPrice(arr) {
    for (let i = 0; i < arr.length; i++) {
        if (arr[i] > highest) {
            highest = arr[i];
        }
    }
    return highest;
}
// Kick off all five requests at once.
makeRequest(qs);
Your code makes all the requests at once
It should be noted that this code will stop the "chaining" once any error occurs in $.ajax, not just timeout - if that's not the required behaviour, there is a little more to do
To make the call only if the previous is successful, you can chain the promises returned by $.ajax
// Same configuration and request helper as the question, reused by the
// chained makeRequest below.
let qs = {
requests: 5,
timeout: 1000
};
let prices = [];
let highest = 0;
function xhr(qs){
return $.ajax({
// NOTE(review): `url` is not declared anywhere in this snippet -- it
// relies on the `const url` from the question's version.
url: url,
dataType: 'json',
timeout: qs.timeout,
success: function (data) {
let json = JSON.stringify(data['price']);
prices.push(json);
getHighestPrice(prices);
console.log(highest);
},
error: function(e, textstatus, message) {
// NOTE(review): both branches are identical -- the if/else is redundant.
if (textstatus==="timeout") {
console.error(textstatus);
} else {
console.error(textstatus);
}
}
});
}
// Chains the requests so each xhr() starts only after the previous promise
// resolves; the chain begins with an already-resolved $.when() promise.
function makeRequest(qs) {
    let chain = $.when();
    Array.from({ length: qs.requests }).forEach(function () {
        chain = chain.then(() => xhr(qs));
    });
}
as others have pointed out you don't need to pass qs to xhr, however, I'm assuming the code you posted may be simplified so have not removed the qs argument
An alternative would be
// Alternative: express success handling with .then() so failures flow to
// the single .fail() at the end of the chain.
let qs = {
requests: 5,
timeout: 1000
};
let prices = [];
let highest = 0;
function xhr(qs){
return $.ajax({
// NOTE(review): `url` is not declared anywhere in this snippet.
url: url,
dataType: 'json',
timeout: qs.timeout
}).then(data => {
let json = JSON.stringify(data['price']);
prices.push(json);
getHighestPrice(prices);
console.log(highest);
});
}
function makeRequest(qs) {
let p = $.when([]);
for(let i = 0; i < qs.requests; i++) {
p = p.then(() => xhr(qs));
// or p.then(xhr); if you don't need to pass qs on to xhr function (remove qs argument in xhr as well)
}
p.then(() => {
// this is run once all have completed
}).fail(reason => {
// this is run if there's a failure anywhere
});
}
Since it is a callback, it will be executed asynchronously. So even if you throw an error from one of the callback you provided, the rest will be executed later or sooner. One of the solution I could think of is to have a flag that will be set to true if one of the AJAX causes an error. Something like:
// NOTE(review): this flag only suppresses duplicate *logging* -- the other
// in-flight requests still run; nothing is cancelled or stopped.
var hasError = false;
$.ajax({
error: function (e, textstatus, message) {
if (textstatus === "timeout") {
if (!hasError) console.error(textstatus);
hasError = true;
}
}
});
Using Promise.all() can simplify this use case. If you cannot use promises try throwing an exception from the error handler. Like so:
$.ajax({
error: function (e, textstatus, message) {
// NOTE(review): this throw happens asynchronously, inside the error
// callback -- it will NOT be caught by a try/catch wrapped around the
// loop that issued the requests.
if (textstatus === "timeout") throw e
}
})
Be sure to catch the the exception:
// Issues all requests inside a try/catch.
// NOTE(review): the catch only covers the synchronous act of *starting*
// the requests; errors thrown later from ajax callbacks are not caught.
// Fix vs. original: the catch block's closing brace was swallowed by the
// `// Handle error here }` comment, leaving the function unbalanced
// (a syntax error).
function makeRequest(qs) {
    try {
        for (let i = 0; i < qs.requests; i++) {
            xhr(qs);
        }
    } catch (e) {
        // Handle error here
    }
}
To get the desired behavior you will have to make all of the calls sequentially i.e. you can't start the next call until the previous one has finished (otherwise you won't know if it has failed or not).
You could use the done callback to determine whether the next call should be made:
// Issues requests strictly one after another: the next request starts only
// inside the done() handler of the previous one.
// Fix vs. original: the bound was checked *after* issuing each request, so
// makeRequest(0) fired qs.requests + 1 calls in total; the guard now runs
// before the request is issued.
function makeRequest(i) {
    if (i >= qs.requests) {
        return;
    }
    xhr().done(function () {
        makeRequest(i + 1);
    });
}
// Start the sequential chain with the first index.
makeRequest(0); // Kick things off here
Also, you don't need to pass the qs variable into the makeRequest or xhr functions. It doesn't change throughout the calls so just use it as-is within the xhr function without passing it around.

How can I control functions to execute in order?

I am a newbie in JavaScript. I am trying to manage a couple of functions in order, but when it gets to API calls it doesn't wait for the response and goes back to continue its execution, which makes my code messy. This is a sample:
// QUESTION CODE: getMyID() returns undefined -- the `return` below exits
// FB.api's callback, not getMyID -- so myID is logged before the API
// response ever arrives.
function readFacebook()
{
var myID = getMyID();
console.log("myID= " + myID);
}
function getMyID(){
FB.api('/me', function(response) {
console.log("response.id= "+response.id);
return(response.id); // lost -- FB.api ignores the callback's return value
});
}
The output is completely strange. First
console.log("myID= " + myID);
show output and then
console.log("response.id= "+response.id);
will be called. Can anyone explain how I can force it to execute in order? I mean the program should wait for the response from Facebook instead of working asynchronously!
EDIT
I actually call 3 time API from main function to three sub functions. How can I organise this:
// NOTE(review): returns undefined -- the inner `return` only exits the
// FB.api callback, so callers never receive the id.
function getMyID(){
FB.api('/me', function(response) {
console.log("response.id= "+response.id);
return(response.id);
});
}
// Reads the user's events and checks each one. `myID` is always undefined
// here because getMyID() returns before its FB.api callback runs.
function readFacebookEvent(id)
{
var myID = getMyID();
console.log("myID= " + myID);
FB.api('/me/events', function(response) {
// NOTE(review): `i` is an implicit global (missing var).
for(i=0; i<response.data.length;i++) {
var str;
var eventID = response.data[i].id;
getEvent(eventID,myID);
}
});
}
// Fetches one event and, when it is owned by myID and in the future,
// tries to load its picture.
function getEvent(eventID,myID){
FB.api("/"+ eventID , function (response3) {
if (response3 && !response3.error) {
//console.log(response3);
var date = new Date((response3.start_time || "").replace(/-/g,"/").replace(/[TZ]/g," "));
var diff = (((new Date()).getTime() - date.getTime()) / 1000);
//console.log(diff);
if(myID == response3.owner.id && diff < 0 )
{
//console.log("found= " + myID);
// NOTE(review): getImage has no usable return value (its returns exit
// the FB.api callback), so `t` is always undefined and the else branch
// always runs.
var t = getImage(eventID);
if(t)
{
console.log("TRUE");
}
else
{
console.log("false");
}
}
}
});
}
// Fetches the event's picture and appends it to #status2.
// NOTE(review): the true/false returns below exit the FB.api callback, not
// getImage, so the caller always sees undefined.
function getImage(eventID){
//console.log("******eventID== "+eventID);
FB.api("/"+eventID+"/picture",
{
"redirect": false,
"type": "normal"
},function (response2) {
if (response2 && !response2.error) {
// NOTE(review): `str` is an implicit global here (no var/let).
str="<br/><b>Pic</b> : <img src='"+response2.data.url+"'/>";
//console.log("response2.data.url= "+response2.data.url);
//str +="<b>name: </b>"+response3.name+"<br>";
document.getElementById("status2").innerHTML+=str;
return true;
}
else
{
return false;
}
});
}
As you mentioned the call to facebook's api is asynchronous meaning that you cannot be certain to know when the resulting data will come back from your request. You can work around and guarantee the order you're looking for by using a callback:
// Logs the id once it is actually available.
function readFacebook(id) {
    console.log("myID= " + id);
}

// Fetches /me and hands the id to the supplied callback when the response
// arrives -- a continuation is the only reliable way to consume async data.
function getMyID(cb) {
    FB.api('/me', (response) => {
        console.log("response.id= " + response.id);
        cb(response.id);
    });
}

getMyID(readFacebook);
What's happening here is that the call to FB.api accepts a callback that fires when the response comes back from the server. Since we're supplying our own callback to getMyID, we can use that to get access to response.id once the server responds.

Parallel asynchronous Ajax requests using jQuery

I'd like to update a page based upon the results of multiple ajax/json requests. Using jQuery, I can "chain" the callbacks, like this very simple stripped down example:
// Serial version: the second request is issued only from the first
// request's callback, so the two round-trips never overlap.
$.getJSON("/values/1", function(data) {
// data = {value: 1}
var value_1 = data.value;
$.getJSON("/values/2", function(data) {
// data = {value: 42}
var value_2 = data.value;
var sum = value_1 + value_2;
$('#mynode').html(sum);
});
});
However, this results in the requests being made serially. I'd much rather a way to make the requests in parallel, and perform the page update after all are complete. Is there any way to do this?
jQuery $.when() and $.done() are exactly what you need:
// Both requests start immediately; myFunc runs when both have resolved,
// myFailure if either fails.
$.when($.ajax("/page1.php"), $.ajax("/page2.php"))
.then(myFunc, myFailure);
Try this solution, which can support any specific number of parallel queries:
// Counts down as responses arrive; when the counter hits zero, every
// request has completed and the total can be rendered.
// Fix vs. original: `done` was initialised to 4 while five requests are
// issued below, so the completion branch fired one response early and the
// final sum was never rendered.
var done = 5; // number of total requests -- must match the array length below
var sum = 0;
/* Normal loops don't create a new scope */
$([1,2,3,4,5]).each(function() {
var number = this;
$.getJSON("/values/" + number, function(data) {
sum += data.value;
done -= 1;
if(done == 0) $("#mynode").html(sum);
});
});
Run multiple AJAX requests in parallel
When working with APIs, you sometimes need to issue multiple AJAX requests to different endpoints. Instead of waiting for one request to complete before issuing the next, you can speed things up with jQuery by requesting the data in parallel, by using jQuery's $.when() function:
JS
// r1/r2 are [data, statusText, jqXHR] arrays -- hence the [0] indexing.
$.when($.get('1.json'), $.get('2.json')).then(function(r1, r2){
console.log(r1[0].message + " " + r2[0].message);
});
The callback function is executed when both of these GET requests finish successfully. $.when() takes the promises returned by two $.get() calls, and constructs a new promise object. The r1 and r2 arguments of the callback are arrays, whose first elements contain the server responses.
Here's my attempt at directly addressing your question
Basically, you just build up and AJAX call stack, execute them all, and a provided function is called upon completion of all the events - the provided argument being an array of the results from all the supplied ajax requests.
Clearly this is early code - you could get more elaborate with this in terms of the flexibility.
<script type="text/javascript" src="http://jqueryjs.googlecode.com/files/jquery-1.3.2.min.js"></script>
<script type="text/javascript">
// Collects a list of ajax request descriptors, dispatches them all in
// parallel, and invokes onComplete with the array of results (in
// completion order) once every request has reported back.
var ParallelAjaxExecuter = function( onComplete )
{
    this.requests = [];
    this.results = [];
    this.onComplete = onComplete;
};

// Queues one request. `method` is the transport function (e.g. $.get);
// `format` is the dataType forwarded to it by dispatchAll.
ParallelAjaxExecuter.prototype.addRequest = function( method, url, data, format )
{
    this.requests.push( {
        "method"    : method,
        "url"       : url,
        "data"      : data,
        "format"    : format,
        "completed" : false
    } );
};

// Fires every queued request at once. Each callback closes over its own
// request object so it can mark the right entry complete.
// Fixes vs. original: a dangling `console.log` expression statement (never
// called -- a no-op) was removed, and the stored `format` is now actually
// forwarded to the transport method.
ParallelAjaxExecuter.prototype.dispatchAll = function()
{
    var self = this;
    $.each( self.requests, function( i, request )
    {
        request.method( request.url, request.data, (function( r )
        {
            return function( data )
            {
                r.completed = true;
                self.results.push( data );
                self.checkAndComplete();
            };
        })( request ), request.format );
    } );
};

// True once every queued request has called back.
// Fix vs. original: `request` was an implicit global (missing var) and the
// prototype assignments were joined by a stray comma operator.
ParallelAjaxExecuter.prototype.allRequestsCompleted = function()
{
    for ( var i = 0; i < this.requests.length; i++ )
    {
        if ( this.requests[i].completed === false )
        {
            return false;
        }
    }
    return true;
};

// Fires onComplete exactly once, when the last outstanding request lands.
ParallelAjaxExecuter.prototype.checkAndComplete = function()
{
    if ( this.allRequestsCompleted() )
    {
        this.onComplete( this.results );
    }
};
// Sums the four squared results once all requests finish.
// NOTE(review): eval() over joined responses is dangerous in general; with
// numeric responses a Number()/reduce sum would be safer.
var pe = new ParallelAjaxExecuter( function( results )
{
alert( eval( results.join( '+' ) ) );
} );
pe.addRequest( $.get, 'test.php', {n:1}, 'text' );
pe.addRequest( $.get, 'test.php', {n:2}, 'text' );
pe.addRequest( $.get, 'test.php', {n:3}, 'text' );
pe.addRequest( $.get, 'test.php', {n:4}, 'text' );
pe.dispatchAll();
</script>
here's test.php
<?php
// Echoes n squared; each parallel GET in the demo requests one square.
echo pow( $_GET['n'], 2 );
?>
Update: Per the answer given by Yair Leviel, this answer is obsolete. Use a promise library, like jQuery.when() or Q.js.
I created a general purpose solution as a jQuery extension. Could use some fine tuning to make it more general, but it suited my needs. The advantage of this technique over the others in this posting as of the time of this writing was that any type of asynchronous processing with a callback can be used.
Note: I'd use Rx extensions for JavaScript instead of this if I thought my client would be okay with taking a dependency on yet-another-third-party-library :)
// jQuery extension for running multiple async methods in parallel
// and getting a callback with all results when all of them have completed.
//
// Each worker is a function that takes a callback as its only argument, and
// fires up an async process that calls this callback with its result.
//
// Example:
// $.parallel(
// function (callback) { $.get("form.htm", {}, callback, "html"); },
// function (callback) { $.post("data.aspx", {}, callback, "json"); },
// function (formHtml, dataJson) {
// // Handle success; each argument to this function is
// // the result of correlating ajax call above.
// }
// );
// Registers $.parallel: all arguments except the last are worker functions
// (each takes a single completion callback); the last argument is invoked
// with every worker's first result once all workers have completed.
(function ($) {
$.parallel = function (anyNumberOfWorkers, allDoneCallback) {
var workers = [];
var workersCompleteCallback = null;
// To support any number of workers, use "arguments" variable to
// access function arguments rather than the names above.
var lastArgIndex = arguments.length - 1;
$.each(arguments, function (index) {
if (index == lastArgIndex) {
workersCompleteCallback = this;
} else {
workers.push({ fn: this, done: false, result: null });
}
});
// Short circuit this edge case
if (workers.length == 0) {
workersCompleteCallback();
return;
}
// Fire off each worker process, asking it to report back to onWorkerDone.
$.each(workers, function (workerIndex) {
var worker = this;
var callback = function () { onWorkerDone(worker, arguments); };
worker.fn(callback);
});
// Store results and update status as each item completes.
// The [0] on workerResultS below assumes the client only needs the first parameter
// passed into the return callback. This simplifies the handling in allDoneCallback,
// but may need to be removed if you need access to all parameters of the result.
// For example, $.post calls back with success(data, textStatus, XMLHttpRequest). If
// you need textStatus or XMLHttpRequest then pull off the [0] below.
function onWorkerDone(worker, workerResult) {
worker.done = true;
worker.result = workerResult[0]; // this is the [0] ref'd above.
var allResults = [];
for (var i = 0; i < workers.length; i++) {
if (!workers[i].done) return;
else allResults.push(workers[i].result);
}
workersCompleteCallback.apply(this, allResults);
}
};
})(jQuery);
UPDATE And another two years later, this looks insane because the accepted answer has changed to something much better! (Though still not as good as Yair Leviel's answer using jQuery's when)
18 months later, I just hit something similar. I have a refresh button, and I want the old content to fadeOut and then the new content to fadeIn. But I also need to get the new content. The fadeOut and the get are asynchronous, but it would be a waste of time to run them serially.
What I do is really the same as the accepted answer, except in the form of a reusable function. Its primary virtue is that it is much shorter than the other suggestions here.
// Runs an array of async actions in parallel; each action receives a
// completion callback and passes it its result. Once every action has
// reported, `finished` is invoked with the results in the callers' order.
// Fix vs. original: finishedCount was an implicit global (missing var), so
// two overlapping parallel() runs would corrupt each other's count.
var parallel = function(actions, finished) {
    var finishedCount = 0;
    var results = [];
    $.each(actions, function(i, action) {
        action(function(result) {
            // Index by position so results keep input order even when
            // actions complete out of order.
            results[i] = result;
            finishedCount++;
            if (finishedCount == actions.length) {
                finished(results);
            }
        });
    });
};
You pass it an array of functions to run in parallel. Each function should accept another function to which it passes its result (if any). parallel will supply that function.
You also pass it a function to be called when all the operations have completed. This will receive an array with all the results in. So my example was:
// Fades out the old content and fetches the new content at the same time;
// swaps them in only once both operations have completed. results[0] (from
// fadeOut) carries no value and is ignored; results[1] is the new content.
refreshButton.click(function() {
parallel([
function(f) {
contentDiv.fadeOut(f);
},
function(f) {
portlet.content(f);
},
],
function(results) {
contentDiv.children().remove();
contentDiv.append(results[1]);
contentDiv.fadeIn();
});
});
So when my refresh button is clicked, I launch jQuery's fadeOut effect and also my own portlet.content function (which does an async get, builds a new bit of content and passes it on), and then when both are complete I remove the old content, append the result of the second function (which is in results[1]) and fadeIn the new content.
As fadeOut doesn't pass anything to its completion function, results[0] presumably contains undefined, so I ignore it. But if you had three operations with useful results, they would each slot into the results array, in the same order you passed the functions.
you could do something like this
// Issues both requests in parallel; whichever callback completes the pair
// triggers processData.
// Fix vs. original: the completion test checked `data.length` (a property
// of one response object) instead of `allData.length` (how many responses
// have arrived), so processData was never reliably called.
var allData = []
$.getJSON("/values/1", function(data) {
    allData.push(data);
    if (allData.length == 2) {
        processData(allData) // where process data processes all the data
    }
});
$.getJSON("/values/2", function(data) {
    allData.push(data);
    if (allData.length == 2) {
        processData(allData) // where process data processes all the data
    }
});
// NOTE(review): allData holds the raw response objects in completion
// order; data[0] + data[1] presumably needs the .value fields -- verify
// against the endpoints.
var processData = function(data){
    var sum = data[0] + data[1]
    $('#mynode').html(sum);
}
Here's an implementation using mbostock/queue:
// mbostock/queue: each defer() registers one task; awaitAll fires once
// every task has invoked its callback(err, result).
queue()
.defer(function(callback) {
$.post('/echo/json/', {json: JSON.stringify({value: 1}), delay: 1}, function(data) {
callback(null, data.value);
});
})
.defer(function(callback) {
$.post('/echo/json/', {json: JSON.stringify({value: 3}), delay: 2}, function(data) {
callback(null, data.value);
});
})
.awaitAll(function(err, results) {
// Sum the collected values (results keeps defer() registration order).
var result = results.reduce(function(acc, value) {
return acc + value;
}, 0);
console.log(result);
});
The associated fiddle: http://jsfiddle.net/MdbW2/
With the following jQuery extension (it can also be written as a standalone function) you can do this:
// whenAll resolves with an object keyed like its input, so results.val1 /
// results.val2 hold the two responses regardless of completion order.
$.whenAll({
val1: $.getJSON('/values/1'),
val2: $.getJSON('/values/2')
})
.done(function (results) {
var sum = results.val1.value + results.val2.value;
$('#mynode').html(sum);
});
The JQuery (1.x) extension whenAll():
// Takes a map of name -> promise (or deferred-factory function), waits for
// all of them via $.when, and resolves with an object of results keyed by
// the same names. Rejects with (err, key) on the first failure.
$.whenAll = function (deferreds) {
function isPromise(fn) {
return fn && typeof fn.then === 'function' &&
String($.Deferred().then) === String(fn.then);
}
var d = $.Deferred(),
keys = Object.keys(deferreds),
args = keys.map(function (k) {
return $.Deferred(function (d) {
var fn = deferreds[k];
// Non-promise values are treated as $.Deferred(beforeStart) factories.
(isPromise(fn) ? fn : $.Deferred(fn))
.done(d.resolve)
.fail(function (err) { d.reject(err, k); })
;
});
});
$.when.apply(this, args)
.done(function () {
// Re-key the positional results back onto the caller's names.
var resObj = {},
resArgs = Array.prototype.slice.call(arguments);
resArgs.forEach(function (v, i) { resObj[keys[i]] = v; });
d.resolve(resObj);
})
.fail(d.reject);
return d;
};
See jsbin example:
http://jsbin.com/nuxuciwabu/edit?js,console
The most professional solution for me would be by using async.js and Array.reduce like so:
// async.js (third-party): runs the worker for every value in parallel and
// gathers results in input order into `results`.
async.map([1, 2, 3, 4, 5], function (number, callback) {
$.getJSON("/values/" + number, function (data) {
callback(null, data.value);
});
}, function (err, results) {
// NOTE(review): reduce without an initial value throws on an empty array.
$("#mynode").html(results.reduce(function(previousValue, currentValue) {
return previousValue + currentValue;
}));
});
If the result of one request depends on the other, you can't make them parallel.
Building on Yair's answer.
You can define the ajax promises dynamically.
// Builds `len` ajax promises for consecutive /values/ endpoints and waits
// on all of them with $.when.
// Fix vs. original: the mapper incremented an undeclared variable `i`
// (a ReferenceError in strict mode); it should consume `start`.
var start = 1; // starting value
var len = 2; // no. of requests
var promises = (new Array(len)).fill().map(function() {
    return $.ajax("/values/" + start++);
});
$.when.apply($, promises)
    .then(myFunc, myFailure);
Suppose you have an array of file name.
var templateNameArray = ["test.html", "test2.html", "test3.html"];
// Tracks which templates were already requested so repeated calls do not
// fetch the same file twice.
// Fixes vs. original: the array was declared `deffereds` but used as
// `deferreds` (a ReferenceError); htmlTemplatesLoadStateMap and the loop
// index were implicit globals; and `$.when.all` is not a jQuery API --
// $.when.apply($, deferreds) is the standard way to wait on an array.
var htmlTemplatesLoadStateMap = {};
var deferreds = [];
for (var i = 0; i < templateNameArray.length; i++)
{
    if (!htmlTemplatesLoadStateMap[templateNameArray[i]])
    {
        deferreds.push($.get("./Content/templates/" + templateNameArray[i],
            function (response, status, xhr) {
                if (status == "error") { }
                else {
                    $("body").append(response);
                }
            }));
        htmlTemplatesLoadStateMap[templateNameArray[i]] = true;
    }
}
$.when.apply($, deferreds).always(function (resultsArray) {
    yourfunctionTobeExecuted(yourPayload);
});
I needed multiple, parallel ajax calls, and the jquery $.when syntax wasn't amenable to the full $.ajax format I am used to working with. So I just created a setInterval timer to periodically check when each of the ajax calls had returned. Once they were all returned, I could proceed from there.
I read there may be browser limitations on how many simultaneous ajax calls you can have in flight at once (typically around six per origin in modern browsers), but $.ajax is inherently asynchronous, so making the ajax calls one-by-one would still result in parallel execution (within the browser's possible limitation).

Categories

Resources