JavaScript: waiting for multiple async AJAX responses is not working

I have been trying to make multiple AJAX calls in async mode and then wait for all of them to complete before proceeding.
I am using jQuery's $.when().
var results_array = [];
var num = 0;
var promises = [];
ldap_cmd_array.forEach(element => {
    var myldap = ldap_data;
    myldap.push({
        "name": "cmd",
        "value": element
    });
    console.log(++num);
    promises.push(ajaxCall(myldap, 'aaa',
        // success callback
        function (data) {
            console.log(--num);
            results_array.push(data);
            console.log('pass');
        },
        // error callback
        function (err) {
            // Do nothing
            console.log(--num);
            console.log('fail');
        }
    ));
});
$.when.apply($, promises)
    .then(function() {
        console.log(results_array);
    });
But in the output, I see that results_array is printed before all the AJAX calls have completed. I'm not sure where I am going wrong. Any help is appreciated. Thanks in advance.
Note: the Chrome browser console output is attached as an image.

Finally I was able to solve it.
Thanks to Zim84, your pointer actually solved my problem, kudos!!
var results_array = [];
var num = 0;
var promises = [];
console.log(ldap_data);
ldap_cmd_array.forEach(element => {
    var myldap = ldap_data.slice(); // shallow copy of the array, so each call gets its own parameter list
    myldap.push({
        "name": "cmd",
        "value": element
    });
    var dObject = new $.Deferred();
    console.log(++num);
    promises.push(dObject);
    ajaxCall(myldap, 'taaa',
        // success callback
        function (data) {
            console.log(--num);
            dObject.resolve();
            results_array.push(data);
            console.log('pass');
        },
        // error callback
        function (err) {
            // Do nothing
            dObject.resolve();
            console.log(--num);
            console.log('fail');
        }
    );
});
$.when.apply($, promises)
    .then(function () {
        console.log('I should print after all promises');
        console.log(results_array);
    });
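If ajaxCall can be made to return the jqXHR that $.ajax produces, the manual Deferred objects aren't needed at all. A minimal sketch under that assumption (the URL and method inside ajaxCall are placeholders, not the original implementation):
// Hypothetical variant of ajaxCall that simply returns the jqXHR promise from $.ajax
function ajaxCall(params, action) {
    return $.ajax({
        url: '/your/endpoint',   // assumption: whatever URL ajaxCall normally targets
        method: 'POST',
        data: params
    });
}

var promises = ldap_cmd_array.map(function (element) {
    var myldap = ldap_data.slice();
    myldap.push({ name: 'cmd', value: element });
    return ajaxCall(myldap, 'taaa'); // push the jqXHR itself
});

$.when.apply($, promises).then(function () {
    // each argument is [data, textStatus, jqXHR] for one call
    var results_array = Array.prototype.map.call(arguments, function (arg) {
        return arg[0];
    });
    console.log(results_array);
});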

Promise.all([ajaxCall1, ..., ajaxCallN]).then(responseArray => {})
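In jQuery 3.0+ the promise returned by $.ajax is Promises/A+ compliant, so native Promise.all can consume it directly. A minimal sketch, assuming the same scenario as the question (the endpoint URL is a placeholder):
var calls = ldap_cmd_array.map(function (element) {
    var myldap = ldap_data.slice();
    myldap.push({ name: 'cmd', value: element });
    // assumption: the URL and method used by the real ajaxCall
    return $.ajax({ url: '/your/endpoint', method: 'POST', data: myldap });
});

Promise.all(calls).then(function (responseArray) {
    // responseArray[i] is the response body of the i-th call, in the original order
    console.log(responseArray);
});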

Related

How to return a value from an AJAX call function

I have a function
function GetITStaffList(){
    var go_path = "Server/ITComplains.php?action=GetITStaffList&vars=0";
    $jqLibrary.get(go_path, {}, function(data) {
        var parseData = JSON.parse(data);
        console.log("GetPendingAndInProgressComplainsByGeneratorId : ", parseData);
        return parseData;
    });
}
I am calling this somewhere in the code like this
var ITStaffList = GetITStaffList();
MakeDropDownITStaff(ITStaffList);
But the problem is that each time it returns null. I know that I have to use a callback or something like a promise, but I don't know how to fit that into my context. How do I write a reusable function with an AJAX call that returns data on demand?
Return a promise instead.
function GetITStaffList(){
    return new Promise(function(resolve){
        var go_path = "Server/ITComplains.php?action=GetITStaffList&vars=0";
        $jqLibrary.get(go_path, {}, function(data) {
            var parseData = JSON.parse(data);
            console.log("GetPendingAndInProgressComplainsByGeneratorId : ", parseData);
            resolve(parseData); // Notice this
        });
    });
}
Now you can call the function and wait for data.
GetITStaffList().then(function(data){
    console.log(data)
})
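Since $jqLibrary appears to be an alias for jQuery and $.get already returns a promise, the explicit new Promise wrapper can also be avoided. A minimal sketch under that assumption:
function GetITStaffList() {
    var go_path = "Server/ITComplains.php?action=GetITStaffList&vars=0";
    // $.get returns a jqXHR, which is already a thenable promise
    return $jqLibrary.get(go_path).then(function (data) {
        return JSON.parse(data); // the parsed list becomes the resolved value
    });
}

GetITStaffList().then(function (ITStaffList) {
    MakeDropDownITStaff(ITStaffList);
});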

How to execute multiple ajax calls and get the result?

Hi, I want to execute a batch of AJAX calls, get the responses, and then render the results for the user.
I'm using this code, but it is not working because the render function executes before all the AJAX responses have been collected.
serviceQuery: function (id) {
    return $.getJSON(SERVICEURL + "/", id);
},
queryService: function(data){
    var self = this;
    var queries = [];
    var results = [];
    $.each(data, function (index, value) {
        queries.push(self.serviceQuery(value.id));
    });
    $.when(queries).done(function (response) {
        $.each(response, function (index, val) {
            val.then(function (result){
                results.push(result[0]);
            });
        });
        self.renderResult(results);
    });
},
renderResult: function(results){
    $.each(results, function (index, value) {
        // Error here because value.name is undefined
        console.info(value.name);
    });
}
Any idea how to wait for all the AJAX calls to finish before executing the render function?
Use .apply() on the $.when() call to handle an array of promises. Note also that .then() delivers results asynchronously.
let queries = [
    // `$.ajax()` call and response
    new $.Deferred(function(dfd) {
        setTimeout(dfd.resolve, Math.floor(Math.random() * 1000)
            // response, textStatus, jqxhr
            , [{name:"a"}, "success", {}])
    })
    // `$.ajax()` call and response
    , new $.Deferred(function(dfd) {
        setTimeout(dfd.resolve, Math.floor(Math.random() * 1000)
            // response, textStatus, jqxhr
            , [{name:"b"}, "success", {}])
    })
];
$.when.apply(null, queries)
    .then(function() {
        renderResult($.map(arguments, function(res) {return res[0]}));
    });
function renderResult(results) {
    $.each(results, function (index, value) {
        console.info(value.name);
    });
}
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js">
</script>
Change the $.each to a for loop. It is possible that the .then on the value hasn't finished processing before the loop completes. $.each is synchronous, but the .then generally means you are dealing with a promise, which isn't synchronous.
$.each(response, function (index, val) {
    val.then(function (result){
        results.push(result[0]);
    });
});
Change to:
for (var idx = 0; idx < response.length; idx++) {
    results.push(response[idx]);
}
Or, keeping the $.each, you may just need to remove the .then call:
$.each(response, function (index, val) {
    results.push(val[0]);
});
I see one potential issue here:
$.when(queries).done(function (response) {
    $.each(response, function (index, val) {
        val.then(function (result){
            results.push(result[0]);
        });
    });
    self.renderResult(results);
});
Basically, your pseudocode says this:
For each return value from your $.when() command, take the value of that and return a new promise (via val.then). However, because you never wait for the val deferred to run, results.push is not guaranteed to be called before your self.renderResult(results) call.
The code looks weird to me in that you would need two nested deferreds for an AJAX call. So I think the larger meta issue is why you need to do val.then in the first place. However, given the current code, you would need to do something like this:
var innerDeferreds = [];
$.each(response, function (index, val) {
    innerDeferreds.push(val.then(function (result){
        results.push(result[0]);
    }));
});
$.when.apply($, innerDeferreds).then(function() { self.renderResult(results); });
Again, my guess is that you don't need the val.then in the first place, but I would need to look in a debugger to see what the values of response, index and val are. (If you set up a jsfiddle that would be super helpful!)

Wait for promises inside of an angular.forEach loop

I know this has been asked quite a few times already, but after a day of searching I still can't get it to work, although it's just like what is shown as a solution everywhere...
I have an async request to a database which returns an array of data. For each object in this array I need to start another async request to the database, and as soon as ALL of these async requests resolve, I want to return them. I read you could do it with $q.all(...).
So here's the code:
Factory.firstAsyncRequest(id).then(function (arrayWithObjects) {
    var promises = [];
    var dataArr = [];
    angular.forEach(arrayWithObjects, function (object, key) {
        var deferred = $q.defer();
        promises.push(deferred);
        Factory.otherAsyncRequest(key).then(function (objectData) {
            dataArr.push({
                name: objectData.name,
                key: key,
                status: objectData.status
            });
            deferred.resolve();
            console.info('Object ' + key + ' resolved');
        });
    });
    $q.all(promises).then(function () {
        $rootScope.data = dataArr;
        console.info('All resolved');
    });
});
From the console I see that the $q.all is resolved BEFORE each object. Did I get something wrong? This seems to work for everyone...
Your help is highly appreciated, been looking the whole night, it's 5:30am now lol..
Cheers
EDIT:
So for anyone who's coming here later: it was just the promises.push(deferred.PROMISE) bit. Though, I read that angular.forEach is actually not a recommended way to loop through an array, because it was originally not intended to be used by the end user. I don't know if that's correct, but I figured out another way if you don't want to use angular.forEach:
Users.getAll(uid).then(function (users) {
    var uids = ObjHandler.getKeys(users); // own function iterating through Object.keys and pushing them to an array
    var cntr = 0;
    function next() {
        if (cntr < uids.length) {
            Users.getProfile(uids[cntr]).then(function (profile) {
                var Profile = {
                    name: profile.name,
                    key: uids[cntr],
                    status: profile.status
                };
                dataArr[uids[cntr]] = Profile;
                if (cntr === uids.length - 1) {
                    defer.resolve();
                    console.info('Service: query finished');
                } else {
                    cntr++;
                    next();
                }
            });
        }
    }
    next();
});
And the getKeys function:
.factory('ObjHandler', [
    function () {
        return {
            getKeys: function(obj) {
                var r = [];
                for (var k in obj) {
                    if (!obj.hasOwnProperty(k))
                        continue;
                    r.push(k);
                }
                return r;
            }
        };
    }])
Instead of
promises.push(deferred);
Try this:
promises.push(deferred.promise);
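For completeness, a minimal sketch of the loop with that fix applied, and with the intermediate deferred dropped entirely by pushing the promise that Factory.otherAsyncRequest already returns (assuming it returns a $q promise):
Factory.firstAsyncRequest(id).then(function (arrayWithObjects) {
    var dataArr = [];
    var promises = [];
    angular.forEach(arrayWithObjects, function (object, key) {
        // push the promise returned by .then(); no manual deferred needed
        promises.push(Factory.otherAsyncRequest(key).then(function (objectData) {
            dataArr.push({
                name: objectData.name,
                key: key,
                status: objectData.status
            });
        }));
    });
    $q.all(promises).then(function () {
        $rootScope.data = dataArr;
    });
});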

Multiple Promise Chains in Single Function

I have some code that will dynamically generate an AJAX request based off a scenario that I'm retrieving via an AJAX request to a server.
The idea is that:
A server provides a "Scenario" for me to generate an AJAX Request.
I generate an AJAX Request based off the Scenario.
I then repeat this process, over and over in a Loop.
I'm doing this with promises here: http://jsfiddle.net/3Lddzp9j/11/
However, I'm trying to edit the code above so I can handle an array of scenarios from the initial AJAX request.
I.e.:
{
    "base": {
        "frequency": "5000"
    },
    "endpoints": [
        {
            "method": "GET",
            "type": "JSON",
            "endPoint": "https://api.github.com/users/alvarengarichard",
            "queryParams": {
                "objectives": "objective1, objective2, objective3"
            }
        },
        {
            "method": "GET",
            "type": "JSON",
            "endPoint": "https://api.github.com/users/dkang",
            "queryParams": {
                "objectives": "objective1, objective2, objective3"
            }
        }
    ]
}
This seems like it would be straightforward, but the issue appears to be in the "waitForTimeout" function.
I'm unable to figure out how to run multiple promise chains. I have an array of promises in the "deferred" variable, but the chain only continues on the first one, despite being in a for loop.
Could anyone provide insight as to why this is? You can see where this is occurring here: http://jsfiddle.net/3Lddzp9j/10/
The main problems are that:
waitForTimeout isn't passing on all the instructions;
even if waitForTimeout were fixed, callApi isn't written to perform multiple AJAX calls.
There are a number of other issues with the code:
you really need some data checking (and associated error handling) to ensure that the expected components exist in the data;
mapToInstruction is an unnecessary step: you can map straight from data to ajax options, with no need for an intermediate data transform;
waitForTimeout can be greatly simplified to a single promise, resolved by a single timeout;
synchronous functions in a promise chain don't need to return a promise: they can return a result or undefined.
Sticking with jQuery throughout, you should end up with something like this:
var App = (function ($) {
    // Gets the scenario from the API
    // sugar for $.ajax with GET as method - NOTE: this returns a promise
    var getScenario = function () {
        console.log('Getting scenario ...');
        return $.get('http://demo3858327.mockable.io/scenario2');
    };
    var checkData = function (data) {
        if (!data.endpoints || !data.endpoints.length) {
            return $.Deferred().reject('no endpoints').promise();
        }
        data.base = data.base || {};
        data.base.frequency = data.base.frequency || 1000; // default value
        return data; // pass the checked data on down the chain
    };
    var waitForTimeout = function(data) {
        return $.Deferred(function(dfrd) {
            setTimeout(function() {
                dfrd.resolve(data.endpoints);
            }, data.base.frequency);
        }).promise();
    };
    var callApi = function(endpoints) {
        console.log('Calling API with given instructions ...');
        return $.when.apply(null, endpoints.map(function(ep) {
            return $.ajax({
                type: ep.method,
                dataType: ep.type,
                url: ep.endPoint
            }).then(null, function(jqXHR, textStatus, errorThrown) {
                return textStatus;
            });
        })).then(function() {
            // convert arguments to an array of results
            return $.map(arguments, function(arg) {
                return arg[0];
            });
        });
    };
    var handleResults = function(results) {
        // results is an array of data values/objects returned by the ajax calls.
        console.log("Handling data ...");
        ...
    };
    // The 'run' method
    var run = function() {
        getScenario()
            .then(checkData)
            .then(waitForTimeout)
            .then(callApi)
            .then(handleResults)
            .then(null, function(reason) {
                console.error(reason);
            })
            .then(run);
    };
    return {
        run : run
    };
})(jQuery);
App.run();
App.run();
This will stop on error but could be easily adapted to continue.
I'll try to answer your question using KrisKowal's Q, since I'm not very proficient with the promises generated by jQuery.
First of all, I'm not sure whether you want to resolve the array of promises in series or in parallel. In the solution proposed I resolve all of them in parallel :). To resolve them in series I'd use Q's reduce (see the sketch at the end of this answer).
function getScenario() { ... }
function ajaxRequest(instruction) { ... }

function createPromisifiedInstruction(instruction) {
    // delay with frequency, not sure why you want to do this :(
    return Q.delay(instruction.frequency)
        .then(function () {
            return ajaxRequest(instruction);
        });
}

function run() {
    getScenario()
        .then(function (data) {
            var promises = [];
            var instruction;
            var i;
            for (i = 0; i < data.endpoints.length; i += 1) {
                instruction = {
                    method: data.endpoints[i].method,
                    type: data.endpoints[i].type,
                    endpoint: data.endpoints[i].endPoint,
                    frequency: data.base.frequency
                };
                promises.push(createPromisifiedInstruction(instruction));
            }
            // alternative: Q.allSettled if all the promises don't need to
            // be fulfilled (some of them might be rejected)
            return Q.all(promises);
        })
        .then(function (instructionsResults) {
            // instructionsResults is an array with the result of each
            // promisified instruction
        })
        .then(run)
        .done();
}
run();
Ok, let me explain the solution above:
First of all, assume that getScenario gets you the initial JSON you start with (actually it returns a promise which is resolved with the JSON).
Create the structure of each instruction.
Promisify each instruction, so that each one is actually a promise whose resolution value will be the promise returned by ajaxRequest.
ajaxRequest returns a promise whose resolution value is the result of the request, which also means that createPromisifiedInstruction's resolution value will be the resolution value of ajaxRequest.
Return a single promise with Q.all; what it actually does is fulfill itself when all the promises it was built with are resolved :). If one of them fails and you still need the overall promise to resolve, use Q.allSettled.
Do whatever you want with the resolution value of all the previous promises; note that instructionsResults is an array holding the resolution value of each promise, in the order they were declared.
Reference: KrisKowal's Q
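As mentioned above, if the instructions had to run in series instead of in parallel, Q's reduce-style chaining can be used; a minimal sketch, reusing the hypothetical createPromisifiedInstruction helper and collecting the results in order:
function runInSeries(instructions) {
    // Start with an already-resolved promise and chain one instruction at a time
    return instructions.reduce(function (chain, instruction) {
        return chain.then(function (resultsSoFar) {
            return createPromisifiedInstruction(instruction)
                .then(function (result) {
                    resultsSoFar.push(result);
                    return resultsSoFar;
                });
        });
    }, Q([]));
}

// runInSeries(instructions).then(function (results) { ... });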
Try utilizing deferred.notify within setTimeout, with Number(settings.frequency) * (1 + key) as the setTimeout duration. The msg passed to deferred.notify is logged to the console in the deferred.progress callback, which is the third function argument of the .then following the timeout.
var App = (function ($) {
    var getScenario = function () {
        console.log("Getting scenario ...");
        return $.get("http://demo3858327.mockable.io/scenario2");
    };
    var mapToInstruction = function (data) {
        var res = $.map(data.endpoints, function(settings, key) {
            return {
                method: settings.method,
                type: settings.type,
                endpoint: settings.endPoint,
                frequency: data.base.frequency
            }
        });
        console.log("Instructions received:", res);
        return res
    };
    var waitForTimeout = function(instruction) {
        var res = $.when.apply(instruction,
            $.map(instruction, function(settings, key) {
                return new $.Deferred(function(dfd) {
                    setTimeout(function() {
                        dfd.notify("Waiting for "
                                   + settings.frequency
                                   + " ms")
                           .resolve(settings);
                    }, Number(settings.frequency) * (1 + key));
                }).promise()
            })
        )
        .then(function() {
            return this
        }, function(err) {
            console.log("error", err)
        }, function(msg) {
            console.log("\r\n" + msg + "\r\nat " + $.now() + "\r\n")
        });
        return res
    };
    var callApi = function(instruction) {
        console.log("Calling API with given instructions ...", instruction);
        var res = $.when.apply(instruction,
            $.map(instruction, function(request, key) {
                return request.then(function(settings) {
                    return $.ajax({
                        type: settings.method,
                        dataType: settings.type,
                        url: settings.endpoint
                    });
                })
            })
        )
        .then(function(data) {
            return $.map(arguments, function(response, key) {
                return response[0]
            })
        })
        return res
    };
    var handleResults = function(data) {
        console.log("Handling data ...", JSON.stringify(data, null, 4));
        return data
    };
    var run = function() {
        getScenario()
            .then(mapToInstruction)
            .then(waitForTimeout)
            .then(callApi)
            .then(handleResults)
            .then(run);
    };
    return {
        // This will expose only the run method
        // but will keep all other functions private
        run : run
    }
})($);
// ... And start the app
App.run();
<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js">
</script>
jsfiddle http://jsfiddle.net/3Lddzp9j/13/
You have a return statement in the loop in your waitForTimeout function. This means that the function is going to return after the first iteration of the loop, and that is where you are going wrong.
You're also using the deferred antipattern and are using promises in places where you don't need them. You don't need to return a promise from a then handler unless there's something to await.
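For example, a minimal sketch of that antipattern next to the simpler form (the /users/:id endpoint is hypothetical, not from the question):
// Deferred antipattern: manually resolving a Deferred around an existing promise
function getUserAnti(id) {
    var d = $.Deferred();
    $.ajax({ url: '/users/' + id }).done(d.resolve).fail(d.reject);
    return d.promise();
}

// Simpler: return the promise $.ajax already gives you
function getUser(id) {
    return $.ajax({ url: '/users/' + id });
}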
The key is that you need to map each of your instructions to a promise. Array#map is perfect for this. And please use a proper promise library, not jQuery promises (edit: but if you absolutely must use jQuery promises...):
var App = (function ($) {
    // Gets the scenario from the API
    // NOTE: this returns a promise
    var getScenario = function () {
        console.log('Getting scenario ...');
        return $.get('http://demo3858327.mockable.io/scenario');
    };
    // mapToInstructions is basically unnecessary. each instruction does
    // not need its own timeout if they're all the same value, and you're not
    // reshaping the original values in any significant way
    // This wraps the setTimeout into a promise, again
    // so we can chain it
    var waitForTimeout = function(data) {
        var d = $.Deferred();
        setTimeout(function () {
            d.resolve(data.endpoints);
        }, data.base.frequency);
        return d.promise();
    };
    var callApi = function(instruction) {
        return $.ajax({
            type: instruction.method,
            dataType: instruction.type,
            url: instruction.endPoint
        });
    };
    // Final step: call the API from the
    // provided instructions
    var callApis = function(instructions) {
        console.log(instructions);
        console.log('Calling API with given instructions ...');
        return $.when.apply($, instructions.map(callApi));
    };
    var handleResults = function() {
        var data = Array.prototype.slice.call(arguments);
        console.log("Handling data ...");
    };
    // The 'run' method
    var run = function() {
        getScenario()
            .then(waitForTimeout)
            .then(callApis)
            .then(handleResults)
            .then(run);
    };
    return {
        run : run
    }
})($);
App.run();

Parallel asynchronous Ajax requests using jQuery

I'd like to update a page based upon the results of multiple ajax/json requests. Using jQuery, I can "chain" the callbacks, like this very simple stripped down example:
$.getJSON("/values/1", function(data) {
// data = {value: 1}
var value_1 = data.value;
$.getJSON("/values/2", function(data) {
// data = {value: 42}
var value_2 = data.value;
var sum = value_1 + value_2;
$('#mynode').html(sum);
});
});
However, this results in the requests being made serially. I'd much rather a way to make the requests in parallel, and perform the page update after all are complete. Is there any way to do this?
jQuery $.when() and $.done() are exactly what you need:
$.when($.ajax("/page1.php"), $.ajax("/page2.php"))
.then(myFunc, myFailure);
Try this solution, which can support any specific number of parallel queries:
var done = 5; // number of total requests
var sum = 0;
/* Normal loops don't create a new scope */
$([1,2,3,4,5]).each(function() {
    var number = this;
    $.getJSON("/values/" + number, function(data) {
        sum += data.value;
        done -= 1;
        if (done == 0) $("#mynode").html(sum);
    });
});
Run multiple AJAX requests in parallel
When working with APIs, you sometimes need to issue multiple AJAX requests to different endpoints. Instead of waiting for one request to complete before issuing the next, you can speed things up by requesting the data in parallel, using jQuery's $.when() function:
$.when($.get('1.json'), $.get('2.json')).then(function(r1, r2){
    console.log(r1[0].message + " " + r2[0].message);
});
The callback function is executed when both of these GET requests finish successfully. $.when() takes the promises returned by two $.get() calls, and constructs a new promise object. The r1 and r2 arguments of the callback are arrays, whose first elements contain the server responses.
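If either request can fail, note that $.when() rejects as soon as any of its inputs rejects, so a failure handler is worth adding. A minimal sketch, assuming the same two hypothetical JSON files:
$.when($.get('1.json'), $.get('2.json'))
    .then(function (r1, r2) {
        // both succeeded; r1[0] and r2[0] are the response bodies
        console.log(r1[0].message + " " + r2[0].message);
    }, function (jqXHR, textStatus, errorThrown) {
        // called as soon as the first request fails
        console.error("A request failed:", textStatus);
    });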
Here's my attempt at directly addressing your question.
Basically, you just build up an AJAX call stack, execute them all, and a provided function is called upon completion of all the events, with the provided argument being an array of the results from all the supplied AJAX requests.
Clearly this is early code; you could get more elaborate with it in terms of flexibility.
<script type="text/javascript" src="http://jqueryjs.googlecode.com/files/jquery-1.3.2.min.js"></script>
<script type="text/javascript">
var ParallelAjaxExecuter = function( onComplete )
{
    this.requests = [];
    this.results = [];
    this.onComplete = onComplete;
}

ParallelAjaxExecuter.prototype.addRequest = function( method, url, data, format )
{
    this.requests.push( {
        "method"    : method
        , "url"     : url
        , "data"    : data
        , "format"  : format
        , "completed" : false
    } )
}

ParallelAjaxExecuter.prototype.dispatchAll = function()
{
    var self = this;
    $.each( self.requests, function( i, request )
    {
        request.method( request.url, request.data, function( r )
        {
            return function( data )
            {
                console.log( data );
                r.completed = true;
                self.results.push( data );
                self.checkAndComplete();
            }
        }( request ) )
    } )
}

ParallelAjaxExecuter.prototype.allRequestsCompleted = function()
{
    var i = 0;
    while ( request = this.requests[i++] )
    {
        if ( request.completed === false )
        {
            return false;
        }
    }
    return true;
}

ParallelAjaxExecuter.prototype.checkAndComplete = function()
{
    if ( this.allRequestsCompleted() )
    {
        this.onComplete( this.results );
    }
}

var pe = new ParallelAjaxExecuter( function( results )
{
    alert( eval( results.join( '+' ) ) );
} );

pe.addRequest( $.get, 'test.php', {n:1}, 'text' );
pe.addRequest( $.get, 'test.php', {n:2}, 'text' );
pe.addRequest( $.get, 'test.php', {n:3}, 'text' );
pe.addRequest( $.get, 'test.php', {n:4}, 'text' );

pe.dispatchAll();
</script>
here's test.php
<?php
echo pow( $_GET['n'], 2 );
?>
Update: Per the answer given by Yair Leviel, this answer is obsolete. Use a promise library, like jQuery.when() or Q.js.
I created a general purpose solution as a jQuery extension. Could use some fine tuning to make it more general, but it suited my needs. The advantage of this technique over the others in this posting as of the time of this writing was that any type of asynchronous processing with a callback can be used.
Note: I'd use Rx extensions for JavaScript instead of this if I thought my client would be okay with taking a dependency on yet-another-third-party-library :)
// jQuery extension for running multiple async methods in parallel
// and getting a callback with all results when all of them have completed.
//
// Each worker is a function that takes a callback as its only argument, and
// fires up an async process that calls this callback with its result.
//
// Example:
//      $.parallel(
//          function (callback) { $.get("form.htm", {}, callback, "html"); },
//          function (callback) { $.post("data.aspx", {}, callback, "json"); },
//          function (formHtml, dataJson) {
//              // Handle success; each argument to this function is
//              // the result of the corresponding ajax call above.
//          }
//      );
(function ($) {
    $.parallel = function (anyNumberOfWorkers, allDoneCallback) {
        var workers = [];
        var workersCompleteCallback = null;
        // To support any number of workers, use the "arguments" variable to
        // access function arguments rather than the names above.
        var lastArgIndex = arguments.length - 1;
        $.each(arguments, function (index) {
            if (index == lastArgIndex) {
                workersCompleteCallback = this;
            } else {
                workers.push({ fn: this, done: false, result: null });
            }
        });
        // Short circuit this edge case
        if (workers.length == 0) {
            workersCompleteCallback();
            return;
        }
        // Fire off each worker process, asking it to report back to onWorkerDone.
        $.each(workers, function (workerIndex) {
            var worker = this;
            var callback = function () { onWorkerDone(worker, arguments); };
            worker.fn(callback);
        });
        // Store results and update status as each item completes.
        // The [0] on workerResult below assumes the client only needs the first parameter
        // passed into the return callback. This simplifies the handling in allDoneCallback,
        // but may need to be removed if you need access to all parameters of the result.
        // For example, $.post calls back with success(data, textStatus, XMLHttpRequest). If
        // you need textStatus or XMLHttpRequest then pull off the [0] below.
        function onWorkerDone(worker, workerResult) {
            worker.done = true;
            worker.result = workerResult[0]; // this is the [0] ref'd above.
            var allResults = [];
            for (var i = 0; i < workers.length; i++) {
                if (!workers[i].done) return;
                else allResults.push(workers[i].result);
            }
            workersCompleteCallback.apply(this, allResults);
        }
    };
})(jQuery);
UPDATE: And another two years later, this looks insane, because the accepted answer has changed to something much better! (Though still not as good as Yair Leviel's answer using jQuery's when.)
18 months later, I just hit something similar. I have a refresh button, and I want the old content to fadeOut and then the new content to fadeIn. But I also need to get the new content. The fadeOut and the get are asynchronous, but it would be a waste of time to run them serially.
What I do is really the same as the accepted answer, except in the form of a reusable function. Its primary virtue is that it is much shorter than the other suggestions here.
var parallel = function(actions, finished) {
    var finishedCount = 0;
    var results = [];
    $.each(actions, function(i, action) {
        action(function(result) {
            results[i] = result;
            finishedCount++;
            if (finishedCount == actions.length) {
                finished(results);
            }
        });
    });
};
You pass it an array of functions to run in parallel. Each function should accept another function to which it passes its result (if any). parallel will supply that function.
You also pass it a function to be called when all the operations have completed. This will receive an array with all the results in. So my example was:
refreshButton.click(function() {
    parallel([
        function(f) {
            contentDiv.fadeOut(f);
        },
        function(f) {
            portlet.content(f);
        }
    ],
    function(results) {
        contentDiv.children().remove();
        contentDiv.append(results[1]);
        contentDiv.fadeIn();
    });
});
So when my refresh button is clicked, I launch jQuery's fadeOut effect and also my own portlet.content function (which does an async get, builds a new bit of content and passes it on), and then when both are complete I remove the old content, append the result of the second function (which is in results[1]) and fadeIn the new content.
As fadeOut doesn't pass anything to its completion function, results[0] presumably contains undefined, so I ignore it. But if you had three operations with useful results, they would each slot into the results array, in the same order you passed the functions.
You could do something like this:
var allData = [];
$.getJSON("/values/1", function(data) {
    allData.push(data);
    if (allData.length == 2) {
        processData(allData); // where processData processes all the data
    }
});
$.getJSON("/values/2", function(data) {
    allData.push(data);
    if (allData.length == 2) {
        processData(allData); // where processData processes all the data
    }
});
var processData = function(data) {
    var sum = data[0].value + data[1].value;
    $('#mynode').html(sum);
};
Here's an implementation using mbostock/queue:
queue()
    .defer(function(callback) {
        $.post('/echo/json/', {json: JSON.stringify({value: 1}), delay: 1}, function(data) {
            callback(null, data.value);
        });
    })
    .defer(function(callback) {
        $.post('/echo/json/', {json: JSON.stringify({value: 3}), delay: 2}, function(data) {
            callback(null, data.value);
        });
    })
    .awaitAll(function(err, results) {
        var result = results.reduce(function(acc, value) {
            return acc + value;
        }, 0);
        console.log(result);
    });
The associated fiddle: http://jsfiddle.net/MdbW2/
With the following extension of jQuery (it could also be written as a standalone function) you can do this:
$.whenAll({
    val1: $.getJSON('/values/1'),
    val2: $.getJSON('/values/2')
})
.done(function (results) {
    var sum = results.val1.value + results.val2.value;
    $('#mynode').html(sum);
});
The jQuery (1.x) extension whenAll():
$.whenAll = function (deferreds) {
    function isPromise(fn) {
        return fn && typeof fn.then === 'function' &&
            String($.Deferred().then) === String(fn.then);
    }
    var d = $.Deferred(),
        keys = Object.keys(deferreds),
        args = keys.map(function (k) {
            return $.Deferred(function (d) {
                var fn = deferreds[k];
                (isPromise(fn) ? fn : $.Deferred(fn))
                    .done(d.resolve)
                    .fail(function (err) { d.reject(err, k); });
            });
        });
    $.when.apply(this, args)
        .done(function () {
            var resObj = {},
                resArgs = Array.prototype.slice.call(arguments);
            resArgs.forEach(function (v, i) { resObj[keys[i]] = v; });
            d.resolve(resObj);
        })
        .fail(d.reject);
    return d;
};
See jsbin example:
http://jsbin.com/nuxuciwabu/edit?js,console
The most professional solution for me would be to use async.js and Array.reduce, like so:
async.map([1, 2, 3, 4, 5], function (number, callback) {
    $.getJSON("/values/" + number, function (data) {
        callback(null, data.value);
    });
}, function (err, results) {
    $("#mynode").html(results.reduce(function(previousValue, currentValue) {
        return previousValue + currentValue;
    }));
});
If the result of one request depends on the other, you can't make them parallel.
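In that case the calls have to be chained sequentially instead; a minimal sketch using the same hypothetical /values endpoints, where the second URL depends on the first response:
$.getJSON("/values/1").then(function (first) {
    // the second request can use the first response
    return $.getJSON("/values/" + first.value).then(function (second) {
        return first.value + second.value;
    });
}).then(function (sum) {
    $('#mynode').html(sum);
});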
Building on Yair's answer, you can define the AJAX promises dynamically:
var start = 1; // starting value
var len = 2;   // no. of requests
var promises = (new Array(len)).fill().map(function() {
    return $.ajax("/values/" + start++);
});
$.when.apply($, promises)
    .then(myFunc, myFailure);
Suppose you have an array of file names.
var templateNameArray = ["test.html", "test2.html", "test3.html"];
var htmlTemplatesLoadStateMap = {};
var deferreds = [];
for (var i = 0; i < templateNameArray.length; i++) {
    if (!htmlTemplatesLoadStateMap[templateNameArray[i]]) {
        deferreds.push($.get("./Content/templates/" + templateNameArray[i],
            function (response, status, xhr) {
                if (status == "error") { }
                else {
                    $("body").append(response);
                }
            }));
        htmlTemplatesLoadStateMap[templateNameArray[i]] = true;
    }
}
$.when.apply($, deferreds).always(function() {
    yourfunctionTobeExecuted(yourPayload);
});
I needed multiple, parallel AJAX calls, and the jQuery $.when syntax wasn't amenable to the full $.ajax format I am used to working with. So I just created a setInterval timer to periodically check whether each of the AJAX calls had returned. Once they had all returned, I could proceed from there.
I read there may be browser limitations as to how many simultaneous AJAX calls you can have going at once (2?), but $.ajax is inherently asynchronous, so making the AJAX calls one by one would still result in parallel execution (within the browser's possible limitation).
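For illustration, a minimal sketch of that polling approach (the endpoint URLs, the done flags, and the proceed function are assumptions, not the poster's actual code):
var results = { first: null, second: null };
var done = { first: false, second: false };

$.ajax({ url: '/api/first' }).done(function (data) {   // hypothetical endpoint
    results.first = data;
    done.first = true;
});
$.ajax({ url: '/api/second' }).done(function (data) {  // hypothetical endpoint
    results.second = data;
    done.second = true;
});

// Poll until every call has reported back, then proceed
var timer = setInterval(function () {
    if (done.first && done.second) {
        clearInterval(timer);
        proceed(results); // hypothetical next step
    }
}, 100);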
