I am working on a page that uses JavaScript to manage a queue. My challenge is that my code has nested callbacks, which are confusing me with regard to the scope of my queue. Currently, I have the following:
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = [];
MyApp.queueIsLocked = false;

MyApp.enqueue = function(item, onSuccess, onFailure) {
  if (!MyApp.queueIsLocked) {
    MyApp.queueIsLocked = true;
    MyApp.myQueue.push(item);
    MyApp.queueIsLocked = false;
    item.send(
      function() {
        console.log('item: ' + item.id);
        MyApp.queueIsLocked = true;
        MyApp.findItemById(item.id,
          function(index) {
            if (index !== -1) {
              MyApp.myQueue.splice(index, 1);
              MyApp.queueIsLocked = false;
              if (onSuccess) {
                onSuccess(item.id);
              }
            }
          }
        );
      },
      function() {
        alert('Unable to send item to the server.');
        if (onFailure) {
          onFailure();
        }
      }
    );
  }
};

MyApp.findItemById = function(id, onComplete) {
  var index = -1;
  if (MyApp.queueIsLocked) {
    setTimeout(function() {
      // Attempt to find the index again.
    }, 100);
  } else {
    MyApp.queueIsLocked = true;
    for (var i = 0; i < MyApp.myQueue.length; i++) {
      if (MyApp.myQueue[i].id === id) {
        index = i;
        break;
      }
    }
  }
  if (onComplete) {
    onComplete(index);
  }
};
The send function behaves differently based on the details of the item. Sometimes the item will be sent to one server; sometimes it will be sent to multiple servers. Either way, I do not know when the item will be done being "sent". For that reason, I'm using a callback to manage the queue. When the item is done being "sent", I want to remove it from the queue. I need to use either a timeout or an interval to check whether the queue is locked. If it's not locked, I want to remove the item from the queue. This check is adding another level of nesting that is confusing me.
My challenge is that I do not believe the scope of index is working as I expected. I feel like I'm getting a race condition. I'm basing this on the fact that I've written the following Jasmine test:
describe('Queue', function() {
  describe('Approach 1', function() {
    it('should do something', function() {
      MyApp.enqueue({id:'QRA', text:'Test A'});
    });
  });
  describe('Approach 2', function() {
    it('should successfully queue and dequeue items', function() {
      MyApp.enqueue({id:'WX1', text:'Test 1'});
      MyApp.enqueue({id:'QV2', text:'Test 2'});
      MyApp.enqueue({id:'ZE3', text:'Test 3'});
    });
  });
});
When I execute the test, I see the following in the console window:
item: QRA index: 1
item: WX1 index: 2
item: QV2 index: 3
item: ZE3 index: 4
It's like the items aren't getting dequeued as I would expect. Am I way off base in my approach to managing a queue? What am I doing wrong?
Thank you for any assistance.
Here are some questions you need to think through and answer for yourself about your intent and design:
It sounds like the queue represents items you are trying to send to the server. You are adding items to the queue that need to be sent, and removing them from the queue after they have been successfully sent.
Do you want your code to send multiple items simultaneously, in parallel? For example, item A is added to the queue, then sent. Before the asynchronous send for A finishes, item B is added to the list. Should the code try to send item B before the send of item A finishes? Based on your code, it sounds like yes.
It seems that you don't really want/need a queue, per se, so much as you want a list to track which items are in the process of being sent. "Queue" implies that objects are being processed in some kind of FIFO order.
If you just want to track items based on id, then you can use an object instead. For example:
MyApp.items = {};

MyApp.addItem = function(item, onSuccess) {
  MyApp.items[item.id] = item;
  item.send(
    function() { // success
      MyApp.removeItem(item.id);
      if (onSuccess) {
        onSuccess(item.id);
      }
    }
  );
};

MyApp.removeItem = function(id) {
  delete MyApp.items[id];
};
Also, I don't think you need a lock on the queue. Javascript is single-threaded, so you'll never have a case where two parts of your code are trying to operate on the queue at the same time. When an ajax call finishes asynchronously, your callback code won't actually be executed until any other code currently executing finishes.
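For instance, here is a small illustrative snippet (not from your code) showing why no lock is needed; the timeout callback cannot interrupt the loop, because the event loop only runs one piece of code at a time:

var queue = [];
setTimeout(function() {
  // This runs only after ALL of the synchronous code below has finished.
  console.log(queue.length); // logs 100000, never a partial count
}, 0);
for (var i = 0; i < 100000; i++) {
  queue.push(i); // nothing else can touch `queue` while this loop runs
}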
The big flaw I'm seeing is that you set MyApp.queueIsLocked = true immediately before calling MyApp.findItemById. Because it's locked, the function sets up a timeout (that does nothing) and proceeds to call onComplete(-1). The -1 is then explicitly ignored by onComplete, failing to dequeue and leaving your queue locked.
You probably meant to retry the find, like this:
setTimeout(function() {
  // Attempt to find the index again.
  MyApp.findItemById(id, onComplete);
}, 100);
I'm not sure, but I think Jasmine requires explicit instruction to get timeout functions to fire, using jasmine.clock().tick.
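If so, a test that exercises a pending timeout would look something like this (a sketch, assuming Jasmine 2.x's mock clock API):

beforeEach(function() {
  jasmine.clock().install();
});

afterEach(function() {
  jasmine.clock().uninstall();
});

it('fires the queued retry', function() {
  var fired = false;
  setTimeout(function() { fired = true; }, 100);
  expect(fired).toBe(false);
  jasmine.clock().tick(101); // advances the mock clock, firing the timeout
  expect(fired).toBe(true);
});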
That said, I suggest removing all of the references to queueIsLocked, including the above timeout code. Also, if item.id is always a unique string, you can use an object instead of an array to store your values.
Here is a suggested iteration, staying as true to the original API as possible:
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = {};

// Sends the item, calling onSuccess or onFailure when finished.
// The item will appear in MyApp.myQueue while waiting for a response from send.
MyApp.enqueue = function(item, onSuccess, onFailure) {
  MyApp.myQueue[item.id] = item;
  item.send(function() {
    console.log('item: ' + item.id);
    delete MyApp.myQueue[item.id];
    if (onSuccess) {
      onSuccess(item.id);
    }
  }, function() {
    alert('Unable to send item to the server.');
    if (onFailure) {
      onFailure();
    }
  });
};

// Returns the item in the queue, or undefined if not found
MyApp.findItemById = function(id, onComplete) {
  var item = MyApp.myQueue[id];
  if (onComplete) {
    onComplete(item);
  }
  return item;
};
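For example, usage might look like this (the stubbed send here is hypothetical, standing in for your real server call):

var item = {
  id: 'QRA',
  text: 'Test A',
  send: function(onSuccess, onFailure) {
    setTimeout(onSuccess, 50); // pretend the server answered
  }
};

MyApp.enqueue(item, function(id) {
  console.log('sent and dequeued: ' + id);
});

console.log(MyApp.findItemById('QRA')); // the item, while the send is in flight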
Try using an ECMAScript 6 Promise, or a promise implementation from any JS framework. A Promise is better suited to this task; see more at https://developer.mozilla.org/
function MyApp() {}
module.exports = MyApp;

MyApp.myQueue = [];
MyApp.queueIsLocked = false;

MyApp.enqueue = function(item) {
  return new Promise(function(resolve, reject) {
    if (!MyApp.queueIsLocked) {
      MyApp.queueIsLocked = true;
      MyApp.myQueue.push(item);
      MyApp.queueIsLocked = false;
      var onResolve = function() {
        console.log('item: ' + item.id);
        MyApp.queueIsLocked = true;
        MyApp.findItemById(item.id).then(function(index) {
          if (index !== -1) {
            MyApp.myQueue.splice(index, 1);
            MyApp.queueIsLocked = false;
            resolve(item.id);
          }
        });
      };
      item.send(onResolve, reject);
    }
  });
};

MyApp.findItemById = function(id) {
  return new Promise(function(resolve, reject) {
    var index = -1;
    if (MyApp.queueIsLocked) {
      setTimeout(function() {
        // Attempt to find the index again, resolving with its result.
        MyApp.findItemById(id).then(resolve);
      }, 100);
    } else {
      MyApp.queueIsLocked = true;
      for (var i = 0; i < MyApp.myQueue.length; i++) {
        if (MyApp.myQueue[i].id === id) {
          index = i;
          break;
        }
      }
      resolve(index);
    }
  });
};
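Callers would then consume the returned promise instead of passing callbacks, along these lines:

MyApp.enqueue(item)
  .then(function(id) {
    console.log('dequeued: ' + id);
  })
  .catch(function(err) {
    console.log('send failed: ' + err);
  });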
Move MyApp.queueIsLocked = false; into the callback of the server send.
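That is, something along these lines (a minimal sketch of the suggested change):

MyApp.queueIsLocked = true;
MyApp.myQueue.push(item);
item.send(function() {
  // Only unlock once the server has actually responded.
  MyApp.queueIsLocked = false;
});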
I have a list of names separated by commas. I want to issue a server request for each name in sequence and store the results in an array. My code works when I know the number of names in the string in advance.
See Here - This is working when I know the number of names.
Now I want to make this code generic. If I add a name to the string, it should be handled automatically, without adding any code for the ajax request.
See Here - This is what I've tried. It's not working as expected.
shoppingList = shoppingList.split(",");
var result = [];

function fetchData(shoppingItem) {
  var s1 = $.ajax('/items/' + shoppingItem);
  s1.then(function(res) {
    result.push(new Item(res.label, res.price));
    console.log("works fine");
  });
  if (shoppingList.length == 0) {
    completeCallback(result);
  } else {
    fetchData(shoppingList.splice(0, 1)[0]);
  }
}

fetchData(shoppingList.splice(0, 1)[0]);
Problem
I can't figure out how to detect that all of the promise objects have been resolved so that I can call the callback function.
To make the ajax requests in sequence, you have to put the recursive call in the callback:
function fetchList(shoppingList, completeCallback) {
  var result = [];
  function fetchData() {
    if (shoppingList.length == 0) {
      completeCallback(result);
    } else {
      $.ajax('/items/' + shoppingList.shift()).then(function(res) {
        result.push(new Item(res.label, res.price));
        console.log("works fine");
        fetchData();
        // ^^^^^^^^^^^
      });
    }
  }
  fetchData();
}
Or you can actually use promises and do:
function fetchList(shoppingList) {
  return shoppingList.reduce(function(resultPromise, shoppingItem) {
    return resultPromise.then(function(result) {
      return $.ajax('/items/' + shoppingItem).then(function(res) {
        result.push(new Item(res.label, res.price));
        return result;
      });
    });
  }, $.when([]));
}
(updated jsfiddle)
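Since fetchList now returns a promise for the result array, the completion callback from the question becomes a then():

fetchList(shoppingList).then(function(result) {
  completeCallback(result); // all items have been fetched, in order
});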
Notice there is nothing in the requirements of the task that says the ajax requests must be made sequentially. You could also let them run in parallel and wait for all of them to finish:
function fetchList(shoppingList) {
  return $.when.apply($, shoppingList.map(function(shoppingItem) {
    return $.ajax('/items/' + shoppingItem).then(function(res) {
      return new Item(res.label, res.price);
    });
  })).then(function() {
    return Array.prototype.slice.call(arguments);
  });
}
(updated jsfiddle)
// global:
var pendingRequests = 0;

// after each ajax request:
pendingRequests++;

// inside the callback:
if (--pendingRequests == 0) {
  // all requests have completed
}
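Assembled into the question's code, that counter pattern might look like this (a sketch reusing the question's Item and completeCallback):

var pendingRequests = 0;
var result = [];

shoppingList.forEach(function(shoppingItem) {
  pendingRequests++; // count each request as it is issued
  $.ajax('/items/' + shoppingItem).then(function(res) {
    result.push(new Item(res.label, res.price));
    if (--pendingRequests == 0) {
      completeCallback(result); // all requests have completed
    }
  });
});

Note that with this approach the results arrive in completion order, not list order.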
I have made minimal modifications to your code to get it working - Click here.
Please note that your last assertion will fail, as the item promises are not resolved in a linear manner; the sequence of the items will therefore change.
I have done a lot of reading around this, but ultimately the tutorials and guides I have found differ too much for me to get a decent grasp on this concept.
This is what I want to achieve:
1) Simple http request from our server [Any API for demonstration]
2) Run a function with data from (1). [Remove a property from the object]
3) Use result and length of (2) to run a loop of $http requests to our server. [Or any server]
4) This will result in 6 different objects. Run a function on these 6 objects. [Add a property]
5) Once ALL of this is done, run a separate function [Log "finished"]
How can this be achieved using promises? How do I pass data from (1) via a promise to (2)? Is this the right way to achieve what I need to do?
If anyone can show me how this should be structured it would be immensely helpful; I have kept the functions as simple as possible for this question.
Yes, promises are very well suited to structuring solutions for this kind of problem.
Simplified solution (more or less pseudo-code):
$http(...)
  .then(function(response) {
    // do something with the response, for example:
    var list = response.data.list;
    // return it so that you can use it in the next 'then'.
    return list;
  })
  .then(function(list) {
    var promises = [];
    angular.forEach(list, function(item) {
      // perform a request for each item
      var promise = $http(...).then(function(itemResponse) {
        itemResponse.extraProperty = true;
        return itemResponse;
      });
      // we make an array of promises
      promises.push(promise);
    });
    // combine all promises into one and return it for the next then()
    return $q.all(promises);
  })
  .then(function(itemsList) {
    // itemsList is now an array of all parsed item responses.
    console.log(itemsList);
  });
(Hopefully this is right; I did not test it.)
As you can see, you can return a value in a callback to pass it to the next then(), or you can return a promise, which results in the next callback being called when it resolves. $q.all() is used to combine multiple promises into one that resolves when all of them have resolved.
Edit: I realised that you can optionally leave out these three lines:
return list;
})
.then(function(list) {
It is nice syntax though, because the separation of tasks is more visible.
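The key difference is what you return from a callback: returning a plain value resolves the next then() immediately, while returning a promise makes the next then() wait. A tiny sketch of both (getItem is a hypothetical function returning a promise per item):

$http.get('/api/list')
  .then(function(response) {
    return response.data.list;        // plain value: next then() gets it directly
  })
  .then(function(list) {
    return $q.all(list.map(getItem)); // promise: next then() waits for it to resolve
  })
  .then(function(items) {
    console.log(items);               // array of all item results
  });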
Check the code below; it could contain syntax errors, but the important thing is the structure. Step 3 contains multiple (6) $http requests, and it waits until the last request has responded before returning a single response object (an array) containing the response of each $http request.
// Step 1
var Step1 = function () {
  $http.get('api/controller').success(function (resp) {
    var object1 = resp;
    Step2(object1);
    Step3(object1).then(function (resp) {
      // resp is an array containing the response of each $http request
      Step4(resp);
      Step5();
    });
  });
};

// Step 2
var Step2 = function (obj) {
  // do whatever with the object
};

// Step 3
var Step3 = function (object1) {
  var call = $q.defer();
  var get1 = $http.get(object1[0].url);
  var get2 = $http.get(object1[1].url);
  // ...
  var get6 = $http.get(object1[5].url);
  $q.all([get1, get2, /* ... */ get6]).then(function (resp) {
    call.resolve(resp);
  });
  return call.promise;
};

// Step 4
var Step4 = function (resp) {
  for (var i = 0; i < resp.length; i++) {
    DoWhatEver(resp[i]);
  }
};

// Step 5
var Step5 = function () {
  alert("Finished");
};

Step1(); // Call the Step1 function
I don't know why you're having difficulty implementing this, but maybe $q.all() is what you're missing:
var config1 = {method: 'GET', url: '/api/...'};
$http(config1).success(function(resultsFrom1) {
  functionForResultsOf1(resultsFrom1);
});

var functionForResultsOf1 = function(resultsOf1) {
  // remove something from the result, assuming this is a synchronous operation
  resultsOf1.splice()...;
  var promises = makePromises(*pass whatever you want*);
  $q.all(promises).then(function(aggregateOfAllCallsToServer) {
    angular.forEach(aggregateOfAllCallsToServer, function(data) {
      // do something to the data from each call to the server
    });
    console.log("finished");
  });
};

var makePromises = function(serverUrls) {
  var promises = [];
  angular.forEach(serverUrls, function(url) {
    var promise = $http({
      url: '/api/' + url,
      method: 'GET'
    });
    promises.push(promise);
  });
  // Return the array of promises; the caller combines them with $q.all().
  return promises;
};
I'd like to update a page based upon the results of multiple ajax/json requests. Using jQuery, I can "chain" the callbacks, like this very simple stripped down example:
$.getJSON("/values/1", function(data) {
// data = {value: 1}
var value_1 = data.value;
$.getJSON("/values/2", function(data) {
// data = {value: 42}
var value_2 = data.value;
var sum = value_1 + value_2;
$('#mynode').html(sum);
});
});
However, this results in the requests being made serially. I'd much rather have a way to make the requests in parallel, and perform the page update after all are complete. Is there any way to do this?
jQuery's $.when() and .done() are exactly what you need:
$.when($.ajax("/page1.php"), $.ajax("/page2.php"))
.then(myFunc, myFailure);
Try this solution, which can support any specific number of parallel queries:
var done = 5; // number of total requests
var sum = 0;

/* Normal loops don't create a new scope */
$([1,2,3,4,5]).each(function() {
  var number = this;
  $.getJSON("/values/" + number, function(data) {
    sum += data.value;
    done -= 1;
    if (done == 0) $("#mynode").html(sum);
  });
});
Run multiple AJAX requests in parallel
When working with APIs, you sometimes need to issue multiple AJAX requests to different endpoints. Instead of waiting for one request to complete before issuing the next, you can speed things up with jQuery by requesting the data in parallel, using jQuery's $.when() function:
$.when($.get('1.json'), $.get('2.json')).then(function(r1, r2) {
  console.log(r1[0].message + " " + r2[0].message);
});
The callback function is executed when both of these GET requests finish successfully. $.when() takes the promises returned by two $.get() calls, and constructs a new promise object. The r1 and r2 arguments of the callback are arrays, whose first elements contain the server responses.
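So if you only care about the response bodies, unwrap the first element of each argument; the remaining elements are the textStatus and jqXHR for that request:

$.when($.get('1.json'), $.get('2.json')).then(function(r1, r2) {
  var data1 = r1[0]; // response body of the first request
  var data2 = r2[0]; // response body of the second request
  // r1[1] is its textStatus, r1[2] its jqXHR object
});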
Here's my attempt at directly addressing your question
Basically, you just build up an AJAX call stack, execute them all, and a provided function is called upon completion of all the events - the provided argument being an array of the results from all the supplied ajax requests.
Clearly this is early code - you could get more elaborate with this in terms of the flexibility.
<script type="text/javascript" src="http://jqueryjs.googlecode.com/files/jquery-1.3.2.min.js"></script>
<script type="text/javascript">
var ParallelAjaxExecuter = function( onComplete )
{
this.requests = [];
this.results = [];
this.onComplete = onComplete;
}
ParallelAjaxExecuter.prototype.addRequest = function( method, url, data, format )
{
this.requests.push( {
"method" : method
, "url" : url
, "data" : data
, "format" : format
, "completed" : false
} )
}
ParallelAjaxExecuter.prototype.dispatchAll = function()
{
var self = this;
$.each( self.requests, function( i, request )
{
request.method( request.url, request.data, function( r )
{
return function( data )
{
console.log
r.completed = true;
self.results.push( data );
self.checkAndComplete();
}
}( request ) )
} )
}
ParallelAjaxExecuter.prototype.allRequestsCompleted = function()
{
var i = 0;
while ( request = this.requests[i++] )
{
if ( request.completed === false )
{
return false;
}
}
return true;
},
ParallelAjaxExecuter.prototype.checkAndComplete = function()
{
if ( this.allRequestsCompleted() )
{
this.onComplete( this.results );
}
}
var pe = new ParallelAjaxExecuter( function( results )
{
alert( eval( results.join( '+' ) ) );
} );
pe.addRequest( $.get, 'test.php', {n:1}, 'text' );
pe.addRequest( $.get, 'test.php', {n:2}, 'text' );
pe.addRequest( $.get, 'test.php', {n:3}, 'text' );
pe.addRequest( $.get, 'test.php', {n:4}, 'text' );
pe.dispatchAll();
</script>
Here's test.php:
<?php
echo pow( $_GET['n'], 2 );
?>
Update: Per the answer given by Yair Leviel, this answer is obsolete. Use a promise library, like jQuery.when() or Q.js.
I created a general purpose solution as a jQuery extension. Could use some fine tuning to make it more general, but it suited my needs. The advantage of this technique over the others in this posting as of the time of this writing was that any type of asynchronous processing with a callback can be used.
Note: I'd use Rx extensions for JavaScript instead of this if I thought my client would be okay with taking a dependency on yet-another-third-party-library :)
// jQuery extension for running multiple async methods in parallel
// and getting a callback with all results when all of them have completed.
//
// Each worker is a function that takes a callback as its only argument, and
// fires up an async process that calls this callback with its result.
//
// Example:
//   $.parallel(
//     function (callback) { $.get("form.htm", {}, callback, "html"); },
//     function (callback) { $.post("data.aspx", {}, callback, "json"); },
//     function (formHtml, dataJson) {
//       // Handle success; each argument to this function is
//       // the result of the corresponding ajax call above.
//     }
//   );
(function ($) {
  $.parallel = function (anyNumberOfWorkers, allDoneCallback) {
    var workers = [];
    var workersCompleteCallback = null;

    // To support any number of workers, use the "arguments" variable to
    // access function arguments rather than the names above.
    var lastArgIndex = arguments.length - 1;
    $.each(arguments, function (index) {
      if (index == lastArgIndex) {
        workersCompleteCallback = this;
      } else {
        workers.push({ fn: this, done: false, result: null });
      }
    });

    // Short circuit this edge case
    if (workers.length == 0) {
      workersCompleteCallback();
      return;
    }

    // Fire off each worker process, asking it to report back to onWorkerDone.
    $.each(workers, function (workerIndex) {
      var worker = this;
      var callback = function () { onWorkerDone(worker, arguments); };
      worker.fn(callback);
    });

    // Store results and update status as each item completes.
    // The [0] on workerResult below assumes the client only needs the first parameter
    // passed into the return callback. This simplifies the handling in allDoneCallback,
    // but may need to be removed if you need access to all parameters of the result.
    // For example, $.post calls back with success(data, textStatus, XMLHttpRequest). If
    // you need textStatus or XMLHttpRequest then pull off the [0] below.
    function onWorkerDone(worker, workerResult) {
      worker.done = true;
      worker.result = workerResult[0]; // this is the [0] ref'd above.
      var allResults = [];
      for (var i = 0; i < workers.length; i++) {
        if (!workers[i].done) return;
        else allResults.push(workers[i].result);
      }
      workersCompleteCallback.apply(this, allResults);
    }
  };
})(jQuery);
UPDATE: And another two years later, this looks insane because the accepted answer has changed to something much better! (Though still not as good as Yair Leviel's answer using jQuery's when.)
18 months later, I just hit something similar. I have a refresh button, and I want the old content to fadeOut and then the new content to fadeIn. But I also need to get the new content. The fadeOut and the get are asynchronous, but it would be a waste of time to run them serially.
What I do is really the same as the accepted answer, except in the form of a reusable function. Its primary virtue is that it is much shorter than the other suggestions here.
var parallel = function(actions, finished) {
  var finishedCount = 0;
  var results = [];
  $.each(actions, function(i, action) {
    action(function(result) {
      results[i] = result;
      finishedCount++;
      if (finishedCount == actions.length) {
        finished(results);
      }
    });
  });
};
You pass it an array of functions to run in parallel. Each function should accept another function to which it passes its result (if any). parallel will supply that function.
You also pass it a function to be called when all the operations have completed. This will receive an array with all the results in. So my example was:
refreshButton.click(function() {
  parallel([
    function(f) {
      contentDiv.fadeOut(f);
    },
    function(f) {
      portlet.content(f);
    },
  ],
  function(results) {
    contentDiv.children().remove();
    contentDiv.append(results[1]);
    contentDiv.fadeIn();
  });
});
So when my refresh button is clicked, I launch jQuery's fadeOut effect and also my own portlet.content function (which does an async get, builds a new bit of content and passes it on), and then when both are complete I remove the old content, append the result of the second function (which is in results[1]) and fadeIn the new content.
As fadeOut doesn't pass anything to its completion function, results[0] presumably contains undefined, so I ignore it. But if you had three operations with useful results, they would each slot into the results array, in the same order you passed the functions.
You could do something like this:
var allData = [];

$.getJSON("/values/1", function(data) {
  allData.push(data);
  if (allData.length == 2) {
    processData(allData); // where processData processes all the data
  }
});

$.getJSON("/values/2", function(data) {
  allData.push(data);
  if (allData.length == 2) {
    processData(allData);
  }
});

var processData = function(data) {
  var sum = data[0].value + data[1].value;
  $('#mynode').html(sum);
};
Here's an implementation using mbostock/queue:
queue()
  .defer(function(callback) {
    $.post('/echo/json/', {json: JSON.stringify({value: 1}), delay: 1}, function(data) {
      callback(null, data.value);
    });
  })
  .defer(function(callback) {
    $.post('/echo/json/', {json: JSON.stringify({value: 3}), delay: 2}, function(data) {
      callback(null, data.value);
    });
  })
  .awaitAll(function(err, results) {
    var result = results.reduce(function(acc, value) {
      return acc + value;
    }, 0);
    console.log(result);
  });
The associated fiddle: http://jsfiddle.net/MdbW2/
With the following extension of jQuery (which can also be written as a standalone function), you can do this:
$.whenAll({
  val1: $.getJSON('/values/1'),
  val2: $.getJSON('/values/2')
})
.done(function (results) {
  var sum = results.val1.value + results.val2.value;
  $('#mynode').html(sum);
});
The jQuery (1.x) extension whenAll():
$.whenAll = function (deferreds) {
  function isPromise(fn) {
    return fn && typeof fn.then === 'function' &&
      String($.Deferred().then) === String(fn.then);
  }
  var d = $.Deferred(),
      keys = Object.keys(deferreds),
      args = keys.map(function (k) {
        return $.Deferred(function (d) {
          var fn = deferreds[k];
          (isPromise(fn) ? fn : $.Deferred(fn))
            .done(d.resolve)
            .fail(function (err) { d.reject(err, k); });
        });
      });
  $.when.apply(this, args)
    .done(function () {
      var resObj = {},
          resArgs = Array.prototype.slice.call(arguments);
      resArgs.forEach(function (v, i) { resObj[keys[i]] = v; });
      d.resolve(resObj);
    })
    .fail(d.reject);
  return d;
};
See jsbin example:
http://jsbin.com/nuxuciwabu/edit?js,console
The most professional solution for me would be to use async.js and Array.reduce, like so:
async.map([1, 2, 3, 4, 5], function (number, callback) {
  $.getJSON("/values/" + number, function (data) {
    callback(null, data.value);
  });
}, function (err, results) {
  $("#mynode").html(results.reduce(function(previousValue, currentValue) {
    return previousValue + currentValue;
  }));
});
If the result of one request depends on the other, you can't make them parallel.
Building on Yair's answer, you can define the ajax promises dynamically:
var start = 1; // starting value
var len = 2;   // no. of requests

var promises = (new Array(len)).fill().map(function(_, i) {
  return $.ajax("/values/" + (start + i));
});

$.when.apply($, promises)
  .then(myFunc, myFailure);
Suppose you have an array of file names:
var templateNameArray = ["test.html", "test2.html", "test3.html"];
var htmlTemplatesLoadStateMap = {};
var deferreds = [];

for (var i = 0; i < templateNameArray.length; i++) {
  if (!htmlTemplatesLoadStateMap[templateNameArray[i]]) {
    deferreds.push($.get("./Content/templates/" + templateNameArray[i],
      function (response, status, xhr) {
        if (status == "error") { }
        else {
          $("body").append(response);
        }
      }));
    htmlTemplatesLoadStateMap[templateNameArray[i]] = true;
  }
}

$.when.apply($, deferreds).always(function () {
  yourfunctionTobeExecuted(yourPayload);
});
I needed multiple, parallel ajax calls, and the jQuery $.when syntax wasn't amenable to the full $.ajax format I am used to working with. So I just created a setInterval timer to periodically check whether each of the ajax calls had returned. Once they had all returned, I could proceed from there.
I've read there may be browser limitations on how many simultaneous ajax calls you can have going at once (two?), but $.ajax is inherently asynchronous, so issuing the ajax calls one by one, without waiting for each to complete, still results in parallel execution (within the browser's possible limitation).
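A sketch of that polling approach (the result flags and the allDone callback are illustrative names, not a library API):

var results = { first: null, second: null };

$.ajax({ url: '/values/1', success: function(data) { results.first = data; } });
$.ajax({ url: '/values/2', success: function(data) { results.second = data; } });

var poll = setInterval(function() {
  if (results.first !== null && results.second !== null) {
    clearInterval(poll);
    allDone(results); // proceed once every call has returned
  }
}, 50);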