Hi, the JavaScript below is called when I submit a form. It first splits a bunch of URLs from a textarea, and then:
1) Adds a row to a table for each URL, with "Not Started" in the last column (the 'status' column).
2) Loops through each URL again; first it makes an AJAX call to check on the status (status.php), which will return a percentage from 0 to 100.
3) In the same loop it kicks off the actual process via AJAX (process.php). When the process has completed (bearing in mind the continuous status updates), it should say "Completed" in the status column and exit the auto_refresh.
4) It should then go to the next 'each' and do the same for the next URL.
function formSubmit(){
    var lines = $('#urls').val().split('\n');
    $.each(lines, function(key, value) {
        $('#dlTable tr:last').after('<tr><td>'+value+'</td><td>Not Started</td></tr>');
    });
    $.each(lines, function(key, value) {
        var auto_refresh = setInterval( function () {
            $.ajax({
                url: 'status.php',
                success: function(data) {
                    $('#dlTable').find("tr").eq(key+1).children().last().replaceWith("<td>"+data+"</td>");
                }
            });
        }, 1000);
        $.ajax({
            url: 'process.php?id='+value,
            success: function(msg) {
                clearInterval(auto_refresh);
                $('#dlTable').find("tr").eq(key+1).children().last().replaceWith("<td>completed rip</td>");
            }
        });
    });
}
What you want is to run several asynchronous actions in sequence, right? I'd build an array of the functions to execute and run it through a sequence helper.
https://github.com/michiel/asynchelper-js/blob/master/lib/sequencer.js
var actions = [];
$.each(lines, function(key, value) {
    actions.push(function(callback) {
        $.ajax({
            url: 'process.php?id='+value,
            success: function(msg) {
                clearInterval(auto_refresh);
                //
                // Perform your DOM operations here and be sure to call the
                // callback!
                //
                callback();
            }
        });
    });
});
As you can see, we build an array of scoped functions that take an arbitrary callback as an argument. A sequencer will run them in order for you.
Use the sequence helper from the GitHub link and run:
var sequencer = new Sequencer(actions);
sequencer.start();
It is, btw, possible to define sequencer functions in a few lines of code. For example (using a named function expression, since arguments.callee is deprecated):
function sequencer(arr) {
    (function next() {
        if (arr.length != 0) {
            arr.shift()(next);
        }
    })();
}
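Used with the actions array built above, that is just:
sequencer(actions); // runs each queued action, one after another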
AJAX is asynchronous.
That's exactly what's supposed to happen.
Instead of using each, you should send the next AJAX request in the completion handler of the previous one.
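For example, here is a minimal sketch of that idea applied to the question's code (reusing the lines array and #dlTable markup from above; the status polling is left out for brevity):
function processNext(lines, key) {
    if (key >= lines.length) return; // all URLs handled
    $.ajax({
        url: 'process.php?id=' + lines[key],
        success: function(msg) {
            $('#dlTable').find("tr").eq(key + 1).children().last().replaceWith("<td>Completed</td>");
            processNext(lines, key + 1); // start the next URL only after this one finishes
        }
    });
}

processNext($('#urls').val().split('\n'), 0);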
You can also set AJAX to run synchronously using the "async" property. Add the following:
$.ajax({ "async": false, ... other options ... });
See the jQuery AJAX API reference. Note that this will lock the browser until the request completes.
I prefer the approach in SLaks's answer (sticking with asynchronous behavior). However, it does depend on your application. Exercise caution.
I would give the same answer as this question: jquery json populate table
This code will give you a little idea of how to use a callback with loops and AJAX. I have not tested it, though, so there will be bugs. I derived the following from my old code:
var processCnt; // global variable - index of the line currently being processed

function formSubmit(){
    var lines = $('#urls').val().split('\n');
    $.each(lines, function(key, value) {
        $('#dlTable tr:last').after('<tr><td>'+value+'</td><td>Not Started</td></tr>');
    });
    completeProcessing(lines, function(success)
    {
        // all lines have been polled to completion at this point
        $.ajax({
            url: 'process.php?id='+lines[lines.length-1],
            success: function(msg) {
                $('#dlTable').find("tr").eq(lines.length).children().last().replaceWith("<td>completed rip</td>");
            }
        });
    });
}

// Following two functions added by me
function completeProcessing(lines, callback)
{
    processCnt = 0;
    var processingTimer = setInterval(function() {
        singleProcessing(lines, function(done) {
            if (done) {
                clearInterval(processingTimer);
                callback(true);
            }
        });
    }, 1000);
}

function singleProcessing(lines, callback)
{
    var key = processCnt;
    if (processCnt < lines.length)
    { // files still to be processed
        $.ajax({
            url: 'status.php',
            success: function(data) {
                $('#dlTable').find("tr").eq(key+1).children().last().replaceWith("<td>"+data+"</td>");
                processCnt++;
                callback(false);
            }
        });
    }
    else
    {
        callback(true);
    }
}
Related
I have a function that loads HTML from an external file via an AJAX call using jQuery.
This AJAX call runs inside a $.each loop, and I need the call to finish before the loop continues. Here is my code:
$('img').each(function(){
    var cotainer_html = $('.cotainer_html').clone();
    /* Get Image Content */
    $.ajax({
        url: '/assets/ajax/get_studio_image.php',
        type: 'GET',
        success: function(data){
            cotainer_html.find('.replaceme').replaceWith(data);
        }
    });
});
I know I can set async:false but I hear that is not a good idea. Any ideas?
To achieve this you can put each request into an array and apply() that array to $.when. Try this:
var requests = [];
$('img').each(function(){
    var cotainer_html = $('.cotainer_html').clone();
    /* Get Image Content */
    requests.push($.ajax({
        url: '/assets/ajax/get_studio_image.php',
        type: 'GET',
        success: function(data){
            cotainer_html.find('.replaceme').replaceWith(data);
        }
    }));
});

$.when.apply($, requests).done(function() {
    console.log('all requests complete');
});
Note that you're replacing the same content on each request, so the only one which will have any effect on the UI is the last request. The preceding ones are redundant.
Also note that you should never, ever use async: false. It locks the UI thread of the browser until the request completes, which makes it look like it has crashed to the user. It is terrible practice. Use callbacks.
The OP appears to want the calls to run in series, not in parallel.
If this is the case you could use recursion:
function makeRequest($els, index) {
    var cotainer_html = $('.cotainer_html').clone();
    $.ajax({
        url: '/assets/ajax/get_studio_image.php',
        type: 'GET',
        success: function(data){
            cotainer_html.find('.replaceme').replaceWith(data);
            if ($els.eq(index + 1).length) {
                makeRequest($els, ++index);
            } else {
                console.log('all requests complete');
            }
        }
    });
}

makeRequest($('img'), 0);
makeRequest($('img'), 0);
You can use a pseudo-recursive loop:
var imgs = $('img').get();

var done = (function loop() {
    var img = imgs.shift();
    if (img) {
        var cotainer_html = $('.cotainer_html').clone();
        /* Get Image Content */
        return $.get('/assets/ajax/get_studio_image.php')
            .then(function(data) {
                cotainer_html.find('.replaceme').replaceWith(data);
            }).then(loop);
    } else {
        return $.Deferred().resolve(); // resolved when the loop terminates
    }
})();
This will take each element of the list, get the required image, and .then() start over until there's nothing left.
The immediately invoked function expression itself returns a Promise, so you can chain a .then() call to it that will be invoked once the loop has completed:
done.then(function() {
    // continue your execution here
    ...
});
Using Backbone.js we have an application in which, on a certain occasion, we need to send an AJAX POST to a client's web service.
However, the content to be posted is dynamic and is decided by a certain array.
For each item in the array we need to go fetch a piece of data.
After assembling the data, that aggregated object needs to be sent.
As of now I have a sequential approach, though I feel that this is not the best way.
var arrParams = [{id: 1, processed: false}, {id: 7, processed: false}, {id: 4, processed: false}];

function callback(data) {
    $.post(); // jQuery AJAX to post the data...
}

function fetchData(arr, data, callback) {
    // get the ID of the first param that has processed set to false
    var currentId = _(arr).find(function(p){ return p.processed === false; }).id;
    // AJAX call fetching the results for that parameter
    $.ajax({
        url: 'http://mysuperwebservice.com',
        type: 'GET',
        dataType: 'json',
        data: {id: currentId},
        success: function(serviceData) {
            data[currentId] = serviceData; // insert it into the data
            _(arr).find(function(p){ return p.id === currentId; }).processed = true; // mark this param as processed
            // if more params are not processed, call this function again, else continue to the callback
            if(_(arr).any(function(p){ return p.processed === false; }))
            {
                fetchData(arr, data, callback);
            }
            else
            {
                callback(data);
            }
        },
        error: function(){ /* not important for now... */ }
    });
}

fetchData(arrParams, {}, callback);
Isn't there a way to launch these calls asynchronously and execute the callback only when all results are in?
You have to use the jQuery $.Deferred object to sync them. Look at this article: Deferred Docs
You can use it in this way:
$.when(
    $.ajax({ url: 'url1' }),
    $.ajax({ url: 'url2' }) // or even more calls
).done(done_callback).fail(fail_callback);
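Since your requests are built from a dynamic array, you can collect them first and apply them to $.when. A sketch, assuming the same arrParams, service URL, and underscore.js helpers as in the question:
var requests = _(arrParams).map(function(p) {
    return $.ajax({
        url: 'http://mysuperwebservice.com',
        type: 'GET',
        dataType: 'json',
        data: { id: p.id }
    });
});

$.when.apply($, requests).done(function() {
    // with several requests, each argument is one request's [data, statusText, jqXHR]
    var data = {};
    _(arguments).each(function(result, i) {
        data[arrParams[i].id] = result[0];
    });
    callback(data); // all results are in
});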
I would do something like this:
Make a function that, besides the parameters you pass to fetchData, also gets the index within arrParams; then, in a loop, call that function for every element. In the success handler set processed to true on your element, and check whether you're the last by going through the array and seeing if everything else is true as well.
You can optimize a bit by keeping a counter:
var todo = arrParams.length;
and in the success handler you do:
if (--todo == 0) {
    callback(...)
}
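Put together, a sketch of that parallel version (keeping the question's arrParams, service URL, and final callback, so those names are assumed from above):
function fetchDataParallel(arr, data, callback) {
    var todo = arr.length;
    _(arr).each(function(p) {
        $.ajax({
            url: 'http://mysuperwebservice.com',
            type: 'GET',
            dataType: 'json',
            data: { id: p.id },
            success: function(serviceData) {
                data[p.id] = serviceData;
                p.processed = true;
                if (--todo == 0) {
                    callback(data); // the last response has arrived
                }
            }
        });
    });
}

fetchDataParallel(arrParams, {}, callback);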
I currently have the following code:
function render(url1, url2, message) {
    utility.messageBoxOpen(message);
    $.getJSON(url1, function (items) {
        // Do something
        utility.messageBoxClose();
    });
    $.getJSON(url2, function (items) {
        // Do something
    });
}
When the function is executed, a modal window appears to inform the user that something is loading. Initially I only had one $.getJSON request, so when the request was done the modal window closed, as per the code above.
I am looking to add another $.getJSON request but want to close the modal window only when both requests have completed.
What is the best way of achieving this?
You're looking for $.when()
All jQuery ajax requests (including shortcuts like getJSON) return deferred objects which can be used to control other actions.
var dfd1 = $.getJSON(url1, function (items) {
    // Do something
});

var dfd2 = $.getJSON(url2, function (items) {
    // Do something
});

$.when(dfd1, dfd2).then(function(){
    // both succeeded
    utility.messageBoxClose();
}, function(){
    // one or more of them failed
});
If you don't care whether the getJSON calls come back successfully or not, and only care that they are done, you can instead:
$.when(dfd1, dfd2).done( utility.messageBoxClose );
Use a flag variable:
function render(url1, url2, message) {
    utility.messageBoxOpen(message);
    var isOneDone = false;
    $.getJSON(url1, function (items) {
        // Do something
        if (!isOneDone)
            isOneDone = true;
        else
            utility.messageBoxClose();
    });
    $.getJSON(url2, function (items) {
        // Do something
        if (!isOneDone)
            isOneDone = true;
        else
            utility.messageBoxClose();
    });
}
You can replace the getJSON() call with one using $.ajax, which accomplishes the same thing but gives you more flexibility:
$.ajax({
    url: 'http://whatever',
    dataType: 'json',
    async: false,
    data: { /* data */ },
    success: function(data) {
        // do the thing
    }
});
Note the async: false part - this makes code execution pause until the request completes. So you could simply make your two calls this way, and close the dialog after the second call is completed.
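A sketch of render rewritten that way (same caveat: the UI is locked while each request runs):
function render(url1, url2, message) {
    utility.messageBoxOpen(message);
    $.ajax({
        url: url1,
        dataType: 'json',
        async: false,
        success: function (items) { /* Do something */ }
    });
    $.ajax({
        url: url2,
        dataType: 'json',
        async: false,
        success: function (items) { /* Do something */ }
    });
    // both requests have finished by this point
    utility.messageBoxClose();
}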
function render(url1, url2, message) {
    utility.messageBoxOpen(message);
    $.when($.getJSON(url1, function (items) {
        // Do something
    }), $.getJSON(url2, function (items) {
        // Do something
    })).then(function () {
        // Both complete
        utility.messageBoxClose();
    });
}
jQuery.when
I have a code snippet like this:
var nbrPrevArt = $("#accordion > div .accordionPanel").size();
var processing = false;
if (nbrPrevArt == 0) {
    processing = true;
    getArticlesPreview(); // fetches articles first
}

// loop intended to make the browser wait until getArticlesPreview() finishes
while (processing)
{
}

// getArticlesPreview() is finished; now we can safely execute this step
$.ajax({
    type: "GET",
    url: "http://mydomain.com/somepage.htl",
    success: function(data) { alert(data); }
});

//-----------------------------------
function getArticlesPreview()
{
    // lengthy operation
    processing = false;
}
This doesn't look very practical, because I am using a loop to make the browser wait until the function has completely executed before performing the next step.
Is there a way to define a callback to be called when the first operation is done, and then have my second step (the $.ajax call) run inside it?
Thank you in advance!
Teixeira
You could create your callback yourself by using the apply function. You just have to add a callback parameter to getArticlesPreview and put the function inside this callback.
This could look like this:
function makeAjax()
{
    $.ajax({
        type: "GET",
        url: "http://mydomain.com/somepage.htl",
        success: function(data) { alert(data); }
    });
}

function getArticlesPreview(callback)
{
    // lengthy operation
    callback.apply(this);
}

var nbrPrevArt = $("#accordion > div .accordionPanel").size();
if (nbrPrevArt == 0) {
    getArticlesPreview(makeAjax);
}
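Note that if the lengthy operation inside getArticlesPreview is itself asynchronous (an AJAX fetch, say), the callback has to be invoked from that operation's success handler rather than at the end of the function. A sketch, where articles.php is a made-up URL:
function getArticlesPreview(callback)
{
    $.get("articles.php", function(data) {
        // ...build the preview panels from data...
        callback.apply(this); // run the next step only once the fetch completes
    });
}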
I stumbled on a piece of AJAX code that is not 100% safe since it mixes asynchronous and synchronous logic: in the code below I have a jQuery.each loop that grabs information from the elements and launches an AJAX GET request for each:
$(search).each(function() {
    $.ajax({
        url: 'save.x3?id=' + $(this).attr("id") + '&value=' + $(this).data("value"),
        success: function(o){
            // Update UI
        },
        error: function(o){
            // Update UI
        }
    });
});
// code to do after saving...
So obviously the 'code to do after saving...' often gets executed before all the requests have completed. In an ideal world I would like the server-side code to handle all of them at once and move the //code to do after saving into the success callback, but assuming that is not possible, I changed the code to something like the following to make sure all requests come back before continuing. I'm still not in love with it:
var recs = [];
$(search).each(function() {
    recs[recs.length] = 'save.x3?id=' + $(this).attr("id") + '&value=' + $(this).data("value");
});

var counter = 0;

function saveRecords(){
    $.ajax({
        url: recs[counter],
        success: function(o){
            // Update progress
            counter++;
            if (counter < recs.length){
                saveRecords();
            } else {
                doneSavingRecords();
            }
        },
        error: function(o){
            // Update progress
            doneSavingRecords(o.status);
        }
    });
}

function doneSavingRecords(text){
    // code to do after saving...
}

if (recs.length > 0){
    saveRecords(); // will recursively call itself until a failed request or until all records are saved
} else {
    doneSavingRecords();
}
So I'm looking for the 'best' way to add a bit of synchronous functionality to a series of asynchronous calls ?
Thanks!!
Better Answer:
function saveRecords(callback, errorCallback){
    $('<div></div>').ajaxStop(function(){
        $(this).remove(); // keep future AJAX events from affecting this
        callback();
    }).ajaxError(function(e, xhr, options, err){
        errorCallback(e, xhr, options, err);
    });

    $(search).each(function() {
        $.get('save.x3', { id: $(this).attr("id"), value: $(this).data("value") });
    });
}
Which would be used like this:
saveRecords(function(){
    // complete will fire after all requests have completed with a success or error
}, function(e, xhr, options, err){
    // error will fire for every error
});
Original Answer: This is good if they need to be in a certain order or you have other regular AJAX events on the page that would affect the use of ajaxStop, but this will be slower:
function saveRecords(callback){
    // .get() converts the jQuery result set into a plain array so shift() works
    var recs = $(search).map(function(i, obj) {
        return { id: $(obj).attr("id"), value: $(obj).data("value") };
    }).get();

    var save = function(){
        if(!recs.length) return callback();
        $.ajax({
            url: 'save.x3',
            data: recs.shift(), // shift removes/returns the first item in an array
            success: function(o){
                save();
            },
            error: function(o){
                // Update progress
                callback(o.status);
            }
        });
    };

    save();
}
Then you can call it like this:
saveRecords(function(error){
    // this function will run on error or after all
    // commands have run
});
If I understand what you're asking, I think you could use $.ajaxStop() for this purpose.
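For example (a minimal sketch; note that ajaxStop fires whenever all outstanding AJAX activity on the page ends, not just for these particular requests):
$(document).ajaxStop(function() {
    // every pending request has completed (with success or error)
    // code to do after saving...
});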
This is easily solved by calling the same function to check whether all AJAX calls are complete. You just need a simple queue shared between functions, and a quick check (no loops, timers, promises, etc.).
// a list of URLs for which we'll make async requests
var queue = ['/something.json', '/another.json'];

// will contain our return data so we can work with it
// in our final unified callback ('displayAll' in this example)
var data = [];

// work through the queue, dispatch requests, check if complete
function processQueue( queue ){
    for(var i = 0; i < queue.length; i++){
        // capture the URL in a closure so the fail handler reports the right one
        (function( url ){
            $.getJSON( url, function( returnData ) {
                data.push(returnData);
                // reduce the length of queue by 1;
                // don't care what URL is discarded, only that length is -1
                queue.pop();
                // pass a function reference, not the result of calling displayAll
                checkIfLast(function(){ displayAll(data); });
            }).fail(function() {
                throw new Error("Unable to fetch resource: " + url);
            });
        })( queue[i] );
    }
}

// see if this is the last successful AJAX call (when queue.length == 0 it is last);
// if this is the last success, run the callback,
// otherwise don't do anything
function checkIfLast(callback){
    if(queue.length == 0){
        callback();
    }
}

// when all the things are done
function displayAll(things){
    console.log(things); // show all data after the final AJAX request has completed
}

// begin
processQueue(queue);
Edit: I should add that I specifically aimed for an arbitrary number of items in the queue. You can simply add another URL and this will work just the same.
>> In the ideal world I would like to have the server-side code handle all of them at once and move //code to do after saving in the success callback
You'll need to think about this in terms of events.
Closure's net.BulkLoader (or a similar approach) will do it for you:
http://closure-library.googlecode.com/svn/trunk/closure/goog/docs/class_goog_net_BulkLoader.html
http://closure-library.googlecode.com/svn/trunk/closure/goog/docs/closure_goog_net_bulkloader.js.source.html
See:
goog.net.BulkLoader.prototype.handleSuccess_ (for individual calls)
&
goog.net.BulkLoader.prototype.finishLoad_ (for completion of all calls)
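A sketch of how that might look (hedged: this is pieced together from the linked docs, so treat the event names and the getResponseTexts() call as assumptions to verify against the source):
var uris = ['/something.json', '/another.json'];
var loader = new goog.net.BulkLoader(uris);

// handleSuccess_ runs per call; this SUCCESS event fires once finishLoad_
// has seen every call complete
goog.events.listen(loader, goog.net.EventType.SUCCESS, function(e) {
    var texts = loader.getResponseTexts(); // one response per URI
    // code to do after saving...
});
goog.events.listen(loader, goog.net.EventType.ERROR, function(e) {
    // at least one call failed
});

loader.load();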