I created a site that loads data from multiple sources via AJAX every few seconds. However, I'm experiencing some strange behavior. Here is the code:
function worker1() {
    var currentUrl = 'aaa.php?var=1';
    $.ajax({
        cache: false,
        url: currentUrl,
        success: function (data) {
            alert(data);
        },
        complete: function () {
            setTimeout(worker1, 2000);
        }
    });
}

function worker2() {
    var currentUrl = 'aaa.php?var=2';
    $.ajax({
        cache: false,
        url: currentUrl,
        success: function (data) {
            alert(data);
        },
        complete: function () {
            setTimeout(worker2, 2000);
        }
    });
}
The problem is that many times, one of the workers returns NaN. If I change the frequency of the calls to, let's say, 2000 and 1900, then everything works OK and I get almost no NaN results. When those frequencies are the same, I get over 80% NaN results for one of the calls. It seems like the browser cannot handle two requests fired at the exact same time. I use only those two workers, so the browser shouldn't be overloaded by AJAX requests. Where is the problem?
Note that aaa.php works with a MySQL database and does some simple queries based on the parameters in the URL.
All you need is $.each and the two-parameter form of $.ajax:
var urls = ['/url/one', '/url/two', ....];

$.each(urls, function (i, u) {
    $.ajax(u, {
        type: 'POST',
        data: {
            answer_service: answer,
            expertise_service: expertise,
            email_service: email
        },
        success: function (data) {
            $(".anydivclass").text(data);
        }
    });
});
Note: The messages generated by the success callback will overwrite each other as shown. You'll probably want to use $('#divid').append() or similar in the success function.
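For instance, a minimal variation of the success handler above that appends instead of overwriting (the '#divid' target is a placeholder):

$.each(urls, function (i, u) {
    $.ajax(u, {
        type: 'POST',
        success: function (data) {
            // append each response so earlier results are kept
            $('#divid').append('<p>' + data + '</p>');
        }
    });
});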
Maybe don't use these workers, and use promises instead, like below? I can't say anything about the errors being returned without looking at the server code, but below is working code for what it looks like you are trying to do.
This is a simple example, but you could use a different resolver for each URL with an object ({url: resolverFunc}) and then iterate using Object.keys (see the sketch after the code below).
var urls = [
    'http://jsonplaceholder.typicode.com/users/1',
    'http://jsonplaceholder.typicode.com/users/2',
    'http://jsonplaceholder.typicode.com/users/3',
    'http://jsonplaceholder.typicode.com/users/4',
    'http://jsonplaceholder.typicode.com/users/5',
    'http://jsonplaceholder.typicode.com/users/6',
    'http://jsonplaceholder.typicode.com/users/7'
];

function multiGet(arr) {
    var promises = [];
    for (var i = 0, len = arr.length; i < len; i++) {
        promises.push($.get(arr[i])
            .then(function (res) {
                // Do something with each response
                console.log(res);
            })
        );
    }
    // $.when takes individual promises, not an array, so use .apply
    return $.when.apply($, promises);
}

multiGet(urls);
<script src="https://ajax.googleapis.com/ajax/libs/jquery/2.1.1/jquery.min.js"></script>
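A minimal sketch of the resolver-map idea mentioned above, reusing the same test URLs (the handler bodies are placeholders):

var resolvers = {
    'http://jsonplaceholder.typicode.com/users/1': function (res) { console.log('user 1:', res); },
    'http://jsonplaceholder.typicode.com/users/2': function (res) { console.log('user 2:', res); }
};

function multiGetMapped(map) {
    var promises = Object.keys(map).map(function (url) {
        // each URL is fetched and handed to its own resolver
        return $.get(url).then(map[url]);
    });
    return $.when.apply($, promises);
}

multiGetMapped(resolvers);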
I have a general ajax function which I'm calling from loads of places in my code. It's pretty standard except for some extra debugging stuff I've recently added (to try to solve this issue), with a global 'ajaxworking' variable:
rideData.myAjax = function (url, type, data, successfunc) {
    var dataJson = JSON.stringify(data),
        thisurl = quilkinUrlBase() + url;
    if (ajaxworking.length > 0) {
        console.log(thisurl + ": concurrent Ajax call with: " + ajaxworking);
    }
    ajaxworking = thisurl;
    $.ajax({
        type: type,
        data: dataJson,
        url: thisurl,
        contentType: "application/json; charset=utf-8",
        dataType: "json",
        async: true,
        success: function (response) {
            ajaxworking = '';
            successfunc(response);
        },
        error: webRequestFailed
    });
};
Now, there's one section of my code where a second ajax call is made depending on the result of the first:
getWebRides = function (date) {
    var rideIDs = [];
    var intdays = bleTime.toIntDays(date);
    rideData.myAjax("GetRidesForDate", "POST", intdays, function (response) {
        rides = response;
        if (rides.length === 0) {
            $('#ridelist').empty(); // this will also remove any handlers
            qPopup.Alert("No rides found for " + bleTime.DateString(date));
            return null;
        }
        $.each(rides, function (index) {
            rideIDs.push(rides[index].rideID);
        });
        GetParticipants(rideIDs);
    });
},
'GetParticipants' (which also calls 'myAjax') works fine - most of the time. But in another part of my code, 'getWebRides' is itself called directly after another AJAX call - i.e. there are three calls, each successive one depending on the previous. The 'top-level' call is as follows:
rideData.myAjax("SaveRide", "POST", ride, function (response) {
// if successful, response should be just a new ID
if (response.length < 5) {
// document re-arrangement code snipped here for brevity
getWebRides(date);
}
else {
qPopup.Alert(response);
}
});
So, only when there are three successive calls like this do I get the 'concurrent' catch in the third one:
GetParticipants: concurrent call with GetRidesForDate
and (if allowed to proceed) this causes a nasty problem at the server with datareaders already being open. But why is this only occurring when GetParticipants is called as the third in the chain?
I see, after some research, that there are now other ways of arranging async calls, e.g. using 'Promises', but I'd like to understand what's going on here.
Solved this.
Part of the 'document re-arrangement code' that I had commented out for this post was in fact making another Ajax call indirectly (very indirectly, hence it took a long time to find).
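As an aside, since the question mentions Promises: a hedged sketch of how myAjax could return the jqXHR that $.ajax produces, so that dependent calls are chained explicitly and accidental overlaps become much easier to spot (names as in the question):

rideData.myAjax = function (url, type, data) {
    return $.ajax({
        type: type,
        data: JSON.stringify(data),
        url: quilkinUrlBase() + url,
        contentType: "application/json; charset=utf-8",
        dataType: "json"
    });
};

// Each step starts only after the previous call has finished
rideData.myAjax("SaveRide", "POST", ride)
    .then(function (response) {
        return getWebRides(date); // getWebRides would also need to return its jqXHR
    })
    .fail(webRequestFailed);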
A similar question has been asked before, but I don't believe it overcomes the challenges in this case because my function calls are all together, so please bear with me (I'll delete the question if appropriate).
I have a number of dashboard widgets that each make an $.ajax call, receive a JSON result and then process that to render a Google chart. The widgets can be used multiple times, so there are some duplicated AJAX calls occurring, e.g.
RenderChart('/api/LoginCount?DaysPrevious=7', 'ColumnChart'); // some parameters removed, for brevity
RenderChart('/api/LoginCount?DaysPrevious=7', 'AreaChart');
RenderChart('/api/LoginCount?DaysPrevious=7', 'Table');
The problem is that this generates multiple calls to the same URL, which is extremely wasteful. I saw in the linked question that an object can be used to cache the results, but when I applied this, it didn't seem to work because the second call to RenderChart (immediately after the first) saw there was no data (yet) in the cache, and called the URL again.
My code is:
function LoadDataFromApi(apiUrl) {
    return $.ajax({
        type: 'GET',
        url: apiUrl,
        dataType: "json",
        success: function (data) { }
    });
}
function RenderChart(apiUrl, chartElementId, chartType, chartOptions) {
    $.when(LoadDataFromApi(apiUrl)).done(function (data) {
        var el = $('#' + chartElementId);
        try {
            // Transform JSON into array of arrays (required by Google Visualization)
            var arrayOfArrays = BuildGoogleArrayFromData(data);
            $(el).empty();
            if (arrayOfArrays.length == 0) { // Data found?
                $(el).append('<p class="noData">No data was found.</p>');
            } else {
                // alert(wrapper.getChartType()); // wrapper is the only way to get chart type
                var wrapper = new google.visualization.ChartWrapper({
                    chartType: chartType,
                    dataTable: google.visualization.arrayToDataTable(arrayOfArrays, false),
                    options: chartOptions,
                    containerId: chartElementId
                });
                wrapper.draw();
            }
        }
        catch (ex) {
            $(el).append('<p class="error">An error occurred: ' + ex.message + '</p>');
        }
    });
}
Ideally it would be good to cache the arrayOfArrays value, as at this point all additional processing is also complete. However, getting JavaScript to see which other API calls are in progress and wait for them is where I'm struggling. Is this possible to achieve?
If anyone can handhold me into achieving both I'll put a bonus on the question. I read about promises, but I need to support IE9+.
I can think of making a cache map with the URL as its key and the AJAX request as its value. We can change your LoadDataFromApi function to leverage this cache and return the matching AJAX request if one exists, or else make a new request.
The following snippet shows how it can be done:
var requestCache = {};

function LoadDataFromApi(apiUrl) {
    if (!requestCache[apiUrl]) {
        requestCache[apiUrl] = $.ajax({
            type: 'GET',
            url: apiUrl,
            dataType: "json"
        });
    }
    return requestCache[apiUrl];
}
This way, you can call LoadDataFromApi without any limit, and chain your promise handlers like this:
LoadDataFromApi('http://fake.url')
    .then(function (data) {
        // use the data in one widget
    });

LoadDataFromApi('http://fake.url')
    .then(function (data) {
        // use this data in another widget
    });

// ... and so on
This way the AJAX call for a particular URL will be made only once, and the result will be shared among the promise handlers.
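If you also want to cache the processed arrayOfArrays, as the question suggests, one variation is to cache the transformed promise instead, so every consumer receives the already-built array. A sketch, assuming jQuery 1.8+ (where .then chains) and a synchronous BuildGoogleArrayFromData:

var arrayCache = {};

function LoadArraysFromApi(apiUrl) {
    if (!arrayCache[apiUrl]) {
        arrayCache[apiUrl] = $.ajax({
            type: 'GET',
            url: apiUrl,
            dataType: 'json'
        }).then(function (data) {
            // the cached promise now resolves with the processed array
            return BuildGoogleArrayFromData(data);
        });
    }
    return arrayCache[apiUrl];
}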
I'm working on my own RSS reader using JS, jQuery and PHP for serving the data as JSON. What I'm doing, basically, is making async calls to my server to get JSONs with the posts, then on 'success' I parse them using $.each and load the content into the DOM with jQuery.
All of these operations are async, but now I need to call them in a certain order, and when everything is done THEN call a function to process the data.
To give you some background on my task: what I'm doing is a query over a small list of RSS sources to get just the very latest post. With those I concatenate a string, and this string is passed to a text-to-speech service.
I've managed to make it work using an arbitrary setTimeout value of 10 seconds, but my goal is to call the function when all the sources have been processed.
This is a basic version of my parser:
function urgent_posts(url) {
    $.ajax({
        //the location of my server
        url: 'myPostServer.php?url=' + encodeURIComponent(url),
        dataType: 'json',
        success: function (data) {
            //do this for each entry in the feed
            $.each(data.feed.entries, function (key, value) {
                //validate the date to get just the latest post
                if (is_urgent(value.publishedDate)) {
                    //if condition is met save the title
                    save_urgent_post_title(value.title);
                }
            });
        }
    });
}
What I did to 'make it work' was the following:
$('#test_button').on('click', function () {
    urgent_posts(source_1);
    urgent_posts(source_2);
    urgent_posts(source_3);
    //and so on...
    urgent_posts(source_n);
    setTimeout(function () {
        text_to_speech(get_urgent_post_string);
    }, 10000);
});
I tried, with no result, to make use of the deferred object in jQuery, like this:
function urgent_posts(url) {
    var deferred = $.Deferred();
    $.ajax({
        //the location of my server
        url: 'myPostServer.php?url=' + encodeURIComponent(url),
        dataType: 'json',
        success: function (data) {
            //do this for each entry in the feed
            $.each(data.feed.entries, function (key, value) {
                //validate the date to get just the latest post
                if (is_urgent(value.publishedDate)) {
                    //if condition is met save the title
                    save_urgent_post_title(value.title);
                }
            });
        }
    });
    return deferred.promise();
}
And chaining everything together:
$('#test_button').on('click', function () {
    urgent_posts(source_1)
        .then(urgent_posts(source_2))
        .then(urgent_posts(source_3))
        .then(function () {
            text_to_speech(get_urgent_post_string);
        });
});
I'd appreciate your comments and suggestions.
First, your deferred object is never resolved. You have to call deferred.resolve() somewhere; just after the $.each loop looks like a nice place. Also note that .then(urgent_posts(source_2)) invokes urgent_posts immediately and passes its result to .then; to chain the calls sequentially you need to pass a function, e.g. .then(function () { return urgent_posts(source_2); }).
Second, $.ajax already returns a promise, so you can just write this:
return $.ajax({
    //the location of my server
    url: 'myPostServer.php?url=' + encodeURIComponent(url),
    dataType: 'json',
    success: function (data) {
        //do this for each entry in the feed
        $.each(data.feed.entries, function (key, value) {
            //validate the date to get just the latest post
            if (is_urgent(value.publishedDate)) {
                //if condition is met save the title
                save_urgent_post_title(value.title);
            }
        });
    }
});
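With that in place, the click handler can chain the calls by passing functions rather than invoking them up front. A sketch using the question's names, assuming jQuery 1.8+ (on older versions, use .pipe instead of .then):

$('#test_button').on('click', function () {
    urgent_posts(source_1)
        .then(function () { return urgent_posts(source_2); })
        .then(function () { return urgent_posts(source_3); })
        .then(function () {
            text_to_speech(get_urgent_post_string);
        });
});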
I managed to solve the problem using this article: link
The refactored code looks like this now:
function urgent_posts_feed_1(callback) {
    return $.ajax({
        //the location of my server
        url: 'myPostServer.php?url=' + encodeURIComponent(feed_1),
        dataType: 'json',
        success: function (data) {
            //do this for each entry in the feed
            $.each(data.feed.entries, function (key, value) {
                //validate the date to get just the latest post
                if (is_urgent(value.publishedDate)) {
                    //if condition is met save the title
                    save_urgent_post_title(value.title);
                }
            });
        }
    });
}
I repeat myself (I know it's not cool to do so) and wrote the following functions, manually setting the URL:
urgent_posts_feed_2
urgent_posts_feed_3
urgent_posts_feed_4
...
urgent_posts_feed_n
And finally...
urgent_posts_feed_1()
    .then(urgent_posts_feed_2)
    .then(urgent_posts_feed_3)
    //...
    .then(urgent_posts_feed_n)
    .then(function () {
        text_to_speech(get_urgent_post_string);
    });
This way it works like a charm. Now I have to figure out how to pass parameters to the function and not interfere with the callback (a sketch of one approach follows).
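For the record, a hedged sketch of the parameterized version: one function that takes the feed URL, with the chain built in a loop, assuming jQuery 1.8+ so that .then waits on a returned promise:

function urgent_posts(url) {
    return $.ajax({
        url: 'myPostServer.php?url=' + encodeURIComponent(url),
        dataType: 'json',
        success: function (data) {
            $.each(data.feed.entries, function (key, value) {
                if (is_urgent(value.publishedDate)) {
                    save_urgent_post_title(value.title);
                }
            });
        }
    });
}

var feeds = [feed_1, feed_2, feed_3 /* ...feed_n */];
var chain = $.Deferred().resolve().promise();
$.each(feeds, function (i, url) {
    // each link of the chain starts only after the previous feed finishes
    chain = chain.then(function () {
        return urgent_posts(url);
    });
});
chain.then(function () {
    text_to_speech(get_urgent_post_string);
});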
I am looking to send a number of different queries via $.ajax as JSON.
I have stored these queries in an object using the following:
var objectName = {
    "name1": {
        "queryName": "longname1",
        "queryAction": "JSONtoSend"
    },
    "name2": {
        "queryName": "longname2",
        "queryAction": "JSONtoSend"
    }
};
I am then going through the queryActions and setting them:
for (var i = 0, len = Object.keys(objectName).length; i < len; ++i) {
    var indexName = Object.keys(objectName)[i];
    objectName[indexName].queryAction = "";
    var JSONtoTransfer = objectName[indexName].queryAction;
}
$.ajax({
    type: "POST",
    url: 'URL',
    data: JSONtoTransfer,
    contentType: "application/json; charset=utf-8",
    dataType: "json",
    success: function (dataReturn) {
        alert(dataReturn.blah);
    }
});
I am unable to set the var JSONtoTransfer. It gives me an unexpected [ error. How do I get around this? I get the same error if I enter it straight into the data parameter of $.ajax.
The code I am using is storing the queries in the object correctly, but I need a way to iterate through them all and send via $.ajax.
Thank you for the help. This code is probably not the most efficient way of doing things, so if anyone has any advice, it's more than welcome too :-)
So I wrote the original code wrong: the $.ajax call should be included in the for statement, so it actually iterates.
Anyway, what I found to work was creating an array, pushing each queryAction into it, and then stringifying it. A sketch of that follows.
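A hedged sketch of that fix (names from the question; the 'URL' endpoint is a placeholder): build the payload array in the loop, stringify it once, and send a single request:

var queries = [];
for (var i = 0; i < Object.keys(objectName).length; ++i) {
    var indexName = Object.keys(objectName)[i];
    queries.push(objectName[indexName].queryAction);
}
$.ajax({
    type: "POST",
    url: 'URL',
    data: JSON.stringify(queries),
    contentType: "application/json; charset=utf-8",
    dataType: "json",
    success: function (dataReturn) {
        alert(dataReturn.blah);
    }
});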
A few problems:
JSONtoTransfer is out of scope of your AJAX call. If you want to populate JSONtoTransfer on every iteration and make an AJAX request with this different value each time, put the AJAX call inside the for loop (although I would seriously consider refactoring this so that you make one AJAX call and deserialize it differently, if it's your server-side code handling it).
You're setting objectName[indexName].queryAction to an empty string, then assigning this value to JSONtoTransfer (which is now always going to be an empty string).
One note on your for syntax: the initialiser clause runs only once, so len is not repeatedly re-initialised. For readability you could simplify to
for (var i = 0; i < Object.keys(objectName).length; ++i) {
but that recomputes Object.keys(objectName) on every iteration, so the cached-len form you have is actually the faster one (and necessary if you use len elsewhere in the loop). A bigger win would be hoisting var keys = Object.keys(objectName) above the loop, since the loop body also calls it again on every pass.
Your variable objectName is in fact already usable as the request data. I might be wrong, but I think this should work (with less code):
var jsonData = {
    "name1": {
        "queryName": "longname1",
        "queryAction": "JSONtoSend"
    },
    "name2": {
        "queryName": "longname2",
        "queryAction": "JSONtoSend"
    }
};
//Post with AJAX (note: to pass 'json' as the dataType, the success
//callback slot must be filled, even if only with null)
$.post('url.php', jsonData, null, 'json')
    .done(function (data) {
        alert('Success!');
    })
    .fail(function (data) {
        alert('Failed!');
    });
//This does the same (Post with AJAX); note that .done/.fail/.always
//are chained onto the $.ajax call, not placed inside the options object
$.ajax({
    url: 'url.php', //Get action attribute of the form
    type: "POST",
    data: jsonData,
    dataType: "json"
})
    .done(function () { //or success: function() {
        alert("success");
    })
    .fail(function () { //or error: function() {
        alert("error");
    })
    .always(function () { //or complete: function() {
        alert("complete");
    });
I am not sure what you want, but as pointed out by others there are many issues with your code. I think you want to execute the AJAX calls one after the other, iteratively. If that is what you want, take a look at jQuery Deferred (docs are here); a minimal sketch follows. Hope that helps.
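A minimal sketch of that idea, assuming a hypothetical sendQuery helper that returns the jqXHR from $.ajax so the calls run one after another (the 'URL' endpoint is a placeholder):

function sendQuery(query) {
    return $.ajax({
        type: "POST",
        url: 'URL',
        data: JSON.stringify(query),
        contentType: "application/json; charset=utf-8",
        dataType: "json"
    });
}

sendQuery(objectName.name1)
    .then(function () { return sendQuery(objectName.name2); });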
I am trying to run through an array, sending each value to a PHP file, and on a callback send the next value once the PHP has completed its download. Here's what I have so far.
My array comes through as follows:
["http://example.com/test1.zip", "http://example.com/test2.zip", "http://example.com/test3.zip", "http://example.com/test4.zip", "http://example.com/test5.zip"]
Above is the output from console.log(values) below; it grabs some URLs from checkbox values.
$('.geturls').live('click', function () {
    var values = new Array();
    $.each($("input[name='downloadQue[]']:checked"), function () {
        values.push($(this).val());
        ajaxRequest($(this).val(), function (response) {
            console.log(response);
        });
    });
    console.log(values);
    return false;
});
This then calls an AJAX function on which I am trying to do a callback:
function ajaxRequest(urlSend, callback) {
    var send = {
        url: urlSend
    };
    $.ajax({
        type: "POST",
        url: "<?php echo base_url(); ?>index.php/upload",
        data: send,
        //dataType: "json",
        //timeout: 8000,
        beforeSend: function () {
        },
        success: function (response) {
            callback('added');
        },
        error: function (response) {
            callback('false');
        }
    });
}
This will then send to a PHP file:
function upload(){
    $output = shell_exec("wget {$_POST['url']} 2>&1");
    return true;
}
What I am trying to do: after the callback from one URL that has downloaded fully, grab the next value from the array and download that URL, and so on, until all the URLs in the array are downloaded fully.
At the moment it just downloads the first value and then crashes, because it doesn't restart the loop after a return value of true comes back.
Hope this makes sense to someone; I'm just looking for some help on the best way to loop through an array of values with a callback after each completes.
Maybe this structure can help you. In this variant you go to the next URL only after successful completion of the previous Ajax call.
var arr = ['url0', 'url1', 'url2', 'url3'];
var index = 0;

function Run() {
    DoAjax(arr[index]);
}

function Next() {
    if (index >= arr.length) {
        index = 0; // all URLs processed; reset for the next run
        return;
    }
    DoAjax(arr[index]);
}
function DoAjax(url) {
    $.ajax({
        type: "POST",
        url: url,
        data: send, // your request payload
        beforeSend: function () {
        },
        success: function (response) {
            index++;
            Next();
            // Additional logic if needed
        },
        error: function (response) {
        }
    });
}

Run();
Now that I have a bit more time, I thought it would be good to show an alternative which takes advantage of the fact that jQuery's ajax is now implemented as a deferred, meaning you can use pipe chaining to do all the work for you. I've also eliminated the callbacks by taking advantage of the deferred behavior.
This should give you the idea.
// Use jquery deferred pipe chaining to force
// async functions to run sequentially
var dfd = $.Deferred(),
    dfdNext = dfd,
    x,
    values = [],
    // The important thing to understand here is that
    // you are returning the value of $.ajax to the caller.
    // The caller will then get the promise from the deferred.
    ajaxRequest = function (urlSend) {
        var send = {
            url: urlSend
        };
        return $.ajax({
            type: "POST",
            url: "<?php echo base_url(); ?>index.php/upload",
            data: send
        });
    };

// Starts things running. You should be able to put this anywhere
// in the script, including at the end, and the code will work the same.
dfd.resolve();

// Deferred pipe chaining. This is the main part of the logic.
// What you want to note here is that a new ajax call will
// not start until the previous ajax call is completely finished.
// Also note that we've moved the code that would
// normally be in the callback.
// Finally notice how we are chaining the pipes by
// replacing dfdNext with the return value from the
// current pipe.
for (x = 1; x <= 4; x++) {
    values.push(x);
    dfdNext = dfdNext.pipe(function () {
        var value = values.shift();
        return ajaxRequest(value)
            .done(function (response) {
                // Code here that you would have
                // put in your callback.
                console.log(response);
            })
            .fail(function (response) {
                console.log(response);
            });
    });
}
Working example you can play with on jsFiddle.