Performance of multiple AJAX calls in JavaScript

I am developing a module in JavaScript that, in some edge cases, will launch multiple AJAX calls to a web service across a range of IPs. For example:
var _requests = [],
    _found = false;

var _callback = function (data, status, petitionInfo) {
    _found = true;
    var _requestsToCancel = _requests.length;
    while (_requestsToCancel--) {
        _requests[_requestsToCancel].abort();
    }
};

for (var i = 1; i < 255 && !_found; i++) {
    _requests.push($.ajax({
        url: "http://192.168.1." + i + "/service?action=example",
        type: "GET",
        success: _callback
    }));
}
In terms of performance, is it necessary (or convenient) to cancel every pending AJAX request, or is it irrelevant? Once one IP responds, no other IP will. Would setting a low timeout be better for performance?

As you have already started the requests, each server will receive a request.
I'm assuming, however, that only one will respond and the others will time out, in which case I would recommend cancelling the other requests to save resources on the client.
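If you keep the abort approach, it can also be combined with jQuery's per-request timeout option, which the question hints at. Here is a minimal sketch reusing the _requests, _found and _callback variables from the question; the 2000 ms value is purely illustrative, not a recommendation:
for (var i = 1; i < 255 && !_found; i++) {
    _requests.push($.ajax({
        url: "http://192.168.1." + i + "/service?action=example",
        type: "GET",
        timeout: 2000, // illustrative value: give up on this IP after 2 seconds
        success: _callback,
        error: function () {
            // timeouts and aborted requests end up here; usually safe to ignore in this scan
        }
    }));
}
Note that a timeout only bounds how long the client waits for each IP; every request is still dispatched, so aborting the rest once one responds is still worthwhile.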

Related

Javascript timer for rate limited API

I'm using an API, 'Have I been pwned?', which is rate limited: "limited to one per every 1500 milliseconds". I have looked at quite a few other questions on here, researched via Google and a couple of other forum sites, and tried myself to find a solution to this one.
Does the JavaScript function setInterval() really work for this kind of problem? Has anyone found a solution that effectively works? I'm kind of at my wit's end with this one, as the following code
var url = "https://haveibeenpwned.com/api/v2/breachedaccount/";
var breach = Array();
setInterval($.ajax({
    url: url,
    type: 'GET',
    dataType: 'JSON',
    success: function(data) {
        breach[] = data;
    }
}), 15000);
does not seem to work, especially since my current project stores the information for multiple email addresses. So, for example, I might store 4 email addresses in an array and want to loop through them, but wait the 1500 ms before hitting the API again to query the next email address.
Any ideas, anyone? Or is there a Node.js solution to this that might work, as I've been learning that recently too?
Thank you in advance.
I would create a queue with requests and process them one by one.
function myRequestQueue()
{
    this.queue = [];
    this.timer = null;

    this.add = function(params)
    {
        this.queue.push(params);
    }

    this.processQueue = function()
    {
        if (this.queue.length > 0)
        {
            $.ajax(this.queue.shift());
        }
    }

    // bind so that "this" still refers to the queue inside the interval callback
    this.timer = setInterval(this.processQueue.bind(this), 1500);
}
Usage:
var myQ = new myRequestQueue(), breach = [];

for (var i = 0; i < 100; i++) {
    myQ.add({
        url: url,
        dataType: 'JSON',
        success: function(data) { breach.push(data); }
    });
} // 100 requests
// expect them to be processed one by one every 1500 ms
You may want to add a callback for when the queue is emptied, or something similar, depending on your exact use case.
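For example, here is a minimal sketch of that extension, assuming the same jQuery setup as above; the onEmpty parameter name is purely illustrative:
function myRequestQueue(onEmpty)
{
    this.queue = [];
    this.timer = null;

    this.add = function(params)
    {
        this.queue.push(params);
    }

    this.processQueue = function()
    {
        if (this.queue.length > 0)
        {
            $.ajax(this.queue.shift());
        }
        else if (typeof onEmpty === "function")
        {
            clearInterval(this.timer); // stop polling once the queue has drained
            onEmpty();
        }
    }

    this.timer = setInterval(this.processQueue.bind(this), 1500);
}

// usage sketch: log a message once every queued request has been dispatched
var myQ = new myRequestQueue(function() { console.log("queue empty, all requests dispatched"); });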

jQuery AJAX GET calls returning results in a different order

I've written a few lines of code to tackle the following problem:
Get the TwitchTV UserID based on the username. The usernames are in an array. After hitting a button, a GET AJAX request is made to the API to fetch each UserID and push it into another array.
My problem is that if I hit the button again, the user IDs are in the wrong order.
If I set async: false, it works.
Is this problem because of asynchronous AJAX? What would be the approach for a valid fix? Use a callback? A hint in the right direction would be appreciated.
The comments in the code are for testing.
Code:
<script>
var users = ["ESL_SC2", "OgamingSC2", "cretetion", "freecodecamp", "storbeck", "habathcx", "RobotCaleb", "spicyjewlive"];
clientID = "?client_id=XXX";
userID = [];
userStatus = [];

for (var i = 0; i < users.length; i++) {
    idCall(users[i]);
}

function idCall(data) {
    $.ajax({
        type: "GET",
        url: "https://api.twitch.tv/kraken/users/" + data + clientID,
        async: false,
        cache: false,
        dataType: "jsonp",
        success: function (data) {
            console.log(data._id);
        },
        error: function (data) {
            console.log("error");
        }
    });
}
</script>
Use an array of request promises and update the DOM after all of them are resolved. The results will be in the same order as the original users array:
var requestPromises = users.map(idCall);

$.when.apply(null, requestPromises).then(function () {
    var dataArray = [].slice.call(arguments);
    // now loop over data array which is in same order as users
    dataArray.forEach(function (userData) {
        // do something with each userData object
    });
}).fail(function () {
    console.log("Something went wrong in at least one request");
});

function idCall(user) {
    // return the ajax promise
    return $.ajax({
        url: "https://api.twitch.tv/kraken/users/" + user + clientID,
        dataType: "jsonp"
    }).then(function (data) {
        // return the data resolved by promise
        return data;
    });
}
What would be the approach for a valid fix?
I think #charlieftl's answer is a good one; I don't think I can really improve on it. I've mainly added this to try to explain your difficulties here. As I mentioned in a previous question you posted, you need to read and understand How do I return the response from an asynchronous call?
If you had control of the server side, a simpler option would be to send the array server-side in order and use that to maintain the ordering, but you're using a third-party API, so this isn't really practical.
A simpler option would be to use a callback method:
var users = ["ESL_SC2", "OgamingSC2", "cretetion", "freecodecamp", "storbeck", "habathcx", "RobotCaleb", "spicyjewlive"];
var clientID = "?client_id=XXX";
var userID = [];
var userStatus = [];

idCall(users);

function idCall(users, x = 0) {
    if (x < users.length) {
        var data = users[x];
        $.ajax({
            type: "GET",
            url: "https://api.twitch.tv/kraken/users/" + data + clientID,
            cache: false,
            dataType: "jsonp",
            success: function (data) {
                console.log(data._id);
            },
            error: function (data) {
                console.log("error");
            }
        })
        .then(function () {
            // move on to the next user only after this request has finished
            idCall(users, x + 1);
        });
    }
}
Though, like I said, charlie's answer is better. This will guarantee the order and won't lock the UI, but it's slow.
Is this problem because of Asynchronous AJAX?
Yes, or more precisely: asynchronous processing on the server and response ordering.
When you run this, open up the network panel of your debugging tool and watch the HTTP requests. What you'll see is a load of requests getting sent off at the same time but returning in different orders. You cannot guarantee the response order of an async request; it depends on the server response time, load, etc.
Example:
The requests were sent in the order A,B,C,D,E,F,G but the responses were received in the order B,C,E,F,G,D,A. This is the nub of your problem.
Setting async to false changes this:
Now no request will be sent until the previous one has returned. So everything is sent A-G and returns A-G, but this is a LOT slower than the previous example (the graphs here aren't to scale; I struggled to capture a good async: false example as I never use it, see the reasons below).
Never do this (pretty much ever). What will happen is that the UI thread will become locked (JavaScript in browsers is mostly single-threaded) waiting for an external call, and your web site will become unresponsive for long periods of time. Locking this thread doesn't just stop JavaScript, it stops the entire site, so links don't work, hover-overs, everything, until the blocking call returns the thread to the UI.
Yes, it is because of the AJAX calls: one AJAX call might finish before another, hence the different order.
An obvious solution is to use jQuery promises, but another solution to this problem could be to sort your array after your AJAX calls have completed.
You can sort your array according to the username string like this:
data.sort();
If you have an array of objects, which you don't in the example code, you can sort it like this:
data.sort(function (a, b) { return (a.Name > b.Name) ? 1 : ((b.Name > a.Name) ? -1 : 0); } );
After going through the comments, I thought I'd give some more code to provide a better idea of how to solve this.
<script>
var users = ["ESL_SC2", "OgamingSC2", "cretetion", "freecodecamp", "storbeck", "habathcx", "RobotCaleb", "spicyjewlive"];
clientID = "?client_id=XXX";
userID = [];
userStatus = [];
var count = 0;

for (var i = 0; i < users.length; i++) {
    idCall(users[i]);
}

function idCall(data) {
    $.ajax({
        type: "GET",
        url: "https://api.twitch.tv/kraken/users/" + data + clientID,
        async: false,
        cache: false,
        dataType: "jsonp",
        success: function (data) {
            console.log(data._id);
            userID.push(data._id);
            count++;
            // sort once the last response has arrived
            if (count === users.length) {
                userID.sort();
            }
        },
        error: function (data) {
            console.log("error");
        }
    });
}

Ajax request succeeds in Chrome but fails in Firefox

I have a javascript function that makes an ajax call to a PHP page for some database transactions and data processing. Here is the function:
function processQuizResults()
{
    console.log("Processing results...");
    var selections = [];
    for (var i = 1; i <= 8; i++)
    {
        var thestr = "#sel" + i;
        $(thestr).is(':checked') ? selections.push(1) : selections.push(0);
    }
    var URLcode = '<?php echo $URLcode; ?>';
    var ajaxURL = "/processNewData.php?qid=" + URLcode + "&res=";
    for (var i = 0; i < 8; i++)
    {
        ajaxURL += selections[i];
    }
    $.ajax({
        url: ajaxURL,
        type: 'post'
    })
    .done(function () {
        console.log("ajax success.");
    })
    .fail(function () {
        console.log("ajax failure.");
    });
}
In Chrome, the browser makes the AJAX call, I see the breakpoints in my processNewData.php get hit (if I have some set) in Visual Studio, and the data comes out clean and shows up properly in my database on the other side. However, in Firefox, running this exact same function makes $.ajax go into its .fail method, and the processNewData.php code never gets executed.
I have no idea how to debug an issue like this, and I have no idea what could be causing the problem. Can anyone kindly tell me where I'm going wrong?
According to this answer you're probably running into CORS issues.
From what domains are you serving the server-side app and the front-end? Try to make sure they're on the same domain and see if you get the same error.
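To narrow this down, it may also help to log the arguments jQuery passes to the fail handler. A minimal sketch of the same call with extra diagnostics (no behaviour change otherwise):
$.ajax({
    url: ajaxURL,
    type: 'post'
})
.done(function () {
    console.log("ajax success.");
})
.fail(function (jqXHR, textStatus, errorThrown) {
    // a status of 0 with an empty response is typical of a blocked cross-origin request
    console.log("ajax failure:", jqXHR.status, textStatus, errorThrown);
});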

Ajax Call in For Loop, For Loop Completing Before all Calls are Made

I have created a for loop that runs once for each time an element appears in a container. The for loop grabs some data from the HTML and builds a JSON URL, which then returns a value. That value should then be added to the HTML in the appropriate place.
The problem seems to be that the for loop completes before all of the AJAX calls do, so only the last value is being added to the HTML. I thought I could make sure that the readyState is equal to 4, but that solution did not work. I also tried using complete, rather than success, as the AJAX event. Any insights? Here is my code.
for (var index = 0; index < $('#wizSteps #step6 label').length; index++) {
    var priceCount;
    console.log(index);
    var currentSelect = $('#wizSteps #step6 label[data-pricepos="' + index + '"]');
    url = 'http://www.thesite.com/api/search.json?taxonomy=cat3435' + currentSelect.find('input').attr('name');
    jQuery.ajax({
        url: url,
        dataType: "JSON",
        success: function (data) {
            var totalResult = data.totalNumberOfResults;
            console.log(currentSelect);
            currentSelect.find('.itemCount').text(totalResult);
        }
    });
}
It looks like you don't necessarily need the requests to finish in order, you just need to keep track of currentSelect in a way that works. For that, you can use the context ajax option:
for (var index = 0; index < $('#wizSteps #step6 label').length; index++) {
    var currentSelect = $('#wizSteps #step6 label[data-pricepos="' + index + '"]');
    url = 'http://www.thesite.com/api/search.json?taxonomy=cat3435' + currentSelect.find('input').attr('name');
    jQuery.ajax({
        url: url,
        dataType: "JSON",
        context: currentSelect,
        success: function (data) {
            var totalResult = data.totalNumberOfResults;
            this.find('.itemCount').text(totalResult);
        }
    });
}
That is OK; the calls are not supposed to complete inside the loop. They are only initiated there.
AJAX is asynchronous: the queries complete later, possibly in a different order.
If you want to be sure that every call is completed before you make the next one,
you must start the next call from the callback function of the previous one.
In your case the variable may already have been overwritten by the time the callback function runs.
You can learn more on this here:
Asynchronous Javascript Variable Overwrite
Another interesting question/discussion related to the topic:
What are the differences between Deferred, Promise and Future in JavaScript?
It does not directly answer your question, but it helps to understand the problem more deeply.
The point is that you probably don't need the loop at all (or you do, but in a completely different form).
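As a concrete illustration of the overwrite issue, here is a minimal sketch in which each request gets its own copy of currentSelect by passing it into a function; it assumes the same markup and API as the question, and the helper name countItems is purely illustrative:
function countItems(currentSelect) {
    // currentSelect is a parameter here, so each request keeps its own copy
    var url = 'http://www.thesite.com/api/search.json?taxonomy=cat3435' +
        currentSelect.find('input').attr('name');
    jQuery.ajax({
        url: url,
        dataType: "JSON",
        success: function (data) {
            // this callback closes over the parameter, not the shared loop variable
            currentSelect.find('.itemCount').text(data.totalNumberOfResults);
        }
    });
}

for (var index = 0; index < $('#wizSteps #step6 label').length; index++) {
    countItems($('#wizSteps #step6 label[data-pricepos="' + index + '"]'));
}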
You should try creating a recursive function that you call again in the success handler of the AJAX call; this way you can be sure that the next AJAX call is made only once the previous one is done.
If you want the requests in a sequence, you can work with a queue.
First build the queue:
var queue = [],
    index,
    stepLength = $('#wizSteps #step6 label').length;

for (index = 0; index < stepLength; index++) {
    var priceCount;
    console.log(index);
    var currentSelect = $('#wizSteps #step6 label[data-pricepos="' + index + '"]');
    url = 'http://www.thesite.com/api/search.json?taxonomy=cat3435' + currentSelect.find('input').attr('name');
    queue.push([url, currentSelect]);
}
And after that do the serial ajax requests:
function serialAjax() {
    if (queue.length === 0) {
        return;
    }
    var queueData = queue.shift(),
        url = queueData[0],
        currentSelect = queueData[1];
    jQuery.ajax({
        url: url,
        dataType: "JSON",
        success: function (data) {
            var totalResult = data.totalNumberOfResults;
            console.log(currentSelect);
            currentSelect.find('.itemCount').text(totalResult);
            serialAjax();
        }
    });
}

// call the function
serialAjax();

JavaScript for loop with innerHTML not updating during loop execution

I'm looping through an array, and during each iteration of the loop, I'm calling a URL through AJAX. I'd also like to update an element's .innerHTML so that it keeps the user informed as to which iteration of the loop is being processed. However, the .innerHTML update only appears once the script completes.
How can I make this notification display during my loop?
I'm also using the jQuery ajax setting 'async: false'. I don't want to hammer my server by processing all of the AJAX requests at once, as they encode video files, which is CPU intensive. I don't really want to lock the browser up waiting for synchronous requests to complete either.
Is there a better way to do this?
My ultimate goal is to sequentially execute my combine.php script for each set of videos, while displaying a current status indicator to the user, and while not locking the browser up in the process. Your help is appreciated!
Code snippet here:
// process the loop of videos to be combined
var status = document.getElementById('currentStatus');
for (i = 0; i < count; i++) {
    // change the display
    var fields = videos[i].split(":", 2);
    current = i + 1;
    currentStatus.innerHTML = "<b>Multi-part Videos:</b> <h3 class='status'>Currently Updating Bout #" + fields[1] + " (" + current + " of " + count + " videos)</h3>";
    // run the combine
    var dataString = 'videoId=' + fields[0];
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function(txt) {
            // deselect the checkbox
            document.combine.video[selected[i]].checked = false;
        },
        async: false
    });
}
async: false will hang the entire browser until the ajax request completes. That is why you don't see the page update on each loop iteration.
Synchronous ajax requests typically make for terrible UX (do you like the page to freeze inexplicably?) and there is almost always a better way to do it. Since you're using jQuery, the Deferred Object API makes this easy.
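For example, here is a minimal sketch of chaining the requests with jQuery's Deferred/promise API (jQuery 1.8+ for chained .then), reusing the videos, count and currentStatus variables from the question; the combineVideo helper name is just an illustration, and the checkbox handling is omitted:
function combineVideo(i) {
    // update the status display before starting this request
    var fields = videos[i].split(":", 2);
    currentStatus.innerHTML = "<b>Multi-part Videos:</b> Currently updating Bout #" + fields[1] + " (" + (i + 1) + " of " + count + " videos)";
    // return the promise so the next step can wait for it
    return $.post("combine.php", { videoId: fields[0] });
}

// chain the requests so each one starts only after the previous one resolves
var chain = $.Deferred().resolve();
for (var i = 0; i < count; i++) {
    (function (i) {
        chain = chain.then(function () {
            return combineVideo(i);
        });
    })(i);
}
chain.then(function () {
    currentStatus.innerHTML = "All videos processed";
});
Because each request only starts when the previous promise resolves, the browser stays responsive and the status text updates between requests.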
As others have alluded, your problem is caused by JavaScript being single-threaded: while the single JS thread is waiting for your ajax request to return, it's not allowed to update the UI.
You can get around this by changing the request to async, and using the callback to trigger the request for the next object:
// trigger the loop of videos to be combined
var status = document.getElementById('currentStatus');
processVideo(0);

function processVideo(index) {
    var fields = videos[index].split(":", 2);
    currentStatus.innerHTML = "<b>Multi-part Videos:</b> <h3 class='status'>Currently Updating Bout #" + fields[1] + " (" + (index + 1) + " of " + count + " videos)</h3>";
    // run the combine
    var dataString = 'videoId=' + fields[0];
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function() {
            processResponse(index);
        },
        async: true
    });
}

function processResponse(index) {
    // this method is called each time the ajax request finishes
    // deselect the checkbox for the video that has just been combined
    document.combine.video[selected[index]].checked = false;
    index++;
    if (index < count) {
        processVideo(index);
    }
}
If you want to update the display one request at a time while async is set to true, the next request can be made in the success callback function. The status-update code should be inside that function too.
function ajaxRequest(i){
    // other processing
    .............
    $.ajax({
        type: "POST",
        url: "combine.php",
        data: dataString,
        success: function(txt) {
            // deselect the checkbox
            document.combine.video[selected[i]].checked = false;
            // update status
            currentStatus.innerHTML = .....
            // make next request
            if (i < lastOne) {
                ajaxRequest(i + 1);
            }
        },
        async: true
    });
}
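A usage sketch, assuming selected, lastOne, dataString and currentStatus are set up as in the question:
ajaxRequest(0); // kicks off the first request; each success schedules the next one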
