Run AJAX request until it returns results - JavaScript

I currently rely on a simple AJAX call to fetch some data from our query service API. It is unfortunately not the most robust API and can at times return an empty result set. As such, I want to retry the AJAX call until resultSet.length > 0.
I could use setTimeout and break the loop once I find results, but that seems like an inelegant solution, especially as the time to completion is anywhere between 1s and 6s. I currently have the following, but it doesn't seem to break the loop when needed, and it remains inelegant. Any help would be appreciated!
var resultSet = 0;

function fetchQueryData(query, time, iter) {
    (function myLoop(i) {
        if (i == iter) {
            fetchData(resultSet, dataset, query);
        } else {
            setTimeout(function () {
                if (resultSet == 0) {
                    fetchData(resultSet, dataset, query);
                }
                if (--i) myLoop(i);
            }, time);
        }
    })(iter);
}

fetchQueryData('select * from table', 6000, 5);

function fetchData(resultSet, dataset, query) {
    var dataString = 'query=' + encodeURIComponent(query);
    $.ajax({
        type: "POST",
        data: dataString,
        url: "/queryapi",
        success: function (json) {
            var data = [];
            var schema = json.data.schema;
            var rows = json.data.rows;
            if (typeof schema != 'undefined') {
                resultSet = 1;
                for (var i = 0; i < rows.length; i++) {
                    var obj = {};
                    for (var j = 0; j < schema.length; j++) {
                        obj[schema[j]['name']] = rows[i][j];
                    }
                    data.push(obj);
                }
            }
        }
    });
}

Instead of using setTimeout, wrap the request in a function, and call that same function from the success callback of the request if the returned set is empty.
This prevents you from sending more than one request to your API at a time, and it also terminates as soon as you get back a satisfactory response.
(In short, you're using recursion instead of an explicit loop.)
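A minimal sketch of that pattern, reusing the asker's endpoint and query string; the maxAttempts cap and the handleResults consumer are hypothetical additions, added so a persistently empty result set can't retry forever:

function fetchQueryData(query, maxAttempts) {
    function attempt(remaining) {
        $.ajax({
            type: "POST",
            url: "/queryapi",
            data: 'query=' + encodeURIComponent(query),
            success: function (json) {
                var rows = json.data && json.data.rows;
                if (rows && rows.length > 0) {
                    handleResults(rows); // hypothetical: consume the non-empty result set
                } else if (remaining > 1) {
                    attempt(remaining - 1); // empty result set: issue the next request
                }
            }
        });
    }
    attempt(maxAttempts);
}

fetchQueryData('select * from table', 5);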

Wait until all Ajax requests in a for loop are done before moving on?

I have to make a call to the Agile Central API to get a list of defect suites, and then iterate through the list and make a nested call to get the list of defects in each suite; the nested call depends on the outer call. I then have to append the rows of data to a table and call doneCallback() to signal the end of data collection. The problem I'm having is that doneCallback() is being called before the requests have completed, so none of the data is actually passed on.
I've tried the approaches in this post: Wait until all jQuery Ajax requests are done? and this post: how to wait until Array is filled (asynchronous). In the console I can see that all the data I want is there, but nothing gets appended. My question is: how can I make sure I don't call doneCallback() until all the requests made in the loop have finished and pushed their data? Here's my code right now:
function getSuites() {
    return $.ajax({
        url: suitesURL("71101309592") + "&fetch=Name,FormattedID,Defects",
        type: "GET",
        xhrFields: {
            withCredentials: true
        },
        headers: {
            "zsessionid": apiKey
        }
    });
}

function getDefects(_ref) {
    return $.ajax({
        url: _ref,
        type: "GET",
        xhrFields: {
            withCredentials: true
        },
        headers: {
            "zsessionid": apiKey
        }
    });
}
// Download the data
myConnector.getData = function (table, doneCallback) {
    console.log("Getting Data...");
    var ajaxCalls = [], tableData = [];
    var suitesJSON = getSuites();
    suitesJSON.done(function (data) {
        var suites = data.QueryResult.Results;
        for (var i = 0; i < suites.length; i++) {
            (function (i) {
                var defectsJSON = getDefects(suites[i].Defects._ref + "?fetch=Name,FormattedID,State,Priority,CreationDate,c_RootCause,c_RootCauseCRM");
                ajaxCalls.push(defectsJSON);
                defectsJSON.done(function (data) {
                    var defects = data.QueryResult.Results;
                    for (var j = 0; j < defects.length; j++) {
                        tableData.push({
                            "suiteName": suites[i].Name, // the name of the suite collected in the outer call
                            "defectName": defects[j].Name,
                            "FormattedID": defects[j].FormattedID,
                            "State": defects[j].State,
                            "Priority": defects[j].Priority,
                            "CreationDate": defects[j].CreationDate,
                            "RootCause": defects[j].c_RootCause,
                            "RootCauseCRM": defects[j].c_RootCauseCRM
                        });
                    }
                });
            })(i);
        }
    });
    $.when.apply($, ajaxCalls).then(function () {
        console.log(tableData);
        table.appendRows(tableData);
        doneCallback();
    });
};
You should use a better model to get multiple items. Firing off GET requests from a for loop is the problem; the cleaner solution is to refactor so that you make one request that returns everything you need.
If that doesn't seem possible to you, here is a way to do what you want in jQuery.
$.when(
    $.get(path, callback), $.get(path, callback), $.get(path, callback)
).then(function () {
    // This is called after all requests are done
});
You could create an array of all your requests, like [$.get(path, callback), request2, request3, ...], and then use the spread operator to pass them as arguments:
var args = [$.get(path, callback), request2, request3 /* , ... */];
$.when(...args).then(() => { /* call here */ });
This link has the rest of the information
https://css-tricks.com/multiple-simultaneous-ajax-requests-one-callback-jquery/
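For a variable number of requests, the same idea in pre-spread jQuery looks roughly like the sketch below; the /api/item/ URLs are made up for illustration:

var urls = ["/api/item/1", "/api/item/2", "/api/item/3"]; // hypothetical endpoints

// one $.get promise per URL
var requests = urls.map(function (url) {
    return $.get(url);
});

// $.when.apply spreads the array as arguments, like $.when(...requests)
$.when.apply($, requests).then(function () {
    // each argument is the [data, statusText, jqXHR] triple of one request
    var results = Array.prototype.slice.call(arguments).map(function (arg) {
        return arg[0];
    });
    console.log(results);
});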
I think the problem is that you are calling $.when right after getSuites() is executed.
At that point $.when 'sees' the ajaxCalls array empty (because getSuites() hasn't finished yet) and executes doneCallback().
Try calling $.when INSIDE the suitesJSON.done function; that way it runs after the ajaxCalls array has been filled:
myConnector.getData = function (table, doneCallback) {
    console.log("Getting Data...");
    var ajaxCalls = [], tableData = [];
    var suitesJSON = getSuites();
    suitesJSON.done(function (data) {
        var suites = data.QueryResult.Results;
        for (var i = 0; i < suites.length; i++) {
            (function (i) {
                var defectsJSON = getDefects(suites[i].Defects._ref + "?fetch=Name,FormattedID,State,Priority,CreationDate,c_RootCause,c_RootCauseCRM");
                ajaxCalls.push(defectsJSON);
                defectsJSON.done(function (data) {
                    var defects = data.QueryResult.Results;
                    for (var j = 0; j < defects.length; j++) {
                        tableData.push({
                            "suiteName": suites[i].Name, // the name of the suite collected in the outer call
                            "defectName": defects[j].Name,
                            "FormattedID": defects[j].FormattedID,
                            "State": defects[j].State,
                            "Priority": defects[j].Priority,
                            "CreationDate": defects[j].CreationDate,
                            "RootCause": defects[j].c_RootCause,
                            "RootCauseCRM": defects[j].c_RootCauseCRM
                        });
                    }
                });
            })(i);
        }
        $.when.apply($, ajaxCalls).then(function () {
            console.log(tableData);
            table.appendRows(tableData);
            doneCallback();
        });
    });
};

Wait for loop to finish $.getJSON for each array item before outputting data

I've got an array of names for which I need to retrieve data, and I'm currently doing this with $.getJSON inside a loop. It works and I can retrieve the data, but to output the correct value I need to use setTimeout or similar. I'm wondering if there's a more refined way of achieving what I'm looking for.
Here's what I've got.
var names = ["riotgames", "example"];
var online = [];

for (var i = 0; i < names.length; i++) {
    $.getJSON('https://api.twitch.tv/kraken/streams/' + names[i], function (data) {
        if (data.stream != null) {
            online.push(data.stream.channel.display_name);
        }
    });
}

console.log(online); // outputs []

setTimeout(function () {
    console.log(online); // outputs correctly
}, 1000);

<script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script>
When calling $.getJSON, you are triggering asynchronous requests. This means they run in the background: you do not wait for them to finish; their callbacks fire (like an event) once each request is done.
This means you cannot access online outside the callback(s) and expect it to be populated.
If you want to "wait" for all the requests to finish, then I suggest using promises. You can use $.when to combine all the requests into one promise, then run a callback once everything is done.
var names = ["riotgames", "example"];
var promises = [];

for (var i = 0; i < names.length; i++) {
    // $.getJSON returns a promise
    promises.push($.getJSON('https://api.twitch.tv/kraken/streams/' + names[i]));
}

// Combine all promises and run a callback
$.when.apply($, promises).then(function () {
    var online = [];
    // This callback will be passed the result of each AJAX call as a parameter
    for (var i = 0; i < arguments.length; i++) {
        // arguments[i][0] is needed because each argument is an array of
        // 3 elements: the data, the status, and the jqXHR object
        online.push(arguments[i][0].stream.channel.display_name);
    }
    console.log(online);
});
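One caveat worth knowing with this approach: when $.when is given exactly one Deferred, the callback receives that request's (data, statusText, jqXHR) as three separate arguments rather than one array per request, so the arguments[i][0] indexing only holds with two or more requests. A defensive variant of the callback, assuming the same promises array as above:

$.when.apply($, promises).then(function () {
    // normalize: with a single promise, arguments is (data, status, jqXHR)
    // rather than one [data, status, jqXHR] array per request
    var responses = promises.length === 1 ? [arguments] : arguments;
    var online = [];
    for (var i = 0; i < responses.length; i++) {
        var data = responses[i][0];
        if (data.stream != null) {
            online.push(data.stream.channel.display_name);
        }
    }
    console.log(online);
});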

Using $q.all for making synchronous HTTP calls not working - AngularJS

I have encountered the following problem and am unable to proceed. I tried some of the solutions posted in other questions, but couldn't get them to work.
In my controller I have a $scope.init() function. It contains a for loop which calls a function that makes http.get calls to different URLs; each URL depends on the previous call's data, so the calls need to run in sequence.
$scope.init = function () {
    decodedURL = $routeParams.url;
    // evaluate some variables, ampIndex is > -1 here
    for (var i = 0; ampIndex > -1; ++i) {
        decodedURL = decodedURL.substring(ampIndex + 1, decodedURL.length);
        ampIndex = decodedURL.indexOf("&");
        $scope.getNextList(i);
        /* the above function call makes the http.get call to the currentURL
           based on decodedURL, and the data is stored in variable[i+1], so for
           the next iteration the calls should be synchronous */
        $q.all(asyncCall).then(function (data) { var j; });
        /* I wrote the above dummy statement so that it is executed only after
           http.get in $scope.getNextList() is successful, but it is not working */
    }
};
$scope.getNextList = function (index) {
    // $currentURL is calculated
    var hello = _helpers.server.http($http, $scope.currentURL)
        .success(function (response) {
        })
        .error(function (errResponse) {
        });
    asyncCall.push(hello);
};
How do I solve this problem?
How about something along these lines?
http://plnkr.co/edit/pjWbNX1lnE2HtaNs1nEX?p=preview
$scope.init = function () {
    for (var i = 0; i < 10; i++) {
        $scope.getNextList(i); // push calls into array
    }

    var index = 0;
    function makeCall() {
        $scope.asyncCall[index]
            .success(function (data) {
                if (index < $scope.asyncCall.length - 1) {
                    console.log(index);
                    index += 1;
                    makeCall();
                } else {
                    console.log(index); // last call
                }
            });
    }
    makeCall();
};
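As an alternative sketch: since $http.get already returns a promise, strictly sequential calls can also be written as a flat chain with Array.prototype.reduce, avoiding the manual recursion. buildUrl here is a hypothetical helper that derives each URL from the previous response:

$scope.init = function () {
    var indices = [0, 1, 2, 3]; // however many calls are needed

    indices.reduce(function (chain, i) {
        return chain.then(function (previousData) {
            // buildUrl is hypothetical: derive the next URL from the last response
            return $http.get(buildUrl(previousData, i)).then(function (response) {
                return response.data;
            });
        });
    }, $q.when(null)).then(function (lastData) {
        console.log('all sequential calls finished', lastData);
    });
};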

jQuery AJAX Requests in loop Array issue

I've got some AJAX requests in a for loop. The loop steps through an array of URLs and sends requests to all of them, checks for something on each of the pages, and pages containing the right data then have some data written to an array. The array is a 'global' variable.
The loop works fine. The requests work fine. The array isn't working properly. The done() function for request 1 will push to the array, but the next done() will just overwrite. This consistently happens: done() calls in rapid succession overwrite sections of the array instead of adding to it. I don't know why this is happening.
I understand the AJAX calls come back in odd orders and timings, but I thought my use of array.push() would get around the need to specify indexes. What's going on?
var globalArray = [];

for (var i = 0; i < URLList.length; i++) {
    (function (i) {
        $.ajax({
            url: URLList[i],
            cache: false
        }).done(function (html) {
            if (html.indexOf('someString') != -1) {
                globalArray.push(URLList[i]);
            }
        });
    })(i);
}
So basically, even if the second done() triggers after the first done(), the second done()'s array.push() acts as if the array were empty. I don't know why it isn't checking the current contents each time.
An improvement on @Varrinder's answer:
var URLList = ["url1", "url2", "url3"];
var ajaxResulstArray = [];

makeRequest();

function makeRequest() {
    var urlToRequest = URLList.shift();
    if (urlToRequest != undefined) {
        $.ajax({
            url: urlToRequest,
            cache: false
        }).done(function (html) {
            if (html.indexOf("someString") != -1) {
                ajaxResulstArray.push(urlToRequest);
            }
        }).always(makeRequest);
    }
}
Hope I'm not completely missing the point here.
I'd try Array.shift().
Something like below:
var URLList = ["url1", "url2", "url3"];
var ajaxResulstArray = [];

makeRequest();

function makeRequest() {
    var urlToRequest = URLList.shift();
    if (urlToRequest != undefined) {
        $.ajax({
            url: urlToRequest,
            cache: false
        }).done(function (html) {
            if (html.indexOf("someString") != -1) {
                ajaxResulstArray.push(urlToRequest);
            }
            makeRequest();
        });
    }
}
I have declared a global integer.
So far, this now seems to work. I can only surmise that push() takes too long to figure things out. Incidentally, in my console.log I also check globalArray.length; surprisingly, it doesn't seem to keep up very well with the globalArrayIndex method I'm now using. I guess .length is really slow, so many quick-succession done() calls caused the issue.
var globalArray = [];
var globalArrayIndex = 0;

for (var i = 0; i < URLList.length; i++) {
    (function (i) {
        $.ajax({
            url: URLList[i],
            cache: false
        }).done(function (html) {
            if (html.indexOf('someString') != -1) {
                globalArray[globalArrayIndex] = URLList[i];
                globalArrayIndex++;
            }
        });
    })(i);
}

Handling multiple asynchronous callbacks

I am learning node.js with learnyounode.
I am having a problem with the JUGGLING ASYNC exercise.
The problem is described as follows:
You are given three URLs as command line arguments. You are supposed to make http.get() calls to fetch data from these URLs and then print the results in the same order as the URLs appear in the argument list.
Here is my code:
var http = require('http');
var truecount = 0;
var printlist = [];

for (var i = 2; i < process.argv.length; i++) {
    http.get(process.argv[i], function (response) {
        var printdata = "";
        response.setEncoding('utf8');
        response.on('data', function (data) {
            printdata += data;
        });
        response.on('end', function () {
            truecount += 1;
            printlist.push(printdata);
            if (truecount == 3) {
                printlist.forEach(function (item) {
                    console.log(item);
                });
            }
        });
    });
}
Here is what I do not understand:
I am trying to store the completed data in response.on('end', function(){}) for each URL, using a dictionary. However, I do not know how to get the URL for that http.get(). If I could use a local variable inside http.get() that would be great, but whenever I declare a variable as var url it always ends up pointing to the last URL, since it is global and keeps updating through the loop. What is the best way for me to store the completed data as values, with each key equal to its URL?
This is how I would go about solving the problem.
#!/usr/bin/env node
var http = require('http');

var argv = process.argv.splice(2),
    truecount = argv.length,
    pages = [];

function printUrls() {
    if (--truecount > 0)
        return;
    for (i = 0; i < pages.length; i++) {
        console.log(pages[i].data + '\n\n');
    }
}

function HTMLPage(url) {
    var _page = this;
    _page.data = '### [URL](' + url + ')\n';
    http.get(url, function (res) {
        res.setEncoding('utf8');
        res.on('data', function (data) {
            _page.data += data;
        });
        res.on('end', printUrls);
    });
}

for (var i = 0; i < argv.length; i++)
    pages.push(new HTMLPage(argv[i]));
It adds the requests to an array at the start of each request; that way, once done, I can iterate through the responses knowing that they are in the correct order.
When dealing with asynchronous processing, I find it much easier to think about each process as something with a concrete beginning and end. If you require the order of the requests to be preserved, then an entry must be made when each process is created, and you refer back to that record on completion. Only then can you guarantee that you have things in the right order.
If you were set on using your method above, you could define a variable inside your get callback closure and use that to store the URLs; that way you wouldn't end up with the last URL overwriting your variables (see the sketch below). If you do go this way, though, you'll dramatically increase your overhead when you have to use your URLs from process.argv to access each response in that order. I wouldn't advise it.
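A minimal sketch of that closure idea, keying the completed data by URL as the asker wanted; forEach gives each iteration its own url binding, so nothing gets overwritten:

var http = require('http');

var urls = process.argv.slice(2);
var byUrl = {};              // completed body keyed by URL
var remaining = urls.length;

urls.forEach(function (url) {
    http.get(url, function (res) {
        var body = '';
        res.setEncoding('utf8');
        res.on('data', function (chunk) { body += chunk; });
        res.on('end', function () {
            byUrl[url] = body;   // `url` is captured per iteration
            if (--remaining === 0) {
                // print in the original argument order
                urls.forEach(function (u) { console.log(byUrl[u]); });
            }
        });
    });
});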
I went about this challenge a little differently. I'm creating an array of functions that call http.get, and immediately invoking them with their specific context. The streams write to an object where the key is the port of the server that each stream is relevant to. When the end event is triggered, it adds that server to the completed array; when that array is full, it iterates through and echoes the responses in the original order the servers were given.
There's no right way, but there are probably a dozen or more ways. Wanted to share mine.
var http = require('http'),
    request = [],
    dataStrings = {},
    orderOfServerInputs = [];

var completeResponses = [];

for (server in process.argv) {
    if (server >= 2) {
        orderOfServerInputs[orderOfServerInputs.length] = process.argv[server].substr(-4);
        request[request.length] = function (thisServer) {
            http.get(process.argv[server], function (response) {
                response.on("data", function (data) {
                    dataStrings[thisServer.substr(-4)] = dataStrings[thisServer.substr(-4)] ? dataStrings[thisServer.substr(-4)] : ''; // if not set, set to ''
                    dataStrings[thisServer.substr(-4)] += data.toString();
                });
                response.on("end", function (data) {
                    completeResponses[completeResponses.length] = true;
                    if (completeResponses.length > 2) {
                        for (item in orderOfServerInputs) {
                            serverNo = orderOfServerInputs[item].substr(-4);
                            console.log(dataStrings[serverNo]);
                        }
                    }
                });
            });
        }(process.argv[server]);
    }
}
An Immediately-Invoked Function Expression (IIFE) could be a solution to your problem. It allows us to bind a specific value to a function; in your case, the URL that gets the response. In the code below, I bind the variable i to index, so whichever URL gets the response, the corresponding index of printlist is updated. For more information, refer to this website.
var http = require('http');
var truecount = 0;
var printlist = [];

for (var i = 2; i < process.argv.length; i++) {
    (function (index) {
        http.get(process.argv[index], function (response) {
            response.setEncoding('utf8');
            response.on('data', function (data) {
                if (printlist[index] == undefined)
                    printlist[index] = data;
                else
                    printlist[index] += data;
            });
            response.on('end', function () {
                truecount += 1;
                if (truecount == 3) {
                    printlist.forEach(function (item) {
                        console.log(item);
                    });
                }
            });
        });
    })(i);
}
